diff --git a/ultralytics/.github/ISSUE_TEMPLATE/bug-report.yml b/ultralytics/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100755 index 0000000..203250b --- /dev/null +++ b/ultralytics/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,96 @@ +name: 🐛 Bug Report +# title: " " +description: Problems with YOLOv8 +labels: [bug, triage] +body: + - type: markdown + attributes: + value: | + Thank you for submitting a YOLOv8 🐛 Bug Report! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the Ultralytics [Docs](https://docs.ultralytics.com) and [issues](https://github.com/ultralytics/ultralytics/issues) to see if a similar bug report already exists. + options: + - label: > + I have searched the YOLOv8 [issues](https://github.com/ultralytics/ultralytics/issues) and found no similar bug report. + required: true + + - type: dropdown + attributes: + label: YOLOv8 Component + description: | + Please select the part of YOLOv8 where you found the bug. + multiple: true + options: + - "Install" + - "Train" + - "Val" + - "Predict" + - "Export" + - "Multi-GPU" + - "Augmentation" + - "Hyperparameter Tuning" + - "Integrations" + - "Other" + validations: + required: false + + - type: textarea + attributes: + label: Bug + description: Provide console output with error messages and/or screenshots of the bug. + placeholder: | + 💡 ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response. + validations: + required: true + + - type: textarea + attributes: + label: Environment + description: Please specify the software and hardware you used to produce the bug. + placeholder: | + Paste output of `yolo checks` or `ultralytics.checks()` command, i.e.: + ``` + Ultralytics YOLOv8.0.181 🚀 Python-3.11.2 torch-2.0.1 CPU (Apple M2) + Setup complete ✅ (8 CPUs, 16.0 GB RAM, 266.5/460.4 GB disk) + + OS macOS-13.5.2 + Environment Jupyter + Python 3.11.2 + Install git + RAM 16.00 GB + CPU Apple M2 + CUDA None + ``` + validations: + required: false + + - type: textarea + attributes: + label: Minimal Reproducible Example + description: > + When asking a question, people will be better able to provide help if you provide code that they can easily understand and use to **reproduce** the problem. + This is referred to by community members as creating a [minimal reproducible example](https://docs.ultralytics.com/help/minimum_reproducible_example/). + placeholder: | + ``` + # Code to reproduce your issue here + ``` + validations: + required: false + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share? + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? + description: > + (Optional) We encourage you to submit a [Pull Request](https://github.com/ultralytics/ultralytics/pulls) (PR) to help improve YOLOv8 for everyone, especially if you have a good understanding of how to implement a fix or feature. + See the YOLOv8 [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started. + options: + - label: Yes I'd like to help by submitting a PR!
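The Minimal Reproducible Example field above expects runnable code rather than prose. As a sketch of what a filled-in MRE might look like (the checkpoint name and image URL are placeholders, assuming `pip install ultralytics` has been run):

```python
# Hypothetical minimal reproducible example for a YOLOv8 bug report.
# Assumes the `ultralytics` package is installed; weights download on first use.
from ultralytics import YOLO

model = YOLO("yolov8n.pt")  # nano checkpoint, smallest and fastest to fetch
results = model.predict("https://ultralytics.com/images/zidane.jpg", imgsz=320)
print(results[0].boxes)  # detections for the single input image
```

Pairing a snippet like this with the `yolo checks` output requested in the Environment field gives maintainers a complete picture in one read.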
diff --git a/ultralytics/.github/ISSUE_TEMPLATE/config.yml b/ultralytics/.github/ISSUE_TEMPLATE/config.yml new file mode 100755 index 0000000..9018a62 --- /dev/null +++ b/ultralytics/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: true +contact_links: + - name: 📄 Docs + url: https://docs.ultralytics.com/ + about: Full Ultralytics YOLOv8 Documentation + - name: 💬 Forum + url: https://community.ultralytics.com/ + about: Ask on Ultralytics Community Forum + - name: 🎧 Discord + url: https://ultralytics.com/discord + about: Ask on Ultralytics Discord
diff --git a/ultralytics/.github/ISSUE_TEMPLATE/feature-request.yml b/ultralytics/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100755 index 0000000..76fc7b1 --- /dev/null +++ b/ultralytics/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,50 @@ +name: 🚀 Feature Request +description: Suggest a YOLOv8 idea +# title: " " +labels: [enhancement] +body: + - type: markdown + attributes: + value: | + Thank you for submitting a YOLOv8 🚀 Feature Request! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the Ultralytics [Docs](https://docs.ultralytics.com) and [issues](https://github.com/ultralytics/ultralytics/issues) to see if a similar feature request already exists. + options: + - label: > + I have searched the YOLOv8 [issues](https://github.com/ultralytics/ultralytics/issues) and found no similar feature requests. + required: true + + - type: textarea + attributes: + label: Description + description: A short description of your feature. + placeholder: | + What new feature would you like to see in YOLOv8? + validations: + required: true + + - type: textarea + attributes: + label: Use case + description: | + Describe the use case of your feature request. It will help us understand and prioritize the feature request. + placeholder: | + How would this feature be used, and who would use it? + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share? + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? + description: > + (Optional) We encourage you to submit a [Pull Request](https://github.com/ultralytics/ultralytics/pulls) (PR) to help improve YOLOv8 for everyone, especially if you have a good understanding of how to implement a fix or feature. + See the YOLOv8 [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started. + options: + - label: Yes I'd like to help by submitting a PR!
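GitHub only validates these issue forms once they land on the default branch, so a quick local sanity check can catch schema slips before pushing. A minimal sketch, assuming PyYAML is installed and the script runs from the repository root (the required-key set below is an assumption based on the issue-forms schema, not part of this patch):

```python
# Sanity-check the issue forms above before pushing (hypothetical helper).
from pathlib import Path

import yaml  # pip install pyyaml

for path in sorted(Path(".github/ISSUE_TEMPLATE").glob("*.yml")):
    data = yaml.safe_load(path.read_text(encoding="utf-8"))
    if path.name == "config.yml":
        continue  # contact-link config, not an issue form
    # Issue forms need a name, a description and a non-empty body list.
    assert {"name", "description", "body"} <= data.keys(), path
    assert isinstance(data["body"], list) and data["body"], path
    print(f"{path.name}: OK ({len(data['body'])} body blocks)")
```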
diff --git a/ultralytics/.github/ISSUE_TEMPLATE/question.yml b/ultralytics/.github/ISSUE_TEMPLATE/question.yml new file mode 100755 index 0000000..090bc5d --- /dev/null +++ b/ultralytics/.github/ISSUE_TEMPLATE/question.yml @@ -0,0 +1,33 @@ +name: ❓ Question +description: Ask a YOLOv8 question +# title: " " +labels: [question] +body: + - type: markdown + attributes: + value: | + Thank you for asking a YOLOv8 ❓ Question! + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please search the Ultralytics [Docs](https://docs.ultralytics.com), [issues](https://github.com/ultralytics/ultralytics/issues) and [discussions](https://github.com/ultralytics/ultralytics/discussions) to see if a similar question already exists. + options: + - label: > + I have searched the YOLOv8 [issues](https://github.com/ultralytics/ultralytics/issues) and [discussions](https://github.com/ultralytics/ultralytics/discussions) and found no similar questions. + required: true + + - type: textarea + attributes: + label: Question + description: What is your question? + placeholder: | + 💡 ProTip! Include as much information as possible (screenshots, logs, tracebacks etc.) to receive the most helpful response. + validations: + required: true + + - type: textarea + attributes: + label: Additional + description: Anything else you would like to share?
diff --git a/ultralytics/.github/dependabot.yml b/ultralytics/.github/dependabot.yml new file mode 100755 index 0000000..2d4ae31 --- /dev/null +++ b/ultralytics/.github/dependabot.yml @@ -0,0 +1,27 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# Dependabot for package version updates +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: pip + directory: "/" + schedule: + interval: weekly + time: "04:00" + open-pull-requests-limit: 10 + reviewers: + - glenn-jocher + labels: + - dependencies + + - package-ecosystem: github-actions + directory: "/.github/workflows" + schedule: + interval: weekly + time: "04:00" + open-pull-requests-limit: 5 + reviewers: + - glenn-jocher + labels: + - dependencies
diff --git a/ultralytics/.github/workflows/ci.yaml b/ultralytics/.github/workflows/ci.yaml new file mode 100755 index 0000000..81e5f1c --- /dev/null +++ b/ultralytics/.github/workflows/ci.yaml @@ -0,0 +1,280 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# YOLO Continuous Integration (CI) GitHub Actions tests + +name: Ultralytics CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + schedule: + - cron: '0 0 * * *' # runs at 00:00
UTC every day + workflow_dispatch: + inputs: + hub: + description: 'Run HUB' + default: false + type: boolean + benchmarks: + description: 'Run Benchmarks' + default: false + type: boolean + tests: + description: 'Run Tests' + default: false + type: boolean + gpu: + description: 'Run GPU' + default: false + type: boolean + conda: + description: 'Run Conda' + default: false + type: boolean + +jobs: + HUB: + if: github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && github.event.inputs.hub == 'true')) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.11'] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' # caching pip dependencies + - name: Install requirements + shell: bash # for Windows compatibility + run: | + python -m pip install --upgrade pip wheel + pip install -e . --extra-index-url https://download.pytorch.org/whl/cpu + - name: Check environment + run: | + yolo checks + pip list + - name: Test HUB training + shell: python + env: + API_KEY: ${{ secrets.ULTRALYTICS_HUB_API_KEY }} + MODEL_ID: ${{ secrets.ULTRALYTICS_HUB_MODEL_ID }} + run: | + import os + from ultralytics import YOLO, hub + api_key, model_id = os.environ['API_KEY'], os.environ['MODEL_ID'] + hub.login(api_key) + hub.reset_model(model_id) + model = YOLO('https://hub.ultralytics.com/models/' + model_id) + model.train() + - name: Test HUB inference API + shell: python + env: + API_KEY: ${{ secrets.ULTRALYTICS_HUB_API_KEY }} + MODEL_ID: ${{ secrets.ULTRALYTICS_HUB_MODEL_ID }} + run: | + import os + import requests + import json + api_key, model_id = os.environ['API_KEY'], os.environ['MODEL_ID'] + url = f"https://api.ultralytics.com/v1/predict/{model_id}" + headers = {"x-api-key": api_key} + data = {"size": 320, "confidence": 0.25, "iou": 0.45} + with open("ultralytics/assets/zidane.jpg", "rb") as f: + response = requests.post(url, headers=headers, data=data, files={"image": f}) + assert response.status_code == 200, f'Status code {response.status_code}, Reason {response.reason}' + print(json.dumps(response.json(), indent=2)) + + Benchmarks: + if: github.event_name != 'workflow_dispatch' || github.event.inputs.benchmarks == 'true' + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.10'] + model: [yolov8n] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' # caching pip dependencies + - name: Install requirements + shell: bash # for Windows compatibility + run: | + python -m pip install --upgrade pip wheel + pip install -e ".[export]" coverage --extra-index-url https://download.pytorch.org/whl/cpu + # Fix SavedModel issue "partially initialized module 'jax' has no attribute 'version' (most likely due to a circular import)" in https://github.com/google/jax/discussions/14036 + # pip install -U 'jax!=0.4.15' 'jaxlib!=0.4.15' + yolo export format=tflite imgsz=32 || true + - name: Check environment + run: | + yolo checks + pip list + # - name: Benchmark DetectionModel + # shell: bash + # run: coverage run -a --source=ultralytics -m ultralytics.cfg.__init__ benchmark model='path with spaces/${{ matrix.model }}.pt' imgsz=160 verbose=0.318 + - name: Benchmark SegmentationModel + shell: bash + run: coverage run -a 
--source=ultralytics -m ultralytics.cfg.__init__ benchmark model='path with spaces/${{ matrix.model }}-seg.pt' imgsz=160 verbose=0.286 + - name: Benchmark ClassificationModel + shell: bash + run: coverage run -a --source=ultralytics -m ultralytics.cfg.__init__ benchmark model='path with spaces/${{ matrix.model }}-cls.pt' imgsz=160 verbose=0.166 + - name: Benchmark PoseModel + shell: bash + run: coverage run -a --source=ultralytics -m ultralytics.cfg.__init__ benchmark model='path with spaces/${{ matrix.model }}-pose.pt' imgsz=160 verbose=0.185 + - name: Merge Coverage Reports + run: | + coverage xml -o coverage-benchmarks.xml + - name: Upload Coverage Reports to CodeCov + if: github.repository == 'ultralytics/ultralytics' + uses: codecov/codecov-action@v3 + with: + flags: Benchmarks + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + - name: Benchmark Summary + run: | + cat benchmarks.log + echo "$(cat benchmarks.log)" >> $GITHUB_STEP_SUMMARY + + Tests: + if: github.event_name != 'workflow_dispatch' || github.event.inputs.tests == 'true' + timeout-minutes: 60 + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.11'] + torch: [latest] + include: + - os: ubuntu-latest + python-version: '3.8' # torch 1.8.0 requires python >=3.6, <=3.8 + torch: '1.8.0' # min torch version CI https://pypi.org/project/torchvision/ + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' # caching pip dependencies + - name: Install requirements + shell: bash # for Windows compatibility + run: | # CoreML must be installed before export due to protobuf error from AutoInstall + python -m pip install --upgrade pip wheel + torch="" + if [ "${{ matrix.torch }}" == "1.8.0" ]; then + torch="torch==1.8.0 torchvision==0.9.0" + fi + pip install -e . $torch pytest-cov "coremltools>=7.0" --extra-index-url https://download.pytorch.org/whl/cpu + - name: Check environment + run: | + yolo checks + pip list + - name: Pytest tests + shell: bash # for Windows compatibility + run: | + slow="" + if [[ "${{ github.event_name }}" == "schedule" ]] || [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + pip install mlflow pycocotools 'ray[tune]' + slow="--slow" + fi + pytest $slow --cov=ultralytics/ --cov-report xml tests/ + - name: Upload Coverage Reports to CodeCov + if: github.repository == 'ultralytics/ultralytics' # && matrix.os == 'ubuntu-latest' && matrix.python-version == '3.11' + uses: codecov/codecov-action@v3 + with: + flags: Tests + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + GPU: + if: github.repository == 'ultralytics/ultralytics' && (github.event_name != 'workflow_dispatch' || github.event.inputs.gpu == 'true') + timeout-minutes: 60 + runs-on: gpu-latest + steps: + - uses: actions/checkout@v4 + - name: Install requirements + run: pip install -e . 
+ - name: Check environment + run: | + yolo checks + pip list + - name: Pytest tests + run: pytest --cov=ultralytics/ --cov-report xml tests/test_cuda.py + - name: Upload Coverage Reports to CodeCov + uses: codecov/codecov-action@v3 + with: + flags: GPU + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + Conda: + if: github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule_disabled' || github.event.inputs.conda == 'true') + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ['3.11'] + defaults: + run: + shell: bash -el {0} + steps: + - uses: conda-incubator/setup-miniconda@v3 + with: + python-version: ${{ matrix.python-version }} + mamba-version: "*" + channels: conda-forge,defaults + channel-priority: true + activate-environment: anaconda-client-env + - name: Install Libmamba + run: | + conda config --set solver libmamba + - name: Install Ultralytics package from conda-forge + run: | + conda install -c pytorch -c conda-forge pytorch torchvision ultralytics openvino + - name: Install pip packages + run: | + pip install pytest 'coremltools>=7.0' + - name: Check environment + run: | + conda list + - name: Test CLI + run: | + yolo predict model=yolov8n.pt imgsz=320 + yolo train model=yolov8n.pt data=coco8.yaml epochs=1 imgsz=32 + yolo val model=yolov8n.pt data=coco8.yaml imgsz=32 + yolo export model=yolov8n.pt format=torchscript imgsz=160 + - name: Test Python + run: | + python -c " + from ultralytics import YOLO + model = YOLO('yolov8n.pt') + results = model.train(data='coco8.yaml', epochs=3, imgsz=160) + results = model.val(imgsz=160) + results = model.predict(imgsz=160) + results = model.export(format='onnx', imgsz=160) + " + - name: PyTest + run: | + git clone https://github.com/ultralytics/ultralytics + pytest ultralytics/tests + + Summary: + runs-on: ubuntu-latest + needs: [HUB, Benchmarks, Tests, GPU, Conda] # Add job names that you want to check for failure + if: always() # This ensures the job runs even if previous jobs fail + steps: + - name: Check for failure and notify + if: (needs.HUB.result == 'failure' || needs.Benchmarks.result == 'failure' || needs.Tests.result == 'failure' || needs.GPU.result == 'failure' || needs.Conda.result == 'failure') && github.repository == 'ultralytics/ultralytics' && (github.event_name == 'schedule' || github.event_name == 'push') + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + {"text": " GitHub Actions error for ${{ github.workflow }} ❌\n\n\n*Repository:* https://github.com/${{ github.repository }}\n*Action:* https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\n*Author:* ${{ github.actor }}\n*Event:* ${{ github.event_name }}\n"} + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_YOLO }}
diff --git a/ultralytics/.github/workflows/cla.yml b/ultralytics/.github/workflows/cla.yml new file mode 100755 index 0000000..2d9bfe9 --- /dev/null +++ b/ultralytics/.github/workflows/cla.yml @@ -0,0 +1,37 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license + +name: CLA Assistant +on: + issue_comment: + types: + - created + pull_request_target: + types: + - reopened + - opened + - synchronize + +jobs: + CLA: + if: github.repository ==
'ultralytics/ultralytics' + runs-on: ubuntu-latest + steps: + - name: CLA Assistant + if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I sign the CLA') || github.event_name == 'pull_request_target' + uses: contributor-assistant/github-action@v2.3.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # must be repository secret token + PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} + with: + path-to-signatures: 'signatures/version1/cla.json' + path-to-document: 'https://docs.ultralytics.com/help/CLA' # CLA document + # branch should not be protected + branch: 'main' + allowlist: dependabot[bot],github-actions,[pre-commit*,pre-commit*,bot* + + remote-organization-name: ultralytics + remote-repository-name: cla + custom-pr-sign-comment: 'I have read the CLA Document and I sign the CLA' + custom-allsigned-prcomment: All Contributors have signed the CLA. ✅ + #custom-notsigned-prcomment: 'pull request comment with Introductory message to ask new contributors to sign'
diff --git a/ultralytics/.github/workflows/codeql.yaml b/ultralytics/.github/workflows/codeql.yaml new file mode 100755 index 0000000..5dc86e8 --- /dev/null +++ b/ultralytics/.github/workflows/codeql.yaml @@ -0,0 +1,42 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license + +name: "CodeQL" + +on: + schedule: + - cron: '0 0 1 * *' + workflow_dispatch: + +jobs: + analyze: + name: Analyze + runs-on: ${{ 'ubuntu-latest' }} + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ['python'] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file.
+ # queries: security-extended,security-and-quality + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}"
diff --git a/ultralytics/.github/workflows/docker.yaml b/ultralytics/.github/workflows/docker.yaml new file mode 100755 index 0000000..2875c45 --- /dev/null +++ b/ultralytics/.github/workflows/docker.yaml @@ -0,0 +1,148 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest images on DockerHub https://hub.docker.com/r/ultralytics + +name: Publish Docker Images + +on: + push: + branches: [main] + workflow_dispatch: + inputs: + Dockerfile: + type: boolean + description: Use Dockerfile + default: true + Dockerfile-cpu: + type: boolean + description: Use Dockerfile-cpu + Dockerfile-arm64: + type: boolean + description: Use Dockerfile-arm64 + Dockerfile-jetson: + type: boolean + description: Use Dockerfile-jetson + Dockerfile-python: + type: boolean + description: Use Dockerfile-python + Dockerfile-conda: + type: boolean + description: Use Dockerfile-conda + push: + type: boolean + description: Push images to Docker Hub + default: true + +jobs: + docker: + if: github.repository == 'ultralytics/ultralytics' + name: Push + runs-on: ubuntu-latest + strategy: + fail-fast: false + max-parallel: 6 + matrix: + include: + - dockerfile: "Dockerfile" + tags: "latest" + platforms: "linux/amd64" + - dockerfile: "Dockerfile-cpu" + tags: "latest-cpu" + platforms: "linux/amd64" + - dockerfile: "Dockerfile-arm64" + tags: "latest-arm64" + platforms: "linux/arm64" + - dockerfile: "Dockerfile-jetson" + tags: "latest-jetson" + platforms: "linux/arm64" + - dockerfile: "Dockerfile-python" + tags: "latest-python" + platforms: "linux/amd64" + # - dockerfile: "Dockerfile-conda" + # tags: "latest-conda" + # platforms: "linux/amd64" + steps: + - name: Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 # copy full .git directory to access full git history in Docker images + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Retrieve Ultralytics version + id: get_version + run: | + VERSION=$(grep "^__version__ =" ultralytics/__init__.py | awk -F"'" '{print $2}') + echo "Retrieved Ultralytics version: $VERSION" + echo "version=$VERSION" >> $GITHUB_OUTPUT + + VERSION_TAG=$(echo "${{ matrix.tags }}" | sed "s/latest/${VERSION}/") + echo "Intended version tag: $VERSION_TAG" + echo "version_tag=$VERSION_TAG" >> $GITHUB_OUTPUT + + - name: Check if version tag exists on DockerHub + id: check_tag + run: | + RESPONSE=$(curl -s https://hub.docker.com/v2/repositories/ultralytics/ultralytics/tags/$VERSION_TAG) + MESSAGE=$(echo $RESPONSE | jq -r '.message') + if [[ "$MESSAGE" == "null" ]]; then + echo "Tag $VERSION_TAG already exists on DockerHub." + echo "exists=true" >> $GITHUB_OUTPUT + elif [[ "$MESSAGE" == *"404"* ]]; then + echo "Tag $VERSION_TAG does not exist on DockerHub." + echo "exists=false" >> $GITHUB_OUTPUT + else + echo "Unexpected response from DockerHub.
Please check manually." + echo "exists=false" >> $GITHUB_OUTPUT + fi + env: + VERSION_TAG: ${{ steps.get_version.outputs.version_tag }} + + - name: Build Image + if: github.event_name == 'push' || github.event.inputs[matrix.dockerfile] == 'true' + run: | + docker build --platform ${{ matrix.platforms }} -f docker/${{ matrix.dockerfile }} \ + -t ultralytics/ultralytics:${{ matrix.tags }} \ + -t ultralytics/ultralytics:${{ steps.get_version.outputs.version_tag }} . + + - name: Run Tests + if: (github.event_name == 'push' || github.event.inputs[matrix.dockerfile] == 'true') && matrix.platforms == 'linux/amd64' && matrix.dockerfile != 'Dockerfile-conda' # arm64 images not supported on GitHub CI runners + run: docker run ultralytics/ultralytics:${{ matrix.tags }} /bin/bash -c "pip install pytest && pytest tests" + + - name: Run Benchmarks + # WARNING: Dockerfile (GPU) error on TF.js export 'module 'numpy' has no attribute 'object'. + if: (github.event_name == 'push' || github.event.inputs[matrix.dockerfile] == 'true') && matrix.platforms == 'linux/amd64' && matrix.dockerfile != 'Dockerfile' && matrix.dockerfile != 'Dockerfile-conda' # arm64 images not supported on GitHub CI runners + run: docker run ultralytics/ultralytics:${{ matrix.tags }} yolo benchmark model=yolov8n.pt imgsz=160 verbose=0.318 + + - name: Push Docker Image with Ultralytics version tag + if: (github.event_name == 'push' || (github.event.inputs[matrix.dockerfile] == 'true' && github.event.inputs.push == 'true')) && steps.check_tag.outputs.exists == 'false' && matrix.dockerfile != 'Dockerfile-conda' + run: | + docker push ultralytics/ultralytics:${{ steps.get_version.outputs.version_tag }} + + - name: Push Docker Image with latest tag + if: github.event_name == 'push' || (github.event.inputs[matrix.dockerfile] == 'true' && github.event.inputs.push == 'true') + run: | + docker push ultralytics/ultralytics:${{ matrix.tags }} + if [[ "${{ matrix.tags }}" == "latest" ]]; then + t=ultralytics/ultralytics:latest-runner + docker build -f docker/Dockerfile-runner -t $t . 
+ docker push $t + fi + + - name: Notify on failure + if: github.event_name == 'push' && failure() # do not notify on cancelled() as cancelling is performed by hand + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + {"text": " GitHub Actions error for ${{ github.workflow }} ❌\n\n\n*Repository:* https://github.com/${{ github.repository }}\n*Action:* https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\n*Author:* ${{ github.actor }}\n*Event:* ${{ github.event_name }}\n"} + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_YOLO }}
diff --git a/ultralytics/.github/workflows/greetings.yml b/ultralytics/.github/workflows/greetings.yml new file mode 100755 index 0000000..224fe57 --- /dev/null +++ b/ultralytics/.github/workflows/greetings.yml @@ -0,0 +1,58 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license + +name: Greetings + +on: + pull_request_target: + types: [opened] + issues: + types: [opened] + +jobs: + greeting: + runs-on: ubuntu-latest + steps: + - uses: actions/first-interaction@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + pr-message: | + 👋 Hello @${{ github.actor }}, thank you for submitting an Ultralytics YOLOv8 🚀 PR! To allow your work to be integrated as seamlessly as possible, we advise you to: + + - ✅ Verify your PR is **up-to-date** with `ultralytics/ultralytics` `main` branch. If your PR is behind you can update your code by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally. + - ✅ Verify all YOLOv8 Continuous Integration (CI) **checks are passing**. + - ✅ Update YOLOv8 [Docs](https://docs.ultralytics.com) for any new or updated features. + - ✅ Reduce changes to the absolute **minimum** required for your bug fix or feature addition. _"It is not daily increase but daily decrease, hack away the unessential. The closer to the source, the less wastage there is."_ — Bruce Lee + + See our [Contributing Guide](https://docs.ultralytics.com/help/contributing) for details and let us know if you have any questions! + + issue-message: | + 👋 Hello @${{ github.actor }}, thank you for your interest in Ultralytics YOLOv8 🚀! We recommend a visit to the [Docs](https://docs.ultralytics.com) for new users where you can find many [Python](https://docs.ultralytics.com/usage/python/) and [CLI](https://docs.ultralytics.com/usage/cli/) usage examples and where many of the most common questions may already be answered. + + If this is a 🐛 Bug Report, please provide a [minimum reproducible example](https://docs.ultralytics.com/help/minimum_reproducible_example/) to help us debug it. + + If this is a custom training ❓ Question, please provide as much information as possible, including dataset image examples and training logs, and verify you are following our [Tips for Best Training Results](https://docs.ultralytics.com/yolov5/tutorials/tips_for_best_training_results/). + + Join the vibrant [Ultralytics Discord](https://ultralytics.com/discord) 🎧 community for real-time conversations and collaborations. This platform offers a perfect space to inquire, showcase your work, and connect with fellow Ultralytics users.
+ + ## Install + + Pip install the `ultralytics` package including all [requirements](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) in a [**Python>=3.8**](https://www.python.org/) environment with [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). + + ```bash + pip install ultralytics + ``` + + ## Environments + + YOLOv8 may be run in any of the following up-to-date verified environments (with all dependencies including [CUDA](https://developer.nvidia.com/cuda)/[CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/) and [PyTorch](https://pytorch.org/) preinstalled): + + - **Notebooks** with free GPU: Run on Gradient, Open In Colab, Open In Kaggle + - **Google Cloud** Deep Learning VM. See [GCP Quickstart Guide](https://docs.ultralytics.com/yolov5/environments/google_cloud_quickstart_tutorial/) + - **Amazon** Deep Learning AMI. See [AWS Quickstart Guide](https://docs.ultralytics.com/yolov5/environments/aws_quickstart_tutorial/) + - **Docker Image**. See [Docker Quickstart Guide](https://docs.ultralytics.com/yolov5/environments/docker_image_quickstart_tutorial/) + + ## Status + + Ultralytics CI + + If this badge is green, all [Ultralytics CI](https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml?query=event%3Aschedule) tests are currently passing. CI tests verify correct operation of all YOLOv8 [Modes](https://docs.ultralytics.com/modes/) and [Tasks](https://docs.ultralytics.com/tasks/) on macOS, Windows, and Ubuntu every 24 hours and on every commit.
diff --git a/ultralytics/.github/workflows/links.yml b/ultralytics/.github/workflows/links.yml new file mode 100755 index 0000000..a5ddf70 --- /dev/null +++ b/ultralytics/.github/workflows/links.yml @@ -0,0 +1,78 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# Continuous Integration (CI) GitHub Actions tests broken link checker using https://github.com/lycheeverse/lychee +# Ignores the following status codes to reduce false positives: +# - 403(OpenVINO, 'forbidden') +# - 429(Instagram, 'too many requests') +# - 500(Zenodo, 'cached') +# - 502(Zenodo, 'bad gateway') +# - 999(LinkedIn, 'unknown status code') + +name: Check Broken links + +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * *' # runs at 00:00 UTC every day + +jobs: + Links: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Download and install lychee + run: | + LYCHEE_URL=$(curl -s https://api.github.com/repos/lycheeverse/lychee/releases/latest | grep "browser_download_url" | grep "x86_64-unknown-linux-gnu.tar.gz" | cut -d '"' -f 4) + curl -L $LYCHEE_URL -o lychee.tar.gz + tar xzf lychee.tar.gz + sudo mv lychee /usr/local/bin + + - name: Test Markdown and HTML links with retry + uses: nick-invision/retry@v2 + with: + timeout_minutes: 5 + retry_wait_seconds: 60 + max_attempts: 3 + command: | + lychee --accept 403,429,500,502,999 \ + --exclude-loopback \ + --exclude 'https?://(www\.)?(linkedin\.com|twitter\.com|instagram\.com|kaggle\.com|fonts\.gstatic\.com|url\.com)' \ + --exclude-path docs/zh \ + --exclude-path docs/es \ + --exclude-path docs/ru \ + --exclude-path docs/pt \ + --exclude-path docs/fr \ + --exclude-path docs/de \ + --exclude-path docs/ja \ + --exclude-path
docs/ko \ + --exclude-path docs/hi \ + --exclude-path docs/ar \ + --exclude-mail \ + --github-token ${{ secrets.GITHUB_TOKEN }} \ + './**/*.md' './**/*.html' + + - name: Test Markdown, HTML, YAML, Python and Notebook links with retry + if: github.event_name == 'workflow_dispatch' + uses: nick-invision/retry@v2 + with: + timeout_minutes: 5 + retry_wait_seconds: 60 + max_attempts: 3 + command: | + lychee --accept 429,999 \ + --exclude-loopback \ + --exclude 'https?://(www\.)?(linkedin\.com|twitter\.com|instagram\.com|kaggle\.com|fonts\.gstatic\.com|url\.com)' \ + --exclude-path '**/ci.yaml' \ + --exclude-path docs/zh \ + --exclude-path docs/es \ + --exclude-path docs/ru \ + --exclude-path docs/pt \ + --exclude-path docs/fr \ + --exclude-path docs/de \ + --exclude-path docs/ja \ + --exclude-path docs/ko \ + --exclude-path docs/hi \ + --exclude-path docs/ar \ + --exclude-mail \ + --github-token ${{ secrets.GITHUB_TOKEN }} \ + './**/*.md' './**/*.html' './**/*.yml' './**/*.yaml' './**/*.py' './**/*.ipynb'
diff --git a/ultralytics/.github/workflows/publish.yml b/ultralytics/.github/workflows/publish.yml new file mode 100755 index 0000000..8f62cb5 --- /dev/null +++ b/ultralytics/.github/workflows/publish.yml @@ -0,0 +1,112 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# Publish pip package to PyPI https://pypi.org/project/ultralytics/ and Docs to https://docs.ultralytics.com + +name: Publish to PyPI and Deploy Docs + +on: + push: + branches: [main] + workflow_dispatch: + inputs: + pypi: + type: boolean + description: Publish to PyPI + docs: + type: boolean + description: Deploy Docs + +jobs: + publish: + if: github.repository == 'ultralytics/ultralytics' && github.actor == 'glenn-jocher' + name: Publish + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: "0" # pulls all commits (needed for correct last updated dates in Docs) + - name: Set up Python environment + uses: actions/setup-python@v5 + with: + python-version: '3.10' + cache: 'pip' # caching pip dependencies + - name: Install dependencies + run: | + python -m pip install --upgrade pip wheel build twine + pip install -e ".[dev]" --extra-index-url https://download.pytorch.org/whl/cpu + - name: Check PyPI version + shell: python + run: | + import os + import ultralytics + from ultralytics.utils.checks import check_latest_pypi_version + + v_local = tuple(map(int, ultralytics.__version__.split('.'))) + v_pypi = tuple(map(int, check_latest_pypi_version().split('.'))) + print(f'Local version is {v_local}') + print(f'PyPI version is {v_pypi}') + d = [a - b for a, b in zip(v_local, v_pypi)] # diff + increment = (d[0] == d[1] == 0) and (0 < d[2] < 3) # only publish if patch version increments by 1 or 2 + os.system(f'echo "increment={increment}" >> $GITHUB_OUTPUT') + os.system(f'echo "version={ultralytics.__version__}" >> $GITHUB_OUTPUT') + if increment: + print('Local version is higher than PyPI version.
Publishing new version to PyPI ✅.') + id: check_pypi + - name: Publish to PyPI + continue-on-error: true + if: (github.event_name == 'push' || github.event.inputs.pypi == 'true') && steps.check_pypi.outputs.increment == 'True' + env: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + run: | + python -m build + python -m twine upload dist/* -u __token__ -p $PYPI_TOKEN + - name: Deploy Docs + continue-on-error: true + if: (github.event_name == 'push' || github.event.inputs.docs == 'true') && github.repository == 'ultralytics/ultralytics' && github.actor == 'glenn-jocher' + env: + PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} + INDEXNOW_KEY: ${{ secrets.INDEXNOW_KEY_DOCS }} + run: | + python docs/build_docs.py + git config --global user.name "Glenn Jocher" + git config --global user.email "glenn.jocher@ultralytics.com" + git clone https://github.com/ultralytics/docs.git docs-repo + cd docs-repo + git checkout gh-pages || git checkout -b gh-pages + rm -rf * + cp -R ../site/* . + echo "$INDEXNOW_KEY" > "$INDEXNOW_KEY.txt" + git add . + LATEST_HASH=$(git rev-parse --short=7 HEAD) + git commit -m "Update Docs for 'ultralytics ${{ steps.check_pypi.outputs.version }} - $LATEST_HASH'" + git push https://$PERSONAL_ACCESS_TOKEN@github.com/ultralytics/docs.git gh-pages + - name: Extract PR Details + run: | + if [ "${{ github.event_name }}" = "pull_request" ]; then + PR_JSON=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}) + PR_NUMBER=${{ github.event.pull_request.number }} + PR_TITLE=$(echo $PR_JSON | jq -r '.title') + else + COMMIT_SHA=${{ github.event.after }} + PR_JSON=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" "https://api.github.com/search/issues?q=repo:${{ github.repository }}+is:pr+is:merged+sha:$COMMIT_SHA") + PR_NUMBER=$(echo $PR_JSON | jq -r '.items[0].number') + PR_TITLE=$(echo $PR_JSON | jq -r '.items[0].title') + fi + echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV + echo "PR_TITLE=$PR_TITLE" >> $GITHUB_ENV + - name: Notify on Slack (Success) + if: success() && github.event_name == 'push' && steps.check_pypi.outputs.increment == 'True' + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + {"text": " GitHub Actions success for ${{ github.workflow }} ✅\n\n\n*Repository:* https://github.com/${{ github.repository }}\n*Action:* https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\n*Author:* ${{ github.actor }}\n*Event:* NEW 'ultralytics ${{ steps.check_pypi.outputs.version }}' pip package published 😃\n*Job Status:* ${{ job.status }}\n*Pull Request:* ${{ env.PR_TITLE }}\n"} + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_YOLO }} + - name: Notify on Slack (Failure) + if: failure() + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + {"text": " GitHub Actions error for ${{ github.workflow }} ❌\n\n\n*Repository:* https://github.com/${{ github.repository }}\n*Action:* https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\n*Author:* ${{ github.actor }}\n*Event:* ${{ github.event_name }}\n*Job Status:* ${{ job.status }}\n*Pull Request:* ${{ env.PR_TITLE }}\n"} + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL_YOLO }}
diff --git a/ultralytics/.github/workflows/stale.yml b/ultralytics/.github/workflows/stale.yml new file mode 100755 index 0000000..cc7fde6 --- /dev/null +++ b/ultralytics/.github/workflows/stale.yml @@ -0,0 +1,47 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license + +name: Close stale issues +on: + schedule: + - cron: '0 0 * * *' # Runs at 00:00 UTC every day + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + stale-issue-message: | + 👋 Hello there! We wanted to give you a friendly reminder that this issue has not had any recent activity and may be closed soon, but don't worry - you can always reopen it if needed. If you still have any questions or concerns, please feel free to let us know how we can help. + + For additional resources and information, please see the links below: + + - **Docs**: https://docs.ultralytics.com + - **HUB**: https://hub.ultralytics.com + - **Community**: https://community.ultralytics.com + + Feel free to inform us of any other **issues** you discover or **feature requests** that come to mind in the future. Pull Requests (PRs) are also always welcomed! + + Thank you for your contributions to YOLO 🚀 and Vision AI ⭐ + + stale-pr-message: | + 👋 Hello there! We wanted to let you know that we've decided to close this pull request due to inactivity. We appreciate the effort you put into contributing to our project, but unfortunately, not all contributions are suitable or aligned with our product roadmap. + + We hope you understand our decision, and please don't let it discourage you from contributing to open source projects in the future. We value all of our community members and their contributions, and we encourage you to keep exploring new projects and ways to get involved. + + For additional resources and information, please see the links below: + + - **Docs**: https://docs.ultralytics.com + - **HUB**: https://hub.ultralytics.com + - **Community**: https://community.ultralytics.com + + Thank you for your contributions to YOLO 🚀 and Vision AI ⭐ + + days-before-issue-stale: 30 + days-before-issue-close: 10 + days-before-pr-stale: 90 + days-before-pr-close: 30 + exempt-issue-labels: 'documentation,tutorial,TODO' + operations-per-run: 300 # The maximum number of operations per run, used to control rate limiting.
diff --git a/ultralytics/.gitignore b/ultralytics/.gitignore new file mode 100755 index 0000000..c8987d8 --- /dev/null +++ b/ultralytics/.gitignore @@ -0,0 +1,165 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# Profiling +*.pclprof + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +.idea +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# VSCode project settings +.vscode/ + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site +mkdocs_github_authors.yaml + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# datasets and projects +datasets/ +runs/ +wandb/ +tests/ +.DS_Store + +# Neural Network weights ----------------------------------------------------------------------------------------------- +weights/ +*.weights +*.pt +*.pb +*.onnx +*.engine +*.mlmodel +*.mlpackage +*.torchscript +*.tflite +*.h5 +*_saved_model/ +*_web_model/ +*_openvino_model/ +*_paddle_model/ +pnnx* + +# Autogenerated files for tests +/ultralytics/assets/
diff --git a/ultralytics/.pre-commit-config.yaml b/ultralytics/.pre-commit-config.yaml new file mode 100755 index 0000000..4c47d7e --- /dev/null +++ b/ultralytics/.pre-commit-config.yaml @@ -0,0 +1,92 @@ +# Ultralytics YOLO 🚀, AGPL-3.0 license +# Pre-commit hooks.
For more information see https://github.com/pre-commit/pre-commit-hooks/blob/main/README.md +# Optionally remove from local hooks with 'rm .git/hooks/pre-commit' + +# Define bot property if installed via https://github.com/marketplace/pre-commit-ci +ci: + autofix_prs: true + autoupdate_commit_msg: '[pre-commit.ci] pre-commit suggestions' + autoupdate_schedule: monthly + submodules: true + +# Exclude directories (optional) +# exclude: 'docs/' + +# Define repos to run +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + # - id: check-yaml + - id: check-docstring-first + - id: double-quote-string-fixer + - id: detect-private-key + + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.0 + hooks: + - id: pyupgrade + name: Upgrade code + + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + name: Sort imports + + - repo: https://github.com/google/yapf + rev: v0.40.2 + hooks: + - id: yapf + name: YAPF formatting + + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.17 + hooks: + - id: mdformat + name: MD formatting + additional_dependencies: + - mdformat-gfm + # - mdformat-black + # - mdformat-frontmatter + args: + - --wrap=no + exclude: 'docs/.*\.md' + # exclude: "README.md|README.zh-CN.md|CONTRIBUTING.md" + + - repo: https://github.com/PyCQA/flake8 + rev: 6.1.0 + hooks: + - id: flake8 + name: PEP8 + + - repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + exclude: 'docs/de|docs/fr|docs/pt|docs/es|docs/mkdocs_de.yml' + args: + - --ignore-words-list=crate,nd,strack,dota,ane,segway,fo,gool,winn + + - repo: https://github.com/PyCQA/docformatter + rev: v1.7.5 + hooks: + - id: docformatter + +# - repo: https://github.com/asottile/yesqa +# rev: v1.4.0 +# hooks: +# - id: yesqa + +# - repo: https://github.com/asottile/dead +# rev: v1.5.0 +# hooks: +# - id: dead + +# - repo: https://github.com/ultralytics/pre-commit +# rev: bd60a414f80a53fb8f593d3bfed4701fc47e4b23 +# hooks: +# - id: capitalize-comments
diff --git a/ultralytics/1.png b/ultralytics/1.png new file mode 100755 index 0000000..30eeece Binary files /dev/null and b/ultralytics/1.png differ
diff --git a/ultralytics/2.png b/ultralytics/2.png new file mode 100755 index 0000000..58e3bc9 Binary files /dev/null and b/ultralytics/2.png differ
diff --git a/ultralytics/CITATION.cff b/ultralytics/CITATION.cff new file mode 100755 index 0000000..8e85b7a --- /dev/null +++ b/ultralytics/CITATION.cff @@ -0,0 +1,20 @@ +cff-version: 1.2.0 +preferred-citation: + type: software + message: If you use this software, please cite it as below.
+ authors: + - family-names: Jocher + given-names: Glenn + orcid: "https://orcid.org/0000-0001-5950-6979" + - family-names: Chaurasia + given-names: Ayush + orcid: "https://orcid.org/0000-0002-7603-6750" + - family-names: Qiu + given-names: Jing + orcid: "https://orcid.org/0000-0003-3783-7069" + title: "YOLO by Ultralytics" + version: 8.0.0 + # doi: 10.5281/zenodo.3908559 # TODO + date-released: 2023-01-10 + license: AGPL-3.0 + url: "https://github.com/ultralytics/ultralytics"
diff --git a/ultralytics/CONTRIBUTING.md b/ultralytics/CONTRIBUTING.md new file mode 100755 index 0000000..615ef41 --- /dev/null +++ b/ultralytics/CONTRIBUTING.md @@ -0,0 +1,96 @@ +# Contributing to YOLOv8 🚀 + +We love your input! We want to make contributing to YOLOv8 as easy and transparent as possible, whether it's: + +- Reporting a bug +- Discussing the current state of the code +- Submitting a fix +- Proposing a new feature +- Becoming a maintainer + +YOLOv8 works so well due to our combined community effort, and for every small improvement you contribute you will be helping push the frontiers of what's possible in AI 😃! + +## Submitting a Pull Request (PR) 🛠️ + +Submitting a PR is easy! This example shows how to submit a PR for updating `requirements.txt` in 4 steps: + +### 1. Select File to Update + +Select `requirements.txt` to update by clicking on it in GitHub. + +

+_Screenshot: PR_step1_
+ +### 2. Click 'Edit this file' + +The button is in the top-right corner. + +

+_Screenshot: PR_step2_
+ +### 3. Make Changes + +Change `matplotlib` version from `3.2.2` to `3.3`. + +

+_Screenshot: PR_step3_
+ +### 4. Preview Changes and Submit PR + +Click on the **Preview changes** tab to verify your updates. At the bottom of the screen select 'Create a **new branch** for this commit', assign your branch a descriptive name such as `fix/matplotlib_version` and click the green **Propose changes** button. All done, your PR is now submitted to YOLOv8 for review and approval 😃! + +

+_Screenshot: PR_step4_
+ +### PR recommendations + +To allow your work to be integrated as seamlessly as possible, we advise you to: + +- ✅ Verify your PR is **up-to-date** with `ultralytics/ultralytics` `main` branch. If your PR is behind you can update your code by clicking the 'Update branch' button or by running `git pull` and `git merge main` locally. + +

+_Screenshot: PR recommendation 1_
+ +- ✅ Verify all YOLOv8 Continuous Integration (CI) **checks are passing**. + +

+_Screenshot: PR recommendation 2_
+ +- ✅ Reduce changes to the absolute **minimum** required for your bug fix or feature addition. _"It is not daily increase but daily decrease, hack away the unessential. The closer to the source, the less wastage there is."_ — Bruce Lee + +### Docstrings + +Not all functions or classes require docstrings but when they do, we follow [google-style docstrings format](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings). Here is an example: + +```python +""" + What the function does. Performs NMS on given detection predictions. + + Args: + arg1: The description of the 1st argument + arg2: The description of the 2nd argument + + Returns: + What the function returns. Empty if nothing is returned. + + Raises: + Exception Class: When and why this exception can be raised by the function. +""" +``` + +## Submitting a Bug Report 🐛 + +If you spot a problem with YOLOv8 please submit a Bug Report! + +For us to start investigating a possible problem we need to be able to reproduce it ourselves first. We've created a few short guidelines below to help users provide what we need in order to get started. + +When asking a question, people will be better able to provide help if you provide **code** that they can easily understand and use to **reproduce** the problem. This is referred to by community members as creating a [minimum reproducible example](https://docs.ultralytics.com/help/minimum_reproducible_example/). Your code that reproduces the problem should be: + +- ✅ **Minimal** – Use as little code as possible that still produces the same problem +- ✅ **Complete** – Provide **all** parts someone else needs to reproduce your problem in the question itself +- ✅ **Reproducible** – Test the code you're about to provide to make sure it reproduces the problem + +In addition to the above requirements, for [Ultralytics](https://ultralytics.com/) to provide assistance your code should be: + +- ✅ **Current** – Verify that your code is up-to-date with current GitHub [main](https://github.com/ultralytics/ultralytics/tree/main) branch, and if necessary `git pull` or `git clone` a new copy to ensure your problem has not already been resolved by previous commits. +- ✅ **Unmodified** – Your problem must be reproducible without any modifications to the codebase in this repository. [Ultralytics](https://ultralytics.com/) does not provide support for custom code ⚠️. + +If you believe your problem meets all of the above criteria, please raise a new issue using the 🐛 **Bug Report** [template](https://github.com/ultralytics/ultralytics/issues/new/choose) and provide a [minimum reproducible example](https://docs.ultralytics.com/help/minimum_reproducible_example/) to help us better understand and diagnose your problem. + +## License + +By contributing, you agree that your contributions will be licensed under the [AGPL-3.0 license](https://choosealicense.com/licenses/agpl-3.0/)
diff --git a/ultralytics/LICENSE b/ultralytics/LICENSE new file mode 100755 index 0000000..be3f7b2 --- /dev/null +++ b/ultralytics/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/ultralytics/LICENSE:Zone.Identifier b/ultralytics/LICENSE:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/LICENSE:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/MANIFEST.in b/ultralytics/MANIFEST.in new file mode 100755 index 0000000..cd7df3e --- /dev/null +++ b/ultralytics/MANIFEST.in @@ -0,0 +1,8 @@ +include *.md +include requirements.txt +include LICENSE +include setup.py +include ultralytics/assets/bus.jpg +include ultralytics/assets/zidane.jpg +include tests/*.py +recursive-include ultralytics *.yaml diff --git a/ultralytics/MANIFEST.in:Zone.Identifier b/ultralytics/MANIFEST.in:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/MANIFEST.in:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/README.md b/ultralytics/README.md new file mode 100755 index 0000000..9e71812 --- /dev/null +++ b/ultralytics/README.md @@ -0,0 +1,265 @@ +
+

+ + YOLO Vision banner +

+ +[ไธญๆ–‡](https://docs.ultralytics.com/zh/) | [ํ•œ๊ตญ์–ด](https://docs.ultralytics.com/ko/) | [ๆ—ฅๆœฌ่ชž](https://docs.ultralytics.com/ja/) | [ะ ัƒััะบะธะน](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Franรงais](https://docs.ultralytics.com/fr/) | [Espaรฑol](https://docs.ultralytics.com/es/) | [Portuguรชs](https://docs.ultralytics.com/pt/) | [เคนเคฟเคจเฅเคฆเฅ€](https://docs.ultralytics.com/hi/) | [ุงู„ุนุฑุจูŠุฉ](https://docs.ultralytics.com/ar/)
+ +
+ Ultralytics CI + Ultralytics Code Coverage + YOLOv8 Citation + Docker Pulls + Discord +
+ Run on Gradient + Open In Colab + Open In Kaggle +
+
+ +[Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification and pose estimation tasks. + +We hope that the resources here will help you get the most out of YOLOv8. Please browse the YOLOv8 Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions! + +To request an Enterprise License please complete the form at [Ultralytics Licensing](https://ultralytics.com/license). + +YOLOv8 performance plots + +
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+
+## Documentation
+ +See below for a quickstart installation and usage example, and see the [YOLOv8 Docs](https://docs.ultralytics.com) for full documentation on training, validation, prediction and deployment. + +
+Install + +Pip install the ultralytics package including all [requirements](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) in a [**Python>=3.8**](https://www.python.org/) environment with [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). + +[![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + +```bash +pip install ultralytics +``` + +For alternative installation methods including [Conda](https://anaconda.org/conda-forge/ultralytics), [Docker](https://hub.docker.com/r/ultralytics/ultralytics), and Git, please refer to the [Quickstart Guide](https://docs.ultralytics.com/quickstart). + +
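+
+After installing, you can optionally verify the setup from Python. A minimal sketch using the package's built-in `checks()` utility (shown here as an illustration, not an official quickstart step):
+
+```python
+import ultralytics
+
+# Print software/hardware details and confirm the installation is healthy
+ultralytics.checks()
+```
+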
+ +
+Usage + +#### CLI + +YOLOv8 may be used directly in the Command Line Interface (CLI) with a `yolo` command: + +```bash +yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' +``` + +`yolo` can be used for a variety of tasks and modes and accepts additional arguments, i.e. `imgsz=640`. See the YOLOv8 [CLI Docs](https://docs.ultralytics.com/usage/cli) for examples. + +#### Python + +YOLOv8 may also be used directly in a Python environment, and accepts the same [arguments](https://docs.ultralytics.com/usage/cfg/) as in the CLI example above: + +```python +from ultralytics import YOLO + +# Load a model +model = YOLO("yolov8n.yaml") # build a new model from scratch +model = YOLO("yolov8n.pt") # load a pretrained model (recommended for training) + +# Use the model +model.train(data="coco128.yaml", epochs=3) # train the model +metrics = model.val() # evaluate model performance on the validation set +results = model("https://ultralytics.com/images/bus.jpg") # predict on an image +path = model.export(format="onnx") # export the model to ONNX format +``` + +See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more examples. + +
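+
+The `model(...)` prediction call above returns a list of `Results` objects that can be inspected programmatically. A minimal sketch of working with one result via the documented `Results.boxes` attributes (illustrative only):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO("yolov8n.pt")  # load a pretrained detection model
+results = model("https://ultralytics.com/images/bus.jpg")  # run inference
+
+for r in results:
+    print(r.boxes.xyxy)  # box coordinates as (x1, y1, x2, y2) tensors
+    print(r.boxes.conf)  # confidence score per box
+    print(r.boxes.cls)  # class index per box
+    annotated = r.plot()  # BGR numpy array with predictions drawn
+```
+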
+
+## Models
+ +YOLOv8 [Detect](https://docs.ultralytics.com/tasks/detect), [Segment](https://docs.ultralytics.com/tasks/segment) and [Pose](https://docs.ultralytics.com/tasks/pose) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco) dataset are available here, as well as YOLOv8 [Classify](https://docs.ultralytics.com/tasks/classify) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet) dataset. [Track](https://docs.ultralytics.com/modes/track) mode is available for all Detect, Segment and Pose models. + +Ultralytics YOLO supported tasks + +All [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +
+Detection (COCO)
+
+See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+| Model | size (pixels) | mAPval 50-95 | Speed CPU ONNX (ms) | Speed A100 TensorRT (ms) | params (M) | FLOPs (B) |
+| --- | --- | --- | --- | --- | --- | --- |
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- **mAPval** values are for single-model single-scale on [COCO val2017](http://cocodataset.org) dataset. Reproduce by `yolo val detect data=coco.yaml device=0`
+- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`, or see the Python validation sketch below.
+
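+
+The same validation can also be run from Python. A minimal sketch, assuming the `ultralytics` package is installed and the COCO dataset YAML resolves automatically (downloaded on first use):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO("yolov8n.pt")  # pretrained detection weights
+
+# Equivalent to `yolo val detect data=coco.yaml device=0`
+metrics = model.val(data="coco.yaml", device=0)
+print(metrics.box.map)  # mAP50-95
+```
+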
+ +
+Detection (Open Image V7)
+
+See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes.
+
+| Model | size (pixels) | mAPval 50-95 | Speed CPU ONNX (ms) | Speed A100 TensorRT (ms) | params (M) | FLOPs (B) |
+| --- | --- | --- | --- | --- | --- | --- |
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |
+
+- **mAPval** values are for single-model single-scale on [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/) dataset.
Reproduce by `yolo val detect data=open-images-v7.yaml device=0` +- **Speed** averaged over Open Image V7 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.
Reproduce by `yolo val detect data=open-images-v7.yaml batch=1 device=0|cpu` + +
+ +
+Segmentation (COCO)
+
+See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes.
+
+| Model | size (pixels) | mAPbox 50-95 | mAPmask 50-95 | Speed CPU ONNX (ms) | Speed A100 TensorRT (ms) | params (M) | FLOPs (B) |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- **mAPval** values are for single-model single-scale on [COCO val2017](http://cocodataset.org) dataset.
Reproduce by `yolo val segment data=coco-seg.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.
Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu` + +
+ +
+Pose (COCO)
+
+See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, person.
+
+| Model | size (pixels) | mAPpose 50-95 | mAPpose 50 | Speed CPU ONNX (ms) | Speed A100 TensorRT (ms) | params (M) | FLOPs (B) |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- **mAPval** values are for single-model single-scale on [COCO Keypoints val2017](http://cocodataset.org) dataset.
Reproduce by `yolo val pose data=coco-pose.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.
Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu` + +
+ +
+Classification (ImageNet)
+
+See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.
+
+| Model | size (pixels) | acc top1 | acc top5 | Speed CPU ONNX (ms) | Speed A100 TensorRT (ms) | params (M) | FLOPs (B) at 640 |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+- **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set.
Reproduce by `yolo val classify data=path/to/ImageNet device=0` +- **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.
Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +
+
+## Integrations
+ +Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino), can optimize your AI workflow. + +
+ +Ultralytics active learning integrations +
+
+ +
+ + Roboflow logo + space + + ClearML logo + space + + Comet ML logo + space + + NeuralMagic logo +
+
+| Roboflow | ClearML ⭐ NEW | Comet ⭐ NEW | Neural Magic ⭐ NEW |
+| :---: | :---: | :---: | :---: |
+| Label and export your custom datasets directly to YOLOv8 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | Automatically track, visualize and even remotely train YOLOv8 using [ClearML](https://cutt.ly/yolov5-readme-clearml) (open-source!) | Free forever, [Comet](https://bit.ly/yolov8-readme-comet) lets you save YOLOv8 models, resume training, and interactively visualize and debug predictions | Run YOLOv8 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
+
+## Ultralytics HUB
+
+Experience seamless AI with [Ultralytics HUB](https://bit.ly/ultralytics_hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://ultralytics.com/app_install). Start your journey for **Free** now!
+
+Ultralytics HUB preview image
+
+## Contribute
+
+We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
+
+Ultralytics open-source contributors
+
+## License
+
+Ultralytics offers two licensing options to accommodate diverse use cases:
+
+- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
+- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
+
+## Contact
+ +For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues), and join our [Discord](https://ultralytics.com/discord) community for questions and discussions! + +
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
diff --git a/ultralytics/README.md:Zone.Identifier b/ultralytics/README.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/README.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/README.zh-CN.md b/ultralytics/README.zh-CN.md new file mode 100755 index 0000000..7a3bf9c --- /dev/null +++ b/ultralytics/README.zh-CN.md @@ -0,0 +1,265 @@ +
+

+ + YOLO Vision banner +

+ +[ไธญๆ–‡](https://docs.ultralytics.com/zh/) | [ํ•œ๊ตญ์–ด](https://docs.ultralytics.com/ko/) | [ๆ—ฅๆœฌ่ชž](https://docs.ultralytics.com/ja/) | [ะ ัƒััะบะธะน](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Franรงais](https://docs.ultralytics.com/fr/) | [Espaรฑol](https://docs.ultralytics.com/es/) | [Portuguรชs](https://docs.ultralytics.com/pt/) | [เคนเคฟเคจเฅเคฆเฅ€](https://docs.ultralytics.com/hi/) | [ุงู„ุนุฑุจูŠุฉ](https://docs.ultralytics.com/ar/)
+ +
+ Ultralytics CI + Ultralytics Code Coverage + YOLOv8 Citation + Docker Pulls + Discord +
+ Run on Gradient + Open In Colab + Open In Kaggle +
+
+
+[Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification, and pose estimation tasks.
+
+We hope that the resources here will help you get the most out of YOLOv8. Please browse the YOLOv8 Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!
+
+To request an Enterprise License, please complete the form at [Ultralytics Licensing](https://ultralytics.com/license).
+
+YOLOv8 performance plots
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+
+## Documentation
+ +่ฏทๅ‚้˜…ไธ‹้ข็š„ๅฟซ้€Ÿๅฎ‰่ฃ…ๅ’Œไฝฟ็”จ็คบไพ‹๏ผŒไปฅๅŠ [YOLOv8 ๆ–‡ๆกฃ](https://docs.ultralytics.com) ไธŠๆœ‰ๅ…ณ่ฎญ็ปƒใ€้ชŒ่ฏใ€้ข„ๆต‹ๅ’Œ้ƒจ็ฝฒ็š„ๅฎŒๆ•ดๆ–‡ๆกฃใ€‚ + +
+ๅฎ‰่ฃ… + +ไฝฟ็”จPipๅœจไธ€ไธช[**Python>=3.8**](https://www.python.org/)็Žฏๅขƒไธญๅฎ‰่ฃ…`ultralytics`ๅŒ…๏ผŒๆญค็Žฏๅขƒ่ฟ˜้œ€ๅŒ…ๅซ[**PyTorch>=1.8**](https://pytorch.org/get-started/locally/)ใ€‚่ฟ™ไนŸไผšๅฎ‰่ฃ…ๆ‰€ๆœ‰ๅฟ…่ฆ็š„[ไพ่ต–้กน](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt)ใ€‚ + +[![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + +```bash +pip install ultralytics +``` + +ๅฆ‚้œ€ไฝฟ็”จๅŒ…ๆ‹ฌ[Conda](https://anaconda.org/conda-forge/ultralytics)ใ€[Docker](https://hub.docker.com/r/ultralytics/ultralytics)ๅ’ŒGitๅœจๅ†…็š„ๅ…ถไป–ๅฎ‰่ฃ…ๆ–นๆณ•๏ผŒ่ฏทๅ‚่€ƒ[ๅฟซ้€Ÿๅ…ฅ้—จๆŒ‡ๅ—](https://docs.ultralytics.com/quickstart)ใ€‚ + +
+ +
+Usage
+
+#### CLI
+
+YOLOv8 may be used directly in the Command Line Interface (CLI) with a `yolo` command:
+
+```bash
+yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+```
+
+`yolo` can be used for a variety of tasks and modes and accepts additional arguments, i.e. `imgsz=640`. See the YOLOv8 [CLI Docs](https://docs.ultralytics.com/usage/cli) for examples.
+
+#### Python
+
+YOLOv8 may also be used directly in a Python environment, and accepts the same [arguments](https://docs.ultralytics.com/usage/cfg/) as in the CLI example above:
+
+```python
+from ultralytics import YOLO
+
+# Load a model
+model = YOLO("yolov8n.yaml")  # build a new model from scratch
+model = YOLO("yolov8n.pt")  # load a pretrained model (recommended for training)
+
+# Use the model
+model.train(data="coco128.yaml", epochs=3)  # train the model
+metrics = model.val()  # evaluate model performance on the validation set
+results = model("https://ultralytics.com/images/bus.jpg")  # predict on an image
+path = model.export(format="onnx")  # export the model to ONNX format
+```
+
+See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more examples.
+
+
+## Models
+
+YOLOv8 [Detect](https://docs.ultralytics.com/tasks/detect), [Segment](https://docs.ultralytics.com/tasks/segment), and [Pose](https://docs.ultralytics.com/tasks/pose) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco) dataset are available here, as are YOLOv8 [Classify](https://docs.ultralytics.com/tasks/classify) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet) dataset. [Track](https://docs.ultralytics.com/modes/track) mode is supported by all Detect, Segment, and Pose models; a minimal tracking sketch is shown below.
+
+<!-- figure (HTML stripped): Ultralytics YOLO supported tasks -->
+
+All [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
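+
+Since every Detect, Segment, and Pose model supports Track mode, here is a minimal tracking sketch (the video path is a hypothetical placeholder):
+
+```python
+from ultralytics import YOLO
+
+# Track objects across the frames of a video with the default tracker settings.
+model = YOLO("yolov8n.pt")
+results = model.track(source="path/to/video.mp4")  # hypothetical input video
+```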
+
+<details open>
+<summary>Detection (COCO)</summary>
+
+See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pretrained classes.
+
+| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| ------------------------------------------------------------------------------------ | --------------- | -------------------- | --------------------------- | -------------------------------- | -------------- | ----------------- |
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
+- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
+
+</details>
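+
+A Python equivalent of the CLI reproduce commands above, as a minimal sketch (the dataset YAML is resolved and downloaded by the package):
+
+```python
+from ultralytics import YOLO
+
+# Validate the pretrained nano detection model on COCO and read its mAP 50-95.
+model = YOLO("yolov8n.pt")
+metrics = model.val(data="coco.yaml")
+print(metrics.box.map)  # mAP 50-95
+```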
+
+<details>
+<summary>Detection (Open Image V7)</summary>
+
+See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pretrained classes.
+
+| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| ----------------------------------------------------------------------------------------- | --------------- | ------------------- | --------------------------- | -------------------------------- | -------------- | ---------------- |
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |
+
+- **mAP<sup>val</sup>** values are for single-model single-scale on the [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/) dataset. <br>Reproduce with `yolo val detect data=open-images-v7.yaml device=0`
+- **Speed** averaged over Open Image V7 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce with `yolo val detect data=open-images-v7.yaml batch=1 device=0|cpu`
+
+</details>
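+
+The Open Images V7 weights load the same way as the COCO ones; a minimal sketch to confirm the 600-class vocabulary:
+
+```python
+from ultralytics import YOLO
+
+# Load the Open Images V7 nano weights and inspect the class-name map.
+model = YOLO("yolov8n-oiv7.pt")
+print(len(model.names))  # expected: 600 pretrained classes
+```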
+
+<details>
+<summary>Segmentation (COCO)</summary>
+
+See the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pretrained classes.
+
+| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| -------------------------------------------------------------------------------------------- | --------------- | -------------------- | --------------------- | --------------------------- | -------------------------------- | -------------- | ----------------- |
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset. <br>Reproduce with `yolo val segment data=coco-seg.yaml device=0`
+- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce with `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`
+
+</details>
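+
+A minimal Python sketch of the segmentation reproduce command above; segmentation results expose both box and mask metrics:
+
+```python
+from ultralytics import YOLO
+
+# Validate the nano segmentation model and read both mAP values.
+model = YOLO("yolov8n-seg.pt")
+metrics = model.val(data="coco-seg.yaml")
+print(metrics.box.map, metrics.seg.map)  # box and mask mAP 50-95
+```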
+
+<details>
+<summary>Pose (COCO)</summary>
+
+See the [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pretrained class, person.
+
+| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| ---------------------------------------------------------------------------------------------------- | --------------- | --------------------- | ------------------ | --------------------------- | -------------------------------- | -------------- | ----------------- |
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](http://cocodataset.org) dataset. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
+- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
+
+</details>
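+
+A minimal Python sketch of the pose reproduce command above:
+
+```python
+from ultralytics import YOLO
+
+# Validate the nano pose model and read its keypoint mAP.
+model = YOLO("yolov8n-pose.pt")
+metrics = model.val(data="coco-pose.yaml")
+print(metrics.pose.map)  # pose mAP 50-95
+```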
+
+<details>
+<summary>Classification (ImageNet)</summary>
+
+See the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.
+
+| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 640 |
+| -------------------------------------------------------------------------------------------- | --------------- | ---------------- | ---------------- | --------------------------- | -------------------------------- | -------------- | ------------------------ |
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+- **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
+- **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
+
+</details>
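+
+A minimal Python sketch of the classification reproduce command above, assuming a local ImageNet copy at the hypothetical path below (ImageNet must be obtained separately):
+
+```python
+from ultralytics import YOLO
+
+# Validate the nano classification model and read top-1/top-5 accuracy.
+model = YOLO("yolov8n-cls.pt")
+metrics = model.val(data="path/to/ImageNet")  # hypothetical local dataset path
+print(metrics.top1, metrics.top5)
+```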
+
+## Integrations
+
+Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks such as dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic, and [OpenVINO](https://docs.ultralytics.com/integrations/openvino), can optimize your AI workflow.
+
+<!-- banner (HTML stripped): Ultralytics active learning integrations -->
+
+<!-- partner logos (HTML stripped): Roboflow | ClearML | Comet ML | NeuralMagic -->
+
+| Roboflow | ClearML โญ NEW | Comet โญ NEW | Neural Magic โญ NEW |
+| :--------------------------------------------------------------------------------: | :----------------------------------------------------------------------------: | :----------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------: |
+| Label and export your custom datasets directly to YOLOv8 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | Automatically track, visualize, and even remotely train YOLOv8 using [ClearML](https://cutt.ly/yolov5-readme-clearml) (open-source!) | Free forever, [Comet](https://bit.ly/yolov8-readme-comet) lets you save YOLOv8 models, resume training, and interactively visualize and debug predictions | Run YOLOv8 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
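+
+As an illustration of how these integrations plug in, a minimal sketch assuming `comet_ml` is installed and an API key is configured in your environment (an assumption worth verifying for your version); Ultralytics then detects the installed logger and records the run without extra code:
+
+```python
+from ultralytics import YOLO
+
+# With comet_ml installed and configured, this training run is logged to Comet
+# automatically; no extra logging code is required in this script.
+model = YOLO("yolov8n.pt")
+model.train(data="coco128.yaml", epochs=3)
+```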
+## Ultralytics HUB
+
+Experience seamless AI with [Ultralytics HUB](https://bit.ly/ultralytics_hub) โญ, our all-in-one solution for data visualization, YOLOv5 and the upcoming YOLOv8 ๐Ÿš€ model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://ultralytics.com/app_install). Start your journey for **Free** now!
+
+<!-- image (HTML stripped): Ultralytics HUB preview image -->
+
+## Contribute
+
+We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you ๐Ÿ™ to all our contributors!
+
+<!-- image (HTML stripped): Ultralytics open-source contributors -->
+
+## License
+
+Ultralytics offers two licensing options to accommodate diverse use cases:
+
+- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
+- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
+
+## Contact
+
+For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues), and join our [Discord](https://ultralytics.com/discord) community for questions and discussions!
+
+<!-- social links (HTML stripped): Ultralytics GitHub | LinkedIn | Twitter | YouTube | TikTok | Instagram | Discord -->
diff --git a/ultralytics/README.zh-CN.md:Zone.Identifier b/ultralytics/README.zh-CN.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/README.zh-CN.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/bus.jpg b/ultralytics/bus.jpg new file mode 100755 index 0000000..b43e311 Binary files /dev/null and b/ultralytics/bus.jpg differ diff --git a/ultralytics/bus.jpg:Zone.Identifier b/ultralytics/bus.jpg:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/bus.jpg:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile b/ultralytics/docker/Dockerfile new file mode 100755 index 0000000..e3a32c8 --- /dev/null +++ b/ultralytics/docker/Dockerfile @@ -0,0 +1,82 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is CUDA-optimized for YOLOv8 single/multi-GPU training and inference + +# Start FROM PyTorch image https://hub.docker.com/r/pytorch/pytorch or nvcr.io/nvidia/pytorch:23.03-py3 +FROM pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime +RUN pip install --no-cache nvidia-tensorrt --index-url https://pypi.ngc.nvidia.com + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +# g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package +RUN apt update \ + && apt install --no-install-recommends -y gcc git zip curl htop libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 + +# Security updates +# https://security.snyk.io/vuln/SNYK-UBUNTU1804-OPENSSL-3314796 +RUN apt upgrade --no-install-recommends -y openssl tar + +# Create working directory +WORKDIR /usr/src/ultralytics + +# Copy contents +# COPY . /usr/src/ultralytics # git permission issues inside container +RUN git clone https://github.com/ultralytics/ultralytics -b main /usr/src/ultralytics +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt /usr/src/ultralytics/ + +# Install pip packages +RUN python3 -m pip install --upgrade pip wheel +RUN pip install --no-cache -e ".[export]" albumentations comet pycocotools pytest-cov + +# Run exports to AutoInstall packages +RUN yolo export model=tmp/yolov8n.pt format=edgetpu imgsz=32 +RUN yolo export model=tmp/yolov8n.pt format=ncnn imgsz=32 +# Requires <= Python 3.10, bug with paddlepaddle==2.5.0 https://github.com/PaddlePaddle/X2Paddle/issues/991 +RUN pip install --no-cache paddlepaddle==2.4.2 x2paddle +# Fix error: `np.bool` was a deprecated alias for the builtin `bool` segmentation error in Tests +RUN pip install --no-cache numpy==1.23.5 +# Remove exported models +RUN rm -rf tmp + +# Set environment variables +ENV OMP_NUM_THREADS=1 +# Avoid DDP error "MKL_THREADING_LAYER=INTEL is incompatible with libgomp.so.1 library" https://github.com/pytorch/pytorch/issues/37377 +ENV MKL_THREADING_LAYER=GNU + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest && sudo docker build -f docker/Dockerfile -t $t . 
&& sudo docker push $t + +# Pull and Run with access to all GPUs +# t=ultralytics/ultralytics:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all $t + +# Pull and Run with access to GPUs 2 and 3 (inside container CUDA devices will appear as 0 and 1) +# t=ultralytics/ultralytics:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus '"device=2,3"' $t + +# Pull and Run with local directory access +# t=ultralytics/ultralytics:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/datasets:/usr/src/datasets $t + +# Kill all +# sudo docker kill $(sudo docker ps -q) + +# Kill all image-based +# sudo docker kill $(sudo docker ps -qa --filter ancestor=ultralytics/ultralytics:latest) + +# DockerHub tag update +# t=ultralytics/ultralytics:latest tnew=ultralytics/ultralytics:v6.2 && sudo docker pull $t && sudo docker tag $t $tnew && sudo docker push $tnew + +# Clean up +# sudo docker system prune -a --volumes + +# Update Ubuntu drivers +# https://www.maketecheasier.com/install-nvidia-drivers-ubuntu/ + +# DDP test +# python -m torch.distributed.run --nproc_per_node 2 --master_port 1 train.py --epochs 3 + +# GCP VM from Image +# docker.io/ultralytics/ultralytics:latest diff --git a/ultralytics/docker/Dockerfile-arm64 b/ultralytics/docker/Dockerfile-arm64 new file mode 100755 index 0000000..aedb4f2 --- /dev/null +++ b/ultralytics/docker/Dockerfile-arm64 @@ -0,0 +1,44 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest-arm64 image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is aarch64-compatible for Apple M1 and other ARM architectures i.e. Jetson Nano and Raspberry Pi + +# Start FROM Ubuntu image https://hub.docker.com/_/ubuntu +FROM arm64v8/ubuntu:22.04 + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +# g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package +RUN apt update \ + && apt install --no-install-recommends -y python3-pip git zip curl htop gcc libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 + +# Create working directory +WORKDIR /usr/src/ultralytics + +# Copy contents +# COPY . /usr/src/ultralytics # git permission issues inside container +RUN git clone https://github.com/ultralytics/ultralytics -b main /usr/src/ultralytics +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt /usr/src/ultralytics/ + +# Install pip packages +RUN python3 -m pip install --upgrade pip wheel +RUN pip install --no-cache -e . + +# Creates a symbolic link to make 'python' point to 'python3' +RUN ln -sf /usr/bin/python3 /usr/bin/python + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-arm64 && sudo docker build --platform linux/arm64 -f docker/Dockerfile-arm64 -t $t . 
&& sudo docker push $t + +# Run +# t=ultralytics/ultralytics:latest-arm64 && sudo docker run -it --ipc=host $t + +# Pull and Run +# t=ultralytics/ultralytics:latest-arm64 && sudo docker pull $t && sudo docker run -it --ipc=host $t + +# Pull and Run with local volume mounted +# t=ultralytics/ultralytics:latest-arm64 && sudo docker pull $t && sudo docker run -it --ipc=host -v "$(pwd)"/datasets:/usr/src/datasets $t diff --git a/ultralytics/docker/Dockerfile-arm64:Zone.Identifier b/ultralytics/docker/Dockerfile-arm64:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-arm64:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile-conda b/ultralytics/docker/Dockerfile-conda new file mode 100755 index 0000000..73d38d6 --- /dev/null +++ b/ultralytics/docker/Dockerfile-conda @@ -0,0 +1,38 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest-conda image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is optimized for Ultralytics Anaconda (https://anaconda.org/conda-forge/ultralytics) installation and usage + +# Start FROM miniconda3 image https://hub.docker.com/r/continuumio/miniconda3 +FROM continuumio/miniconda3:latest + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +RUN apt update \ + && apt install --no-install-recommends -y libgl1 + +# Copy contents +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt . + +# Install conda packages +# mkl required to fix 'OSError: libmkl_intel_lp64.so.2: cannot open shared object file: No such file or directory' +RUN conda config --set solver libmamba && \ + conda install pytorch torchvision pytorch-cuda=11.8 -c pytorch -c nvidia && \ + conda install -c conda-forge ultralytics mkl + # conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics mkl + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-conda && sudo docker build -f docker/Dockerfile-cpu -t $t . 
&& sudo docker push $t + +# Run +# t=ultralytics/ultralytics:latest-conda && sudo docker run -it --ipc=host $t + +# Pull and Run +# t=ultralytics/ultralytics:latest-conda && sudo docker pull $t && sudo docker run -it --ipc=host $t + +# Pull and Run with local volume mounted +# t=ultralytics/ultralytics:latest-conda && sudo docker pull $t && sudo docker run -it --ipc=host -v "$(pwd)"/datasets:/usr/src/datasets $t diff --git a/ultralytics/docker/Dockerfile-conda:Zone.Identifier b/ultralytics/docker/Dockerfile-conda:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-conda:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile-cpu b/ultralytics/docker/Dockerfile-cpu new file mode 100755 index 0000000..42e5ec3 --- /dev/null +++ b/ultralytics/docker/Dockerfile-cpu @@ -0,0 +1,55 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest-cpu image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is CPU-optimized for ONNX, OpenVINO and PyTorch YOLOv8 deployments + +# Start FROM Ubuntu image https://hub.docker.com/_/ubuntu +FROM ubuntu:23.10 + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +# g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package +RUN apt update \ + && apt install --no-install-recommends -y python3-pip git zip curl htop libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 + +# Create working directory +WORKDIR /usr/src/ultralytics + +# Copy contents +# COPY . /usr/src/ultralytics # git permission issues inside container +RUN git clone https://github.com/ultralytics/ultralytics -b main /usr/src/ultralytics +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt /usr/src/ultralytics/ + +# Remove python3.11/EXTERNALLY-MANAGED or use 'pip install --break-system-packages' avoid 'externally-managed-environment' Ubuntu nightly error +RUN rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED + +# Install pip packages +RUN python3 -m pip install --upgrade pip wheel +RUN pip install --no-cache -e ".[export]" --extra-index-url https://download.pytorch.org/whl/cpu + +# Run exports to AutoInstall packages +RUN yolo export model=tmp/yolov8n.pt format=edgetpu imgsz=32 +RUN yolo export model=tmp/yolov8n.pt format=ncnn imgsz=32 +# Requires <= Python 3.10, bug with paddlepaddle==2.5.0 https://github.com/PaddlePaddle/X2Paddle/issues/991 +# RUN pip install --no-cache paddlepaddle==2.4.2 x2paddle +# Remove exported models +RUN rm -rf tmp + +# Creates a symbolic link to make 'python' point to 'python3' +RUN ln -sf /usr/bin/python3 /usr/bin/python + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-cpu && sudo docker build -f docker/Dockerfile-cpu -t $t . 
&& sudo docker push $t + +# Run +# t=ultralytics/ultralytics:latest-cpu && sudo docker run -it --ipc=host $t + +# Pull and Run +# t=ultralytics/ultralytics:latest-cpu && sudo docker pull $t && sudo docker run -it --ipc=host $t + +# Pull and Run with local volume mounted +# t=ultralytics/ultralytics:latest-cpu && sudo docker pull $t && sudo docker run -it --ipc=host -v "$(pwd)"/datasets:/usr/src/datasets $t diff --git a/ultralytics/docker/Dockerfile-cpu:Zone.Identifier b/ultralytics/docker/Dockerfile-cpu:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-cpu:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile-jetson b/ultralytics/docker/Dockerfile-jetson new file mode 100755 index 0000000..c177b8e --- /dev/null +++ b/ultralytics/docker/Dockerfile-jetson @@ -0,0 +1,48 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:jetson image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Supports JetPack for YOLOv8 on Jetson Nano, TX1/TX2, Xavier NX, AGX Xavier, AGX Orin, and Orin NX + +# Start FROM https://catalog.ngc.nvidia.com/orgs/nvidia/containers/l4t-pytorch +FROM nvcr.io/nvidia/l4t-pytorch:r35.2.1-pth2.0-py3 + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +# g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package +RUN apt update \ + && apt install --no-install-recommends -y gcc git zip curl htop libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 + +# Create working directory +WORKDIR /usr/src/ultralytics + +# Copy contents +# COPY . /usr/src/ultralytics # git permission issues inside container +RUN git clone https://github.com/ultralytics/ultralytics -b main /usr/src/ultralytics +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt /usr/src/ultralytics/ + +# Remove opencv-python from requirements.txt as it conflicts with opencv-python installed in base image +RUN grep -v '^opencv-python' requirements.txt > tmp.txt && mv tmp.txt requirements.txt + +# Install pip packages manually for TensorRT compatibility https://github.com/NVIDIA/TensorRT/issues/2567 +RUN python3 -m pip install --upgrade pip wheel +RUN pip install --no-cache tqdm matplotlib pyyaml psutil pandas onnx "numpy==1.23" +RUN pip install --no-cache -e . + +# Set environment variables +ENV OMP_NUM_THREADS=1 + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-jetson && sudo docker build --platform linux/arm64 -f docker/Dockerfile-jetson -t $t . 
&& sudo docker push $t + +# Run +# t=ultralytics/ultralytics:latest-jetson && sudo docker run -it --ipc=host $t + +# Pull and Run +# t=ultralytics/ultralytics:latest-jetson && sudo docker pull $t && sudo docker run -it --ipc=host $t + +# Pull and Run with NVIDIA runtime +# t=ultralytics/ultralytics:latest-jetson && sudo docker pull $t && sudo docker run -it --ipc=host --runtime=nvidia $t diff --git a/ultralytics/docker/Dockerfile-jetson:Zone.Identifier b/ultralytics/docker/Dockerfile-jetson:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-jetson:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile-python b/ultralytics/docker/Dockerfile-python new file mode 100755 index 0000000..b227fa6 --- /dev/null +++ b/ultralytics/docker/Dockerfile-python @@ -0,0 +1,52 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds ultralytics/ultralytics:latest-cpu image on DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is CPU-optimized for ONNX, OpenVINO and PyTorch YOLOv8 deployments + +# Use the official Python 3.10 slim-bookworm as base image +FROM python:3.10-slim-bookworm + +# Downloads to user config dir +ADD https://ultralytics.com/assets/Arial.ttf https://ultralytics.com/assets/Arial.Unicode.ttf /root/.config/Ultralytics/ + +# Install linux packages +# g++ required to build 'tflite_support' and 'lap' packages, libusb-1.0-0 required for 'tflite_support' package +RUN apt update \ + && apt install --no-install-recommends -y python3-pip git zip curl htop libgl1 libglib2.0-0 libpython3-dev gnupg g++ libusb-1.0-0 + +# Create working directory +WORKDIR /usr/src/ultralytics + +# Copy contents +# COPY . /usr/src/ultralytics # git permission issues inside container +RUN git clone https://github.com/ultralytics/ultralytics -b main /usr/src/ultralytics +ADD https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt /usr/src/ultralytics/ + +# Remove python3.11/EXTERNALLY-MANAGED or use 'pip install --break-system-packages' avoid 'externally-managed-environment' Ubuntu nightly error +# RUN rm -rf /usr/lib/python3.11/EXTERNALLY-MANAGED + +# Install pip packages +RUN python3 -m pip install --upgrade pip wheel +RUN pip install --no-cache -e ".[export]" --extra-index-url https://download.pytorch.org/whl/cpu + +# Run exports to AutoInstall packages +RUN yolo export model=tmp/yolov8n.pt format=edgetpu imgsz=32 +RUN yolo export model=tmp/yolov8n.pt format=ncnn imgsz=32 +# Requires <= Python 3.10, bug with paddlepaddle==2.5.0 https://github.com/PaddlePaddle/X2Paddle/issues/991 +RUN pip install --no-cache paddlepaddle==2.4.2 x2paddle +# Remove exported models +RUN rm -rf tmp + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-python && sudo docker build -f docker/Dockerfile-python -t $t . 
&& sudo docker push $t + +# Run +# t=ultralytics/ultralytics:latest-python && sudo docker run -it --ipc=host $t + +# Pull and Run +# t=ultralytics/ultralytics:latest-python && sudo docker pull $t && sudo docker run -it --ipc=host $t + +# Pull and Run with local volume mounted +# t=ultralytics/ultralytics:latest-python && sudo docker pull $t && sudo docker run -it --ipc=host -v "$(pwd)"/datasets:/usr/src/datasets $t diff --git a/ultralytics/docker/Dockerfile-python:Zone.Identifier b/ultralytics/docker/Dockerfile-python:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-python:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile-runner b/ultralytics/docker/Dockerfile-runner new file mode 100755 index 0000000..c0f8659 --- /dev/null +++ b/ultralytics/docker/Dockerfile-runner @@ -0,0 +1,38 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +# Builds GitHub actions CI runner image for deployment to DockerHub https://hub.docker.com/r/ultralytics/ultralytics +# Image is CUDA-optimized for YOLOv8 single/multi-GPU training and inference tests + +# Start FROM Ultralytics GPU image +FROM ultralytics/ultralytics:latest + +# Set the working directory +WORKDIR /actions-runner + +# Download and unpack the latest runner from https://github.com/actions/runner +RUN FILENAME=actions-runner-linux-x64-2.309.0.tar.gz && \ + curl -o $FILENAME -L https://github.com/actions/runner/releases/download/v2.309.0/$FILENAME && \ + tar xzf $FILENAME && \ + rm $FILENAME + +# Install runner dependencies +ENV RUNNER_ALLOW_RUNASROOT=1 +ENV DEBIAN_FRONTEND=noninteractive +RUN ./bin/installdependencies.sh && \ + apt-get -y install libicu-dev + +# Inline ENTRYPOINT command to configure and start runner with default TOKEN and NAME +ENTRYPOINT sh -c './config.sh --url https://github.com/ultralytics/ultralytics \ + --token ${GITHUB_RUNNER_TOKEN:-TOKEN} \ + --name ${GITHUB_RUNNER_NAME:-NAME} \ + --labels gpu-latest \ + --replace && \ + ./run.sh' + + +# Usage Examples ------------------------------------------------------------------------------------------------------- + +# Build and Push +# t=ultralytics/ultralytics:latest-runner && sudo docker build -f docker/Dockerfile-runner -t $t . && sudo docker push $t + +# Pull and Run in detached mode with access to GPUs 0 and 1 +# t=ultralytics/ultralytics:latest-runner && sudo docker run -d -e GITHUB_RUNNER_TOKEN=TOKEN -e GITHUB_RUNNER_NAME=NAME --ipc=host --gpus '"device=0,1"' $t diff --git a/ultralytics/docker/Dockerfile-runner:Zone.Identifier b/ultralytics/docker/Dockerfile-runner:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile-runner:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docker/Dockerfile:Zone.Identifier b/ultralytics/docker/Dockerfile:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docker/Dockerfile:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/README.md b/ultralytics/docs/README.md new file mode 100755 index 0000000..a5da59e --- /dev/null +++ b/ultralytics/docs/README.md @@ -0,0 +1,102 @@ +# Ultralytics Docs + +Ultralytics Docs are deployed to [https://docs.ultralytics.com](https://docs.ultralytics.com). 
+ +[![pages-build-deployment](https://github.com/ultralytics/docs/actions/workflows/pages/pages-build-deployment/badge.svg)](https://github.com/ultralytics/docs/actions/workflows/pages/pages-build-deployment) [![Check Broken links](https://github.com/ultralytics/docs/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/docs/actions/workflows/links.yml) + +## Install Ultralytics package + +[![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + +To install the ultralytics package in developer mode, you will need to have Git and Python 3 installed on your system. Then, follow these steps: + +1. Clone the ultralytics repository to your local machine using Git: + + ```bash + git clone https://github.com/ultralytics/ultralytics.git + ``` + +2. Navigate to the root directory of the repository: + + ```bash + cd ultralytics + ``` + +3. Install the package in developer mode using pip: + + ```bash + pip install -e '.[dev]' + ``` + +This will install the ultralytics package and its dependencies in developer mode, allowing you to make changes to the package code and have them reflected immediately in your Python environment. + +Note that you may need to use the pip3 command instead of pip if you have multiple versions of Python installed on your system. + +## Building and Serving Locally + +The `mkdocs serve` command is used to build and serve a local version of the MkDocs documentation site. It is typically used during the development and testing phase of a documentation project. + +```bash +mkdocs serve +``` + +Here is a breakdown of what this command does: + +- `mkdocs`: This is the command-line interface (CLI) for the MkDocs static site generator. It is used to build and serve MkDocs sites. +- `serve`: This is a subcommand of the `mkdocs` CLI that tells it to build and serve the documentation site locally. +- `-a`: This flag specifies the hostname and port number to bind the server to. The default value is `localhost:8000`. +- `-t`: This flag specifies the theme to use for the documentation site. The default value is `mkdocs`. +- `-s`: This flag tells the `serve` command to serve the site in silent mode, which means it will not display any log messages or progress updates. When you run the `mkdocs serve` command, it will build the documentation site using the files in the `docs/` directory and serve it at the specified hostname and port number. You can then view the site by going to the URL in your web browser. + +While the site is being served, you can make changes to the documentation files and see them reflected in the live site immediately. This is useful for testing and debugging your documentation before deploying it to a live server. + +To stop the serve command and terminate the local server, you can use the `CTRL+C` keyboard shortcut. + +## Building and Serving Multi-Language + +For multi-language MkDocs sites use the following additional steps: + +1. Add all new language *.md files to git commit: `git add docs/**/*.md -f` +2. Build all languages to the `/site` directory. Verify that the top-level `/site` directory contains `CNAME`, `robots.txt` and `sitemap.xml` files, if applicable. 
+ + ```bash + # Remove existing /site directory + rm -rf site + + # Loop through all *.yml files in the docs directory + mkdocs build -f docs/mkdocs.yml + for file in docs/mkdocs_*.yml; do + echo "Building MkDocs site with configuration file: $file" + mkdocs build -f "$file" + done + ``` + +3. Preview in web browser with: + + ```bash + cd site + python -m http.server + open http://localhost:8000 # on macOS + ``` + +Note the above steps are combined into the Ultralytics [build_docs.py](https://github.com/ultralytics/ultralytics/blob/main/docs/build_docs.py) script. + +## Deploying Your Documentation Site + +To deploy your MkDocs documentation site, you will need to choose a hosting provider and a deployment method. Some popular options include GitHub Pages, GitLab Pages, and Amazon S3. + +Before you can deploy your site, you will need to configure your `mkdocs.yml` file to specify the remote host and any other necessary deployment settings. + +Once you have configured your `mkdocs.yml` file, you can use the `mkdocs deploy` command to build and deploy your site. This command will build the documentation site using the files in the `docs/` directory and the specified configuration file and theme, and then deploy the site to the specified remote host. + +For example, to deploy your site to GitHub Pages using the gh-deploy plugin, you can use the following command: + +```bash +mkdocs gh-deploy +``` + +If you are using GitHub Pages, you can set a custom domain for your documentation site by going to the "Settings" page for your repository and updating the "Custom domain" field in the "GitHub Pages" section. + +![196814117-fc16e711-d2be-4722-9536-b7c6d78fd167](https://user-images.githubusercontent.com/26833433/210150206-9e86dcd7-10af-43e4-9eb2-9518b3799eac.png) + +For more information on deploying your MkDocs documentation site, see the [MkDocs documentation](https://www.mkdocs.org/user-guide/deploying-your-docs/). diff --git a/ultralytics/docs/README.md:Zone.Identifier b/ultralytics/docs/README.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/README.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/index.md b/ultralytics/docs/ar/index.md new file mode 100755 index 0000000..ab95a6a --- /dev/null +++ b/ultralytics/docs/ar/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: ุงุณุชูƒุดู ุฏู„ูŠู„ ูƒุงู…ู„ ู„ู€ Ultralytics YOLOv8 ุŒ ู†ู…ูˆุฐุฌ ูƒุดู ุงู„ูƒุงุฆู†ุงุช ูˆุชุฌุฒุฆุฉ ุงู„ุตูˆุฑ ุฐูˆ ุงู„ุณุฑุนุฉ ุงู„ุนุงู„ูŠุฉ ูˆุงู„ุฏู‚ุฉ ุงู„ุนุงู„ูŠุฉ. ุชุซุจูŠุช ุงู„ู…ุญุฑุฑุฉ ุŒ ูˆุงู„ุชู†ุจุค ุŒ ูˆุงู„ุชุฏุฑูŠุจ ูˆุงู„ู…ุฒูŠุฏ. +keywords: UltralyticsุŒ YOLOv8ุŒ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ุชุฌุฒุฆุฉ ุงู„ุตูˆุฑุŒ ุงู„ุชุนู„ู… ุงู„ุขู„ูŠุŒ ุงู„ุชุนู„ู… ุงู„ุนู…ูŠู‚ุŒ ุงู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉุŒ YOLOv8 installationุŒ YOLOv8 predictionุŒ YOLOv8 trainingุŒ ุชุงุฑูŠุฎ YOLOุŒ ุชุฑุงุฎูŠุต YOLO +--- + +
+
+<!-- banner (HTML stripped): Ultralytics YOLO banner -->
+
+<!-- social links (HTML stripped): Ultralytics GitHub | LinkedIn | Twitter | YouTube | TikTok | Instagram | Discord -->
+
+<!-- badges (HTML stripped): Ultralytics CI | Ultralytics Code Coverage | YOLOv8 Citation | Docker Pulls | Discord | Run on Gradient | Open In Colab | Open In Kaggle -->
+
+Introducing [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), the latest version of the acclaimed real-time object detection and image segmentation model. YOLOv8 is built on cutting-edge advances in deep learning and computer vision, offering superior performance in terms of speed and accuracy. Its streamlined design makes it suitable for various applications and easily adaptable to different hardware platforms, from edge devices to cloud APIs.
+
+Explore the YOLOv8 Docs, a comprehensive resource designed to help you understand and use its features and capabilities. Whether you are a seasoned machine learning practitioner or new to the field, this hub aims to help you realize the full potential of YOLOv8 in your projects.
+
+!!! Note "Note"
+
+    ๐Ÿšง Our multi-language documentation is currently under development, and we are working hard to improve it. Thank you for your patience! ๐Ÿ™
+
+## Where to Start
+
+- **Install** `ultralytics` with pip and get up and running in minutes &nbsp; [:material-clock-fast: Get Started](quickstart.md){ .md-button }
+- **Predict** new images and videos with YOLOv8 &nbsp; [:octicons-image-16: Predict on Images](modes/predict.md){ .md-button }
+- **Train** a new YOLOv8 model on your own custom dataset &nbsp; [:fontawesome-solid-brain: Train a Model](modes/train.md){ .md-button }
+- **Explore** YOLOv8 tasks like segmentation, classification, pose, and tracking &nbsp; [:material-magnify-expand: Explore Tasks](tasks/index.md){ .md-button }
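+
+As a first step after installing, a minimal prediction sketch (the pretrained weights download automatically on first use):
+
+```python
+from ultralytics import YOLO
+
+# Run a pretrained detector on a sample image and print each detection.
+model = YOLO("yolov8n.pt")
+results = model("https://ultralytics.com/images/bus.jpg")
+for box in results[0].boxes:
+    print(box.cls, box.conf, box.xyxy)  # class id, confidence, box corners
+```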

+
+ +
+ ู…ุดุงู‡ุฏุฉ: ูƒูŠููŠุฉ ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ YOLOv8 ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู…ุฎุตุตุฉ ููŠ ุฌูˆุฌู„ ูƒูˆู„ุงุจ. +

+
+## YOLO: A Brief History
+
+[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), a popular object detection and image segmentation model, was developed by Joseph Redmon and Ali Farhadi at the University of Washington. Launched in 2015, YOLO quickly gained popularity for its high speed and accuracy.
+
+- [YOLOv2](https://arxiv.org/abs/1612.08242), released in 2016, improved the original model by incorporating batch normalization, anchor boxes, and dimension clusters.
+- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), launched in 2018, further enhanced the model's performance using a more efficient backbone network, multiple anchors, and spatial pyramid pooling.
+- [YOLOv4](https://arxiv.org/abs/2004.10934) was released in 2020, introducing innovations such as improved data augmentation, a new anchor-free detection head, and a new loss function.
+- [YOLOv5](https://github.com/ultralytics/yolov5) further improved the model's performance and added new features such as hyperparameter optimization, integrated experiment tracking, and automatic export to popular export formats.
+- [YOLOv6](https://github.com/meituan/YOLOv6) was open-sourced by [Meituan](https://about.meituan.com/) in 2022 and is in use in many of the company's autonomous delivery robots.
+- [YOLOv7](https://github.com/WongKinYiu/yolov7) added additional tasks such as pose estimation on the COCO keypoints dataset.
+- [YOLOv8](https://github.com/ultralytics/ultralytics) is the latest version of YOLO by Ultralytics. As a cutting-edge, state-of-the-art model, YOLOv8 builds on the success of previous versions, introducing new features and improvements for enhanced performance, flexibility, and efficiency. YOLOv8 supports a full range of vision AI tasks, including [detection](tasks/detect.md), [segmentation](tasks/segment.md), [pose estimation](tasks/pose.md), [tracking](modes/track.md), and [classification](tasks/classify.md). This versatility allows users to leverage YOLOv8's capabilities across diverse applications and domains.
+
+## YOLO Licenses: How is Ultralytics YOLO licensed?
+
+Ultralytics offers two licensing options to accommodate diverse use cases:
+
+- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
+- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
+
+Our licensing strategy is designed to ensure that any improvements to our open-source projects are returned to the community. We hold the principles of open source close to our hearts โค๏ธ, and our mission is to guarantee that our contributions can be used and expanded upon in ways that benefit everyone.
diff --git a/ultralytics/docs/ar/index.md:Zone.Identifier b/ultralytics/docs/ar/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/fast-sam.md b/ultralytics/docs/ar/models/fast-sam.md
new file mode 100755
index 0000000..68b39fc
--- /dev/null
+++ b/ultralytics/docs/ar/models/fast-sam.md
@@ -0,0 +1,191 @@
+---
+comments: true
+description: Explore FastSAM, a fast CNN-based solution for real-time segmentation of any object in an image. Improved user interaction, computational efficiency, and adaptability across vision tasks.
+keywords: FastSAM, machine learning, CNN-based solution, object segmentation, real-time solution, Ultralytics, vision tasks, image processing, industrial applications, user interaction
+---
+
+# Fast Segment Anything Model (FastSAM)
+
+The Fast Segment Anything Model (FastSAM) is a novel, real-time CNN-based solution for the Segment Anything task. This task is designed to segment any object within an image based on various possible user interaction prompts. FastSAM significantly reduces computational demands while maintaining competitive performance, making it a practical choice for a variety of vision tasks.
+
+![Fast Segment Anything Model (FastSAM) architecture overview](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg)
+
+## Overview
+
+FastSAM is designed to address the limitations of the [Segment Anything Model (SAM)](sam.md), a heavy Transformer model with substantial computational resource requirements. FastSAM decouples the segment anything task into two sequential stages: all-instance segmentation and prompt-guided selection. The first stage uses [YOLOv8-seg](../tasks/segment.md) to produce the segmentation masks of all instances in the image. In the second stage, it outputs the region-of-interest corresponding to the prompt.
+
+## Key Features
+
+1. **Real-time Solution:** By leveraging the computational efficiency of CNNs, FastSAM provides a real-time solution for the segment anything task, making it valuable for industrial applications that require quick results.
+
+2. **Efficiency and Performance:** FastSAM offers a significant reduction in computational and resource demands without compromising on performance quality. It achieves performance comparable to SAM but with drastically reduced computational resources, enabling real-time application.
+
+3. **Prompt-guided Segmentation:** FastSAM can segment any object within an image, guided by various possible user interaction prompts, providing flexibility and adaptability in different scenarios.
+
+4. **Based on YOLOv8-seg:** FastSAM is based on [YOLOv8-seg](../tasks/segment.md), an object detector equipped with an instance segmentation branch. This allows it to effectively produce the segmentation masks of all instances in an image.
+
+5. **Competitive Results on Benchmarks:** On the object proposal task on MS COCO, FastSAM achieves high scores at a significantly faster speed than [SAM](sam.md) on a single NVIDIA RTX 3090, demonstrating its efficiency and capability.
+
+6. **Practical Applications:** The proposed approach provides a new, practical solution for a large number of vision tasks at a really high speed, tens or hundreds of times faster than current methods.
+
+7. **Model Compression Feasibility:** FastSAM demonstrates that the computational effort can be significantly reduced by introducing an artificial prior to the structure, opening new possibilities for large model architectures for general vision tasks.
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pretrained weights, the tasks they support, and their compatibility with different operating modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by โœ… for supported modes and โŒ for unsupported modes.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| FastSAM-s  | `FastSAM-s.pt`      | [Instance Segmentation](../tasks/segment.md) | โœ…        | โŒ          | โŒ        | โœ…      |
+| FastSAM-x  | `FastSAM-x.pt`      | [Instance Segmentation](../tasks/segment.md) | โœ…        | โŒ          | โŒ        | โœ…      |
+
+## Usage Examples
+
+The FastSAM models are easy to integrate into your Python applications. Ultralytics provides a user-friendly Python API and CLI commands to streamline development.
+
+### Predict Usage
+
+To perform object detection on an image, use the `predict` method as shown below:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM
+        from ultralytics.models.fastsam import FastSAMPrompt
+
+        # Define an inference source
+        source = 'path/to/bus.jpg'
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Run inference on an image
+        everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
+
+        # Prepare a Prompt Process object
+        prompt_process = FastSAMPrompt(source, everything_results, device='cpu')
+
+        # Everything prompt
+        ann = prompt_process.everything_prompt()
+
+        # Bbox default shape [0,0,0,0] -> [x1,y1,x2,y2]
+        ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])
+
+        # Text prompt
+        ann = prompt_process.text_prompt(text='a photo of a dog')
+
+        # Point prompt
+        ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
+        prompt_process.plot(annotations=ann, output='./')
+        ```
+
+    === "CLI"
+        ```bash
+        # Load a FastSAM model and segment everything with it
+        yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640
+        ```
+
+This snippet demonstrates the simplicity of loading a pretrained model and running a prediction on an image.
+
+### Val Usage
+
+Validation of the model on a dataset can be done as follows:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Validate the model
+        results = model.val(data='coco8-seg.yaml')
+        ```
+
+    === "CLI"
+        ```bash
+        # Load a FastSAM model and validate it on the COCO8 example dataset at image size 640
+        yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640
+        ```
+
+Please note that FastSAM only supports detection and segmentation of a single class of object. This means it will recognize and segment all objects as the same class. Therefore, when preparing the dataset, you need to convert all object category IDs to 0.
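+
+As a hedged illustration of that last point (not from the official docs), a sketch that remaps every class id in YOLO-format label files to 0; the dataset path is a hypothetical placeholder:
+
+```python
+from pathlib import Path
+
+# Rewrite each YOLO-format label file so every object belongs to class 0,
+# matching FastSAM's single-class assumption.
+for label_file in Path("path/to/labels").rglob("*.txt"):  # hypothetical dataset path
+    remapped = []
+    for line in label_file.read_text().splitlines():
+        parts = line.split()
+        if parts:
+            parts[0] = "0"
+            remapped.append(" ".join(parts))
+    label_file.write_text("\n".join(remapped) + "\n")
+```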
+
+## FastSAM official Usage
+
+FastSAM is also available directly from the [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) repository. Here is a brief overview of the typical steps you might take to use FastSAM:
+
+### Installation
+
+1. Clone the FastSAM repository:
+   ```shell
+   git clone https://github.com/CASIA-IVA-Lab/FastSAM.git
+   ```
+
+2. Create and activate a Conda environment with Python 3.9:
+   ```shell
+   conda create -n FastSAM python=3.9
+   conda activate FastSAM
+   ```
+
+3. Navigate to the cloned repository and install the required packages:
+   ```shell
+   cd FastSAM
+   pip install -r requirements.txt
+   ```
+
+4. Install the CLIP model:
+   ```shell
+   pip install git+https://github.com/openai/CLIP.git
+   ```
+
+### Example Usage
+
+1. Download a [model checkpoint](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing).
+
+2. Use FastSAM for inference. Example commands:
+
+    - Segment everything in an image:
+      ```shell
+      python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg
+      ```
+
+    - Segment specific objects using a text prompt:
+      ```shell
+      python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog"
+      ```
+
+    - Segment objects within a bounding box (provide box coordinates in xywh format):
+      ```shell
+      python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]"
+      ```
+
+    - Segment objects near specific points:
+      ```shell
+      python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]"
+      ```
+
+Additionally, you can try FastSAM through the [Colab demo](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) or the [HuggingFace web demo](https://huggingface.co/spaces/An-619/FastSAM) for a visual experience.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the FastSAM authors for their significant contributions in the field of real-time instance segmentation:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @misc{zhao2023fast,
+            title={Fast Segment Anything},
+            author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang},
+            year={2023},
+            eprint={2306.12156},
+            archivePrefix={arXiv},
+            primaryClass={cs.CV}
+      }
+      ```
+
+The original FastSAM paper can be found on [arXiv](https://arxiv.org/abs/2306.12156). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). We appreciate their efforts to advance the field and make their work accessible to the broader community.
diff --git a/ultralytics/docs/ar/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/ar/models/fast-sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/fast-sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/index.md b/ultralytics/docs/ar/models/index.md
new file mode 100755
index 0000000..a399f0d
--- /dev/null
+++ b/ultralytics/docs/ar/models/index.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Explore the diverse range of the YOLO family, plus the SAM, MobileSAM, FastSAM, YOLO-NAS and RT-DETR models supported by Ultralytics. Get started with examples for both CLI and Python usage.
+keywords: Ultralytics, documentation, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, models, architectures, Python, CLI
+---
+
+# Models Supported by Ultralytics
+
+Welcome to the Ultralytics models documentation! We offer support for a wide range of models, each tailored to specific tasks such as [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you are interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../../help/contributing.md).
+
+!!! Note "Note"
+
+    🚧 Our multi-language documentation is currently under construction, and we are working hard to improve it. Thank you for your patience! 🙏
+
+## Featured Models
+
+Here are some of the key models supported:
+
+1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family, originally created by Joseph Redmon, known for its efficient real-time object detection capabilities.
+2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020.
+3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions.
+4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022, and used in many of the company's autonomous delivery robots.
+5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4.
+6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoint estimation, and classification.
+7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM).
+8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University.
+9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by the Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences.
+10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
+11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models.
+
+**Watch:** Run Ultralytics YOLO models in just a few lines of code.
+
+## Getting Started: Usage Examples
+
+This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+Note the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()` and `RTDETR()` classes to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
+
+## Contributing New Models
+
+Interested in contributing your model to Ultralytics? Great! We are always open to expanding our model portfolio.
+
+1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics).
+
+2. **Clone Your Fork**: Clone your fork to your local machine and create a new branch to work on.
+
+3. **Implement Your Model**: Add your model following the coding standards and guidelines provided in our [Contributing Guide](../../help/contributing.md).
+
+4. **Test Thoroughly**: Make sure to test your model rigorously, both in isolation and as part of the pipeline.
+
+5. **Create a Pull Request**: Once you are satisfied with your model, create a pull request to the main repository for review.
+
+6. **Code Review & Merging**: After review, if your model meets our criteria, it will be merged into the main repository.
+
+For detailed steps, please refer to our [Contributing Guide](../../help/contributing.md).
diff --git a/ultralytics/docs/ar/models/index.md:Zone.Identifier b/ultralytics/docs/ar/models/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/mobile-sam.md b/ultralytics/docs/ar/models/mobile-sam.md
new file mode 100755
index 0000000..ec671da
--- /dev/null
+++ b/ultralytics/docs/ar/models/mobile-sam.md
@@ -0,0 +1,116 @@
+---
+comments: true
+description: Learn about MobileSAM and its implementation, compare it with the original SAM, and see how to download and test it in the Ultralytics framework. Improve your mobile applications today.
+keywords: MobileSAM, Ultralytics, SAM, mobile applications, Arxiv, GPU, API, image encoder, mask decoder, model download, testing method
+---
+
+![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true)
+
+# Mobile Segment Anything (MobileSAM)
+
+The MobileSAM paper is now available on [arXiv](https://arxiv.org/pdf/2306.14289.pdf).
+
+A demonstration of MobileSAM running on a CPU can be accessed [here](https://huggingface.co/spaces/dhkim2810/MobileSAM). Performance on a Mac i5 CPU takes approximately 3 seconds. On the Hugging Face demo, the interface and lower-performance CPUs contribute to a slower response, but it continues to function effectively.
+
+MobileSAM has been implemented in various projects including [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling) and [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D).
+
+MobileSAM was trained on a single GPU with a dataset of 100k images (1% of the original images) in less than a day. The code for this training will be made available in the future.
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes.
+
+| Model Type | Pre-trained Weights | Supported Tasks                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| MobileSAM  | `mobile_sam.pt`     | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+
+## Adapting from SAM to MobileSAM
+
+Since MobileSAM retains the same pipeline as the original SAM, we have incorporated the original's pre-processing, post-processing, and all other interfaces. Consequently, those currently using the original SAM can transition to MobileSAM with minimal effort.
+
+MobileSAM performs comparably to the original SAM and retains the same pipeline except for a change in the image encoder. On a single GPU, MobileSAM operates at about 12ms per image: 8ms for the image encoder and 4ms for the mask decoder.
+
+The following table provides a comparison of the ViT-based image encoders:
+
+| Image Encoder | Original SAM | MobileSAM |
+|---------------|--------------|-----------|
+| Parameters    | 611M         | 5M        |
+| Speed         | 452ms        | 8ms       |
+
+The original SAM and MobileSAM use the same prompt-guided mask decoder:
+
+| Mask Decoder | Original SAM | MobileSAM |
+|--------------|--------------|-----------|
+| Parameters   | 3.876M       | 3.876M    |
+| Speed        | 4ms          | 4ms       |
+
+Here is a comparison of the whole pipeline:
+
+| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM |
+|--------------------------|--------------|-----------|
+| Parameters               | 615M         | 9.66M     |
+| Speed                    | 456ms        | 12ms      |
+
+The performance of MobileSAM and the original SAM is demonstrated using both point and box prompts.
+
+![Image with Point as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+![Image with Box as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+With its superior performance, MobileSAM is approximately 5 times smaller and 7 times faster than the current FastSAM. More details are available on the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM).
+
+## Testing MobileSAM in Ultralytics
+
+Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both Point and Box prompts.
+
+### Model Download
+
+You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt).
+
+### Point Prompt
+
Example "ู…ุซุงู„" + + === "Python" + ```python + from ultralytics import SAM + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = SAM('mobile_sam.pt') + + # ุชูˆู‚ุน ุฌุฒุก ุจู†ุงุกู‹ ุนู„ู‰ ู†ู‚ุทุฉ ู…ุญูุฒ + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### ุงู„ุตู†ุฏูˆู‚ ูƒูƒู„ู…ุฉ ู…ุญูุฒ + +!!! Example "ู…ุซุงู„" + + === "Python" + ```python + from ultralytics import SAM + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = SAM('mobile_sam.pt') + + # ุชูˆู‚ุน ุฌุฒุก ุจู†ุงุกู‹ ุนู„ู‰ ุตู†ุฏูˆู‚ ู…ุญูุฒ + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +ู„ู‚ุฏ ู‚ู…ู†ุง ุจุชู†ููŠุฐ "MobileSAM" ูˆ "SAM" ุจุงุณุชุฎุฏุงู… ู†ูุณ API. ู„ู…ุฒูŠุฏ ู…ู† ู…ุนู„ูˆู…ุงุช ุงู„ุงุณุชุฎุฏุงู…ุŒ ูŠูุฑุฌู‰ ุงู„ุงุทู‘ู„ุงุน ุนู„ู‰ [ุตูุญุฉ SAM](sam.md). + +## ุงู„ุงู‚ุชุจุงุณ ูˆุงู„ุดูƒุฑ + +ุฅุฐุง ูˆุฌุฏุช MobileSAM ู…ููŠุฏู‹ุง ููŠ ุฃุจุญุงุซูƒ ุฃูˆ ุนู…ู„ูƒ ุงู„ุชุทูˆูŠุฑูŠุŒ ูŠูุฑุฌู‰ ุงู„ู†ุธุฑ ููŠ ุงุณุชุดู‡ุงุฏ ูˆุฑู‚ุชู†ุง: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } diff --git a/ultralytics/docs/ar/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/ar/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/models/rtdetr.md b/ultralytics/docs/ar/models/rtdetr.md new file mode 100755 index 0000000..f60d049 --- /dev/null +++ b/ultralytics/docs/ar/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: ุงูƒุชุดู ู…ูŠุฒุงุช ูˆููˆุงุฆุฏ RT-DETR ู…ู† BaiduุŒ ูˆู‡ูˆ ูƒุงุดู ูƒุงุฆู†ุงุช ูุนุงู„ ูˆู‚ุงุจู„ ู„ู„ุชูƒูŠู ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ูŠุนุชู…ุฏ ุนู„ู‰ Vision TransformersุŒ ุจู…ุง ููŠ ุฐู„ูƒ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง. +keywords: RT-DETR, Baidu, Vision Transformers, ูƒุดู ูƒุงุฆู†ุงุช, ุฃุฏุงุก ููˆุฑูŠ, CUDA, TensorRT, ุงุฎุชูŠุงุฑ ุงู„ุงุณุชุนู„ุงู… ุงู„ู…ุฑุชุจุท ุจู€ IoU, Ultralytics, ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ ุงู„ุชุทุจูŠู‚ุงุช ุงู„ุฎุงุตุฉ ุจู„ุบุฉ Python, PaddlePaddle +--- + +# RT-DETR ู…ู† Baidu: ุงูƒุชุดุงู ูƒุงุฆู†ุงุช ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ูŠุนุชู…ุฏ ุนู„ู‰ Vision Transformer + +## ุงู„ู†ุธุฑุฉ ุงู„ุนุงู…ุฉ + +Real-Time Detection Transformer (RT-DETR)ุŒ ุงู„ู…ุทูˆุฑ ู…ู† ู‚ุจู„ BaiduุŒ ู‡ูˆ ูƒุงุดู ุญุฏูŠุซ ุงู„ุทุฑุงุฒ ูŠูˆูุฑ ุฃุฏุงุกู‹ ูุนู„ูŠู‹ุง ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ู…ุน ุงู„ุญูุงุธ ุนู„ู‰ ุฏู‚ุฉ ุนุงู„ูŠุฉ. ูŠุณุชููŠุฏ ู…ู† ู‚ูˆุฉ Vision Transformers (ViT) ููŠ ู…ุนุงู„ุฌุฉ ุงู„ู…ูŠุฒุงุช ู…ุชุนุฏุฏุฉ ุงู„ู…ู‚ูŠุงุณ ุนู† ุทุฑูŠู‚ ูุตู„ ุงู„ุชูุงุนู„ุงุช ุฏุงุฎู„ ุงู„ู…ู‚ูŠุงุณ ูˆุฏู…ุฌ ุงู„ุชูุงุนู„ุงุช ุจูŠู† ุงู„ู…ู‚ุงูŠูŠุณ ุงู„ู…ุฎุชู„ูุฉ. ูŠุชูƒูŠู RT-DETR ุจุดูƒู„ ูƒุจูŠุฑ ูˆูŠุฏุนู… ุถุจุท ุณุฑุนุฉ ุงู„ุงุณุชุนู„ุงู… ุจุงุณุชุฎุฏุงู… ุทุจู‚ุงุช ู…ุฎุชู„ูุฉ ููŠ ุงู„ู…ููƒุฑุฉ ุจุฏูˆู† ุฅุนุงุฏุฉ ุงู„ุชุฏุฑูŠุจ. ูŠุชููˆู‚ ู‡ุฐุง ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ุงู„ุนุฏูŠุฏ ู…ู† ูƒุงุดูุงุช ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ุงู„ุฃุฎุฑู‰ุŒ ูˆูŠุณุชููŠุฏ ู…ู† ุงู„ู…ู†ุตุงุช ุงู„ู‚ูˆูŠุฉ ู…ุซู„ CUDA ู…ุน TensorRT. 
+
+![Model example image](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Overview of Baidu's RT-DETR.** The RT-DETR model architecture diagram shows the last three stages of the backbone {S3, S4, S5} as the input to the encoder. The efficient hybrid encoder transforms multiscale features into a sequence of image features through intra-scale feature interaction (AIFI) and the cross-scale feature-fusion module (CCFM). IoU-aware query selection is employed to select a fixed number of image features to serve as initial object queries for the decoder. Finally, the decoder with auxiliary prediction heads iteratively refines the object queries to generate boxes and confidence scores ([source](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Key Features
+
+- **Efficient Hybrid Encoder:** Baidu's RT-DETR uses an efficient hybrid encoder that processes multiscale features by decoupling intra-scale interaction and cross-scale fusion. This unique Vision-Transformer-based design reduces computational costs and allows for real-time object detection.
+- **IoU-Aware Query Selection:** Baidu's RT-DETR improves object query initialization by utilizing IoU-aware query selection. This allows the model to focus on the most relevant objects in the scene, enhancing detection accuracy.
+- **Adaptable Inference Speed:** Baidu's RT-DETR supports flexible adjustment of inference speed by using different decoder layers without the need for retraining. This adaptability facilitates practical application in various real-time object detection scenarios.
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models at different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on a T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on a T4 GPU
+
+## Usage Examples
+
+This example provides simple RT-DETR training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import RTDETR + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ RT-DETR-l ู…ุญู…ูŠ ุจูˆุงุณุทุฉ COCO ู…ุณุจู‚ู‹ุง + model = RTDETR('rtdetr-l.pt') + + # ุนุฑุถ ู…ุนู„ูˆู…ุงุช ุงู„ู†ู…ูˆุฐุฌ (ุงุฎุชูŠุงุฑูŠ) + model.info() + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ู…ุซุงู„ COCO8 ู„ู€ 100 ุฏูˆุฑุฉ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ุชุดุบูŠู„ ุงู„ุงุณุชุฏู„ุงู„ ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ RT-DETR-l ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ```bash + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ RT-DETR-l ู…ุญู…ูŠ ุจูˆุงุณุทุฉ COCO ู…ุณุจู‚ู‹ุง ูˆุชุฏุฑูŠุจู‡ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ู…ุซุงู„ COCO8 ู„ู€ 100 ุฏูˆุฑุฉ + yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640 + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ RT-DETR-l ู…ุญู…ูŠ ุจูˆุงุณุทุฉ COCO ู…ุณุจู‚ู‹ุง ูˆุชุดุบูŠู„ ุงู„ุงุณุชุฏู„ุงู„ ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' + yolo predict model=rtdetr-l.pt source=path/to/bus.jpg + ``` + +## ุงู„ู…ู‡ุงู… ูˆุงู„ุฃูˆุถุงุน ุงู„ู…ุฏุนูˆู…ุฉ + +ูŠู‚ุฏู… ู‡ุฐุง ุงู„ุฌุฏูˆู„ ุฃู†ูˆุงุน ุงู„ู†ู…ุงุฐุฌ ูˆุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ุงู„ู…ุญุฏุฏุฉ ูˆุงู„ู…ู‡ุงู… ุงู„ู…ุฏุนูˆู…ุฉ ุจูˆุงุณุทุฉ ูƒู„ ู†ู…ูˆุฐุฌุŒ ูˆุงู„ุฃูˆุถุงุน ุงู„ู…ุฎุชู„ูุฉ ([ุงู„ุชุฏุฑูŠุจ](../modes/train.md)ุŒ [ุงู„ุชุตุญูŠุญ](../modes/val.md)ุŒ [ุงู„ุชู†ุจุค](../modes/predict.md)ุŒ [ุงู„ุชุตุฏูŠุฑ](../modes/export.md)) ุงู„ุชูŠ ูŠุชู… ุฏุนู…ู‡ุงุŒ ู…ู…ุซู„ุฉ ุจุฑู…ูˆุฒ ุงู„ู€ โœ…. + +| ู†ูˆุน ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง | ุงู„ู…ู‡ุงู… ุงู„ู…ุฏุนูˆู…ุฉ | ุงุณุชู†ุชุงุฌ | ุชุญู‚ู‚ ุตุญุฉ | ุชุฏุฑูŠุจ | ุงุณุชูŠุฑุงุฏ | +|-----------------------|------------------------|----------------------------------|---------|----------|-------|---------| +| RT-DETR ุงู„ูƒุจูŠุฑ | `rtdetr-l.pt` | [ูƒุดู ูƒุงุฆู†ุงุช](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| RT-DETR ุงู„ูƒุจูŠุฑ ุงู„ุฒุงุฆุฏ | `rtdetr-x.pt` | [ูƒุดู ูƒุงุฆู†ุงุช](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +## ุงู„ุงุณุชุดู‡ุงุฏุงุช ูˆุงู„ุชู‚ุฏูŠุฑุงุช + +ุฅุฐุง ุงุณุชุฎุฏู…ุช RT-DETR ู…ู† Baidu ููŠ ุฃุนู…ุงู„ ุงู„ุจุญุซ ุฃูˆ ุงู„ุชุทูˆูŠุฑ ุงู„ุฎุงุตุฉ ุจูƒุŒ ูŠุฑุฌู‰ ุงู„ุงุณุชุดู‡ุงุฏ ุจู€ [ุงู„ูˆุฑู‚ุฉ ุงู„ุฃุตู„ูŠุฉ](https://arxiv.org/abs/2304.08069): + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{lv2023detrs, + title={DETRs Beat YOLOs on Real-time Object Detection}, + author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu}, + year={2023}, + eprint={2304.08069}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ู†ุญู† ู†ูˆุฏ ุฃู† ู†ุนุฑุจ ุนู† ุงู…ุชู†ุงู†ู†ุง ู„ู€ Baidu ูˆูุฑูŠู‚ [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) ู„ุฅู†ุดุงุก ูˆุตูŠุงู†ุฉ ู‡ุฐ ุงู„ู…ูˆุฑุฏ ุงู„ู‚ูŠู… ู„ู…ุฌุชู…ุน ุงู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉ. ู†ู‚ุฏุฑ ุชูุงุนู„ู‡ู… ู…ุน ุงู„ู…ุฌุงู„ ู…ู† ุฎู„ุงู„ ุชุทูˆูŠุฑ ูƒุงุดู ุงู„ูƒุงุฆู†ุงุช ุงู„ุญู‚ูŠู‚ูŠ ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ุงู„ู‚ุงุฆู… ุนู„ู‰ Vision TransformersุŒ RT-DETR. 
+
+*keywords: RT-DETR, transformer, Vision Transformers, Baidu RT-DETR, PaddlePaddle, PaddlePaddle RT-DETR, real-time object detection, Vision-Transformer-based object detection, pre-trained PaddlePaddle RT-DETR models, using Baidu's RT-DETR, Ultralytics Python API*
diff --git a/ultralytics/docs/ar/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/ar/models/rtdetr.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/rtdetr.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/sam.md b/ultralytics/docs/ar/models/sam.md
new file mode 100755
index 0000000..b47d5ec
--- /dev/null
+++ b/ultralytics/docs/ar/models/sam.md
@@ -0,0 +1,225 @@
+---
+comments: true
+description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time promptable image segmentation. Learn about its flexibility in segmentation, its zero-shot transfer performance, and how to use it.
+keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
+---
+
+# Segment Anything Model (SAM)
+
+Welcome to the frontier of image segmentation with the Segment Anything Model, or SAM. This revolutionary model has changed the game by introducing promptable image segmentation with real-time performance, setting new standards in the field.
+
+## Introduction to SAM: The Segment Anything Model
+
+The Segment Anything Model, or SAM, is a cutting-edge image segmentation model that allows promptable segmentation, providing unparalleled flexibility in image analysis tasks. SAM forms the heart of the groundbreaking Segment Anything initiative, a project that introduces a novel model, task, and dataset for image segmentation.
+
+SAM's advanced design allows it to adapt to new image distributions and tasks without prior knowledge, a feature known as zero-shot transfer. Trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks spread over 11 million carefully curated images, SAM has displayed impressive zero-shot performance, surpassing previous fully supervised results in many cases.
+
+![Dataset sample image](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg)
+Example images with overlaid masks from our newly introduced dataset, SA-1B.
+The SA-1B dataset contains 11 million diverse, high-resolution, licensed, and privacy-protecting images and 1.1 billion high-quality segmentation masks. These masks were annotated fully automatically by SAM, and as verified by human ratings and numerous experiments, they are of high quality and diversity. Images are grouped by the number of masks per image for visualization (there are roughly 100 masks per image on average).
+
+## Key Features of the Segment Anything Model (SAM)
+
+- **Promptable Segmentation Task:** SAM was designed with a promptable segmentation task in mind, allowing it to generate valid segmentation masks from any given prompt, such as spatial or text clues identifying an object.
+- **Advanced Architecture:** The Segment Anything Model employs a powerful image encoder, a prompt encoder, and a lightweight mask decoder. This unique architecture enables flexible prompting, real-time mask computation, and ambiguity awareness in segmentation tasks.
+- **The SA-1B Dataset:** Introduced by the Segment Anything project, the SA-1B dataset features more than 1 billion masks on 11 million images. As the largest segmentation dataset to date, it provides SAM with a diverse, large-scale training data source.
+- **Zero-Shot Performance:** SAM displays outstanding zero-shot performance across various segmentation tasks, making it a ready-to-use tool for diverse applications with minimal need for prompt engineering.
+
+For an in-depth look at the Segment Anything Model and the SA-1B dataset, please visit the [Segment Anything website](https://segment-anything.com) and check out the research paper [Segment Anything](https://arxiv.org/abs/2304.02643).
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ for supported modes and ❌ for unsupported modes.
+
+| Model Type | Pre-trained Weights | Supported Tasks                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| SAM base   | `sam_b.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+| SAM large  | `sam_l.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+
+## How to Use SAM: Versatility and Power in Image Segmentation
+
+The Segment Anything Model can be employed for a multitude of downstream tasks that go beyond its training data. This includes edge detection, object proposal generation, instance segmentation, and preliminary text-to-mask prediction. With prompt engineering, SAM can swiftly adapt to new tasks and data distributions in a zero-shot manner, establishing it as a versatile and effective tool for all your image segmentation needs.
+
+### SAM prediction example
+
+!!! Example "Segment with prompts"
+
+    Segment the image with the given prompts.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('sam_b.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Run inference with a bboxes prompt
+        model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+
+        # Run inference with a points prompt
+        model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+!!! Example "Segment everything"
+
+    Segment the whole image.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('sam_b.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Run inference
+        model('path/to/image.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Run inference with a SAM model
+        yolo predict model=sam_b.pt source=path/to/image.jpg
+        ```
+
+- The logic here is to segment the whole image if you do not pass any prompts (bboxes/points/masks).
+
+!!! Example "SAMPredictor example"
+
+    This way you can set the image once and run prompt inference repeatedly without running the image encoder again.
+ + === "ุงู„ุชู†ุจุค ุจุงู„ุฅุดุงุฑุฉ" + + ```ุงู„ุจุงูŠุซูˆู† + from ultralytics.models.sam import Predictor as SAMPredictor + + # ุฅู†ุดุงุก SAMPredictor + ุงู„ุฃุนู„ู‰ = dict (ุงู„ุซู‚ุฉ = 0.25ุŒ task ='segment'ุŒ ุงู„ู†ู…ุท = 'ุชู†ุจุค'ุŒ imgsz = 1024ุŒ ู†ู…ูˆุฐุฌ = "mobile_sam.pt") + predictor = SAMPredictor (overrides = ุงู„ุชุฌุงูˆุฒ ุงู„ุฃุนู„ู‰) + + # ุชุนูŠูŠู† ุงู„ุตูˆุฑุฉ + predictor.set_image("ultralytics/assets/zidane.jpg") # ุชุนูŠูŠู† ุจูˆุงุณุทุฉ ู…ู„ู ุตูˆุฑุฉ + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # ุชุนูŠูŠู† ู…ุน np.ndarray + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # ุฅุนุงุฏุฉ ุชุนูŠูŠู† ุงู„ุตูˆุฑุฉ + predictor.reset_image() + ``` + + ู‚ุทุน ูƒู„ ุดูŠุก ู…ุน ูˆุฌูˆุฏ ู…ุนุทูŠุงุช ุงุฎุชูŠุงุฑูŠุฉ. + + === "ุชู‚ุทูŠุน ูƒู„ ุดูŠุก" + + ```ุงู„ุจุงูŠุซูˆู† + from ultralytics.models.sam import Predictor as SAMPredictor + + # ุฅู†ุดุงุก SAMPredictor + ุงู„ุฃุนู„ู‰ = dict (ุงู„ุซู‚ุฉ = 0.25ุŒ task ='segment'ุŒ ุงู„ู†ู…ุท = 'ุชู†ุจุค'ุŒ imgsz = 1024ุŒ ู†ู…ูˆุฐุฌ = "mobile_sam.pt") + predictor = SAMPredictor (overrides = ุงู„ุชุฌุงูˆุฒ ุงู„ุฃุนู„ู‰) + + # ุชุฌุฒุฆุฉ ู…ุน ุจูŠุงู†ุงุช ุฅุถุงููŠุฉ + results = predictor(source="ultralytics/assets/zidane.jpg"ุŒ crop_n_layers=1ุŒ points_stride=64) + ``` + +- ุงู„ู…ุฒูŠุฏ args ุฅุถุงููŠ ู„ู„ุชู‚ุทูŠุน ูƒู„ ุดูŠุก ุดุงู‡ุฏ ุงู„ุชูˆุซูŠู‚ ู…ุฑุฌุน [`ุงู„ุณู„ุจูŠ/ุชู‚ุฏูŠู…` ู…ุฑุฌุน](../../../reference/models/sam/predict.md). + +## ู…ู‚ุงุฑู†ุฉ SAM ู…ู‚ุงุจู„ YOLOv8 + +ููŠ ู‡ุฐุง ุงู„ู…ูƒุงู† ู†ู‚ุงุฑู† ู†ู…ูˆุฐุฌ SAM ุงู„ุฃุตุบุฑ ุณุงู…ุŒ SAM-b ุŒ ู…ุน ู†ู…ูˆุฐุฌ ุงู„ุชุฌุฒุฆุฉ YOLOv8 ุงู„ุตุบูŠุฑุฉ UltralyticsุŒ [YOLOv8n-seg](../tasks/segment.md): + +| ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุญุฌู… | ุงู„ู…ุนู„ู…ุงุช | ุงู„ุณุฑุนุฉ (ุงู„ู…ุนุงู„ุฌ) | +|----------------------------------------------|----------------------------|-----------------------|--------------------------| +| ุณุงู… SAM-b | 358 ู….ุจุงูŠุช | 94.7 M | 51096 ms/im | +| [MobileSAM](mobile-sam.md) | 40.7 MB | 10.1 M | 46122 ms/im | +| [FastSAM-s](fast-sam.md) with YOLOv8 ุญุงูุธุฉ | 23.7 MB | 11.8 M | 115 ms/im | +| [YOLOv8n-seg](../tasks/segment.md) ู„ Ultraly | **6.7 MB** (53.4 ู…ุฑุฉ ุฃุตุบุฑ) | **3.4 M** (27.9x ุฃู‚ู„) | **59 ms/im** (866x ุฃุณุฑุน) | + +ู‡ุฐู‡ ุงู„ู…ู‚ุงุฑู†ุฉ ุชุธู‡ุฑ ุงู„ุงุฎุชู„ุงูุงุช ููŠ ุฃู…ุฑ ุงู„ู…ู‚ุฏุงุฑ ูˆุงู„ุณุฑุนุฉ ุจูŠู† ุงู„ู†ู…ุงุฐุฌ. ููŠ ุญูŠู† ูŠู‚ุฏู… SAM ู‚ุฏุฑุงุช ูุฑูŠุฏุฉ ู„ู„ุชุฌุฒุฆุฉ ุงู„ุชู„ู‚ุงุฆูŠุฉ ุŒ ุฅู„ุง ุฃู†ู‡ ู„ูŠุณ ู…ู†ุงูุณู‹ุง ู…ุจุงุดุฑู‹ุง ู„ู†ู…ุงุฐุฌ ุงู„ุชุฌุฒุฆุฉ YOLOv8 ุŒ ุญูŠุซ ุชูƒูˆู† ุฃุตุบุฑ ูˆุฃุณุฑุน ูˆุฃูƒุซุฑ ูƒูุงุกุฉ. + +ุงูƒุชู†ุฒุงุช ุงู„ุชุฌุฑูŠุจ ุนู„ู‰ ู…ุงูƒูŠู†ุฉ Apple M2 Macbook 2023 ู…ุน 16GB ู…ู† ุงู„ุฐุงูƒุฑุฉ. ู„ุฅุนุงุฏุฉ ุฅู†ุชุงุฌ ู‡ุฐุง ุงู„ุงุฎุชุจุงุฑ: + +!!! 
Example "ู…ุซุงู„" + + === "ุงู„ุจุงูŠุซูˆู†" + ```ุงู„ุจุงูŠุซูˆู† + from ultralytics import FastSAM, SAM, YOLO + + # ุชุญู„ูŠู„ ูŠุงู…-b + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # ุชุญู„ูŠู„ MobileSAM + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # ุชุญู„ูŠู„ FastSAM-s + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # ุชุญู„ูŠู„ YOLOv8n-seg + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## ุชุนู„ู… ุชู„ู‚ุงุฆูŠ: ู…ุณุงุฑ ุณุฑูŠุน ุฅู„ู‰ ุณู„ุงุณู„ ุงู„ุจูŠุงู†ุงุช ุงู„ุฎุงุตุฉ ุจุงู„ุชุฌุฒุฆุฉ + +ุงู„ุชุนู„ู… ุงู„ุชู„ู‚ุงุฆูŠ ู‡ูˆ ู…ูŠุฒุฉ ุฑุฆูŠุณูŠุฉ ู„ู€ SAMุŒ ุญูŠุซ ูŠุณู…ุญ ู„ู„ู…ุณุชุฎุฏู…ูŠู† ุจุฅู†ุดุงุก [ุณู„ุงุณู„ ุจูŠุงู†ุงุช ุชุฌุฒุฆุฉ](https://docs.ultralytics.com/datasets/segment) ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ ุงู„ูƒุดู ุงู„ุฌุงู‡ุฒ. ูŠุชูŠุญ ู‡ุฐุง ุงู„ู…ูŠุฒุฉ ุฅู†ุดุงุก ูˆุชุญุฏูŠุซ ุณุฑูŠุน ูˆุฏู‚ูŠู‚ ู„ุนุฏุฏ ูƒุจูŠุฑ ู…ู† ุงู„ุตูˆุฑ ุจุฏูˆู† ุงู„ุญุงุฌุฉ ุฅู„ู‰ ุนู…ู„ูŠุฉ ุงู„ุชุณู…ูŠุฉ ุงู„ูŠุฏูˆูŠุฉ ุงู„ุจุทูŠุฆุฉ. + +### ุฅู†ุดุงุก ุณู„ุงุณู„ ุงู„ุจูŠุงู†ุงุช ุงู„ุฎุงุตุฉ ุจุงู„ุชุฌุฒุฆุฉ ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ ุงู„ูƒุดู + +ู„ู„ุชุนู„ูŠู… ุงู„ุชู„ู‚ุงุฆูŠ ู„ุณู„ุงุณู„ ุงู„ุจูŠุงู†ุงุช ุงู„ุฎุงุตุฉ ุจุงู„ุชุฌุฒุฆุฉ ุจุงุณุชุฎุฏุงู… ุฅุทุงุฑ ุงู„ุนู…ู„ Ultralytics ุŒ ุงุณุชุฎุฏู… ูˆุธูŠูุฉ 'auto_annotate' ูƒู…ุง ู‡ูˆ ู…ูˆุถุญ ุฃุฏู†ุงู‡: + +!!! Example "ู…ุซุงู„" + + === "ุงู„ุจุงูŠุซูˆู†" + ```ุงู„ุจุงูŠุซูˆู† + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="ู…ุณุงุฑ/ุฅู„ู‰/ุตูˆุฑ", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| ุงู„ูˆุณูŠุทุฉ | ุงู„ู†ูˆุน | ุงู„ูˆุตู | ุงู„ุงูุชุฑุงุถูŠ | +|------------|------------------------|---------------------------------------------------------------------------------------------------------------------------|--------------| +| ุงู„ุจูŠุงู†ุงุช | ุณู„ุณู„ุฉ | ุงู„ู…ุณุงุฑ ุฅู„ู‰ ุงู„ู…ุฌู„ุฏ ุงู„ุฐูŠ ูŠุญุชูˆูŠ ุนู„ู‰ ุงู„ุตูˆุฑ ุงู„ุชูŠ ุณูŠุชู… ุงู„ุฅุดุงุฑุฉ ุฅู„ูŠู‡ุง. | | +| det_model | ุณู„ุณู„ุฉุŒ ุงุฎุชูŠุงุฑูŠ | ู†ู…ูˆุฐุฌ ุงู„ูƒุดู ุงู„ู…ุฏุฑุจ ุงู„ู…ุณุจู‚ ู„ู€ YOLO. ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | ุณู„ุณู„ุฉุŒ ุงุฎุชูŠุงุฑูŠ | ู†ู…ูˆุฐุฌ ุงู„ู‚ุทุนุฉ ุฃูŠุง ุดูŠุก ู…ู† Ultralytics. ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ 'sam_b.pt'. | 'sam_b.pt' | +| device | ุณู„ุณู„ุฉุŒ ุงุฎุชูŠุงุฑูŠ | ุงู„ุฌู‡ุงุฒ ุงู„ุฐูŠ ูŠุชู… ุชุดุบูŠู„ ุงู„ู†ู…ุงุฐุฌ ุนู„ูŠู‡. ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ ุณู„ุณู„ุฉ ูุงุฑุบุฉ (ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ ุฃูˆ ูˆุญุฏุฉ ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ุงุช ุฅุฐุง ุชูˆุงูุฑุช). | | +| output_dir | ุณู„ุณู„ุฉุŒ ู„ุง ุดูŠุกุŒ ุงุฎุชูŠุงุฑูŠ | ุงู„ุฏู„ูŠู„ ู„ุญูุธ ุงู„ู†ุชุงุฆุฌ ุงู„ู…ุฑุฆูŠุฉ. ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ ู…ุฌู„ุฏ 'ุงู„ุชุณู…ูŠุงุช' ููŠ ู†ูุณ ุฏู„ูŠู„ 'ุงู„ุจูŠุงู†ุงุช'. | ู„ุง ุดูŠุก | + +ูŠุฃุฎุฐ ุชุงุจุน 'auto_annotate' ุงู„ู…ุณุงุฑ ุฅู„ู‰ ุงู„ุตูˆุฑ ุงู„ุฎุงุตุฉ ุจูƒ ู…ุน ูˆุณูŠุทุงุช ุงุฎุชูŠุงุฑูŠุฉ ู„ุชุญุฏูŠุฏ ู†ู…ูˆุฐุฌ ุงู„ูƒุดู ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ูˆู†ู…ูˆุฐุฌ ุงู„ุชุฌุฒุฆุฉ SAM ูˆุงู„ุฌู‡ุงุฒ ุงู„ุฐูŠ ุณูŠุชู… ุชุดุบูŠู„ ุงู„ู†ู…ุงุฐุฌ ุจู‡ ูˆุงู„ุฏู„ูŠู„ ุงู„ุฎุฑูˆุฌ ู„ุญูุธ ุงู„ู†ุชุงุฆุฌ ุงู„ู…ุฑุฆูŠุฉ. + +ุชุนู„ู… ุชู„ู‚ุงุฆูŠู‹ุง ุจุงุณุชุฎุฏุงู… ู†ู…ุงุฐุฌ ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ูŠู…ูƒู† ุฃู† ูŠู‚ู„ู„ ุจุดูƒู„ ูƒุจูŠุฑ ู…ู† ุงู„ูˆู‚ุช ูˆุงู„ุฌู‡ุฏ ุงู„ู…ุทู„ูˆุจ ู„ุฅู†ุดุงุก ุณู„ุงุณู„ ุจูŠุงู†ุงุช ุชุฌุฒุฆุฉ ุนุงู„ูŠุฉ ุงู„ุฌูˆุฏุฉ. 
+This is particularly beneficial for researchers and developers working with large image collections, as it allows them to focus on model development and evaluation rather than slow manual annotation.
+
+## Citations and Acknowledgements
+
+If you find SAM useful in your research or development work, please consider citing our paper:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{kirillov2023segment,
+              title={Segment Anything},
+              author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick},
+              year={2023},
+              eprint={2304.02643},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community.
+
+*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot transfer, machine learning, segment, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, artificial intelligence, data labeling, segmentation masks, detection model, YOLO detection model, bibtex, Meta AI.*
diff --git a/ultralytics/docs/ar/models/sam.md:Zone.Identifier b/ultralytics/docs/ar/models/sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/yolo-nas.md b/ultralytics/docs/ar/models/yolo-nas.md
new file mode 100755
index 0000000..29ea0cb
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolo-nas.md
@@ -0,0 +1,121 @@
+---
+comments: true
+description: Explore the detailed documentation of YOLO-NAS, a superior object detection model. Learn about its features, pre-trained models, usage with the Ultralytics Python API, and more.
+keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
+---
+
+# YOLO-NAS
+
+## Overview
+
+Developed by Deci AI, YOLO-NAS is a groundbreaking object detection foundational model. It is the product of advanced Neural Architecture Search technology, meticulously designed to address the limitations of previous YOLO models. With significant improvements in quantization support and accuracy-latency trade-offs, YOLO-NAS represents a major leap forward in object detection.
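+
+Since quantization support is central to the YOLO-NAS design, a brief sketch of producing an INT8 artifact may be helpful before the architecture details below. This is an assumption-laden example rather than an official recipe: the Ultralytics export API exposes an `int8` flag for certain backends such as TensorRT, but whether your environment and the NAS wrapper support the chosen format end-to-end should be verified against the export documentation.
+
+```python
+from ultralytics import NAS
+
+# Load the pre-trained YOLO-NAS-s weights
+model = NAS('yolo_nas_s.pt')
+
+# Hypothetical INT8 export through the generic Ultralytics export API;
+# availability of format='engine' with int8=True depends on the installed
+# backends and the NAS wrapper (assumption, not confirmed by this page)
+model.export(format='engine', int8=True)
+```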
+
+![Model example image](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png)
+**Overview of YOLO-NAS.** YOLO-NAS employs quantization-aware blocks and selective quantization for optimal performance. The model, when converted to its INT8 quantized version, experiences only a minimal accuracy drop, a significant improvement over other models. These advancements culminate in a superior architecture with unprecedented object detection capabilities and outstanding performance.
+
+### Key Features
+
+- **Quantization-Friendly Basic Block:** YOLO-NAS introduces a new basic block that is friendly to quantization, addressing one of the significant limitations of previous YOLO models.
+- **Sophisticated Training and Quantization:** YOLO-NAS leverages advanced training schemes and post-training quantization to enhance performance.
+- **AutoNAC Optimization and Pre-training:** YOLO-NAS utilizes AutoNAC optimization and is pre-trained on prominent datasets such as COCO, Objects365, and Roboflow 100. This pre-training makes it extremely suitable for downstream object detection tasks in production environments.
+
+## Pre-trained Models
+
+Experience the power of next-generation object detection with the pre-trained YOLO-NAS models provided by Ultralytics. These models are designed to deliver top-notch performance in terms of both speed and accuracy. Choose from a variety of options tailored to your specific needs:
+
+| Model            | mAP   | Latency (ms) |
+|------------------|-------|--------------|
+| YOLO-NAS S       | 47.5  | 3.21         |
+| YOLO-NAS M       | 51.55 | 5.85         |
+| YOLO-NAS L       | 52.22 | 7.87         |
+| YOLO-NAS S INT-8 | 47.03 | 2.36         |
+| YOLO-NAS M INT-8 | 51.0  | 3.78         |
+| YOLO-NAS L INT-8 | 52.1  | 4.78         |
+
+Each model variant is designed to offer a balance between Mean Average Precision (mAP) and latency, helping you optimize your object detection tasks for both performance and speed.
+
+## Usage Examples
+
+Ultralytics has made the YOLO-NAS models easy to integrate into your Python applications via our `ultralytics` package. The package provides a user-friendly Python API to streamline the process.
+
+The following examples show how to use the YOLO-NAS models with the `ultralytics` package for detection and validation:
+
+### Detection and Validation Examples
+
+In this example we validate YOLO-NAS-s on the COCO8 dataset.
+
+!!! Example "Example"
+
+    This example provides simple detection and validation code for YOLO-NAS. For handling inference results, see the [Predict](../modes/predict.md) mode.
+    For using YOLO-NAS with additional modes, see [Val](../modes/val.md) and [Export](../modes/export.md). YOLO-NAS with the `ultralytics` package does not support training.
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` model files can be passed to the `NAS()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import NAS
+
+        # Load a COCO-pretrained YOLO-NAS-s model
+        model = NAS('yolo_nas_s.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Validate the model on the COCO8 example dataset
+        results = model.val(data='coco8.yaml')
+
+        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
+        yolo val model=yolo_nas_s.pt data=coco8.yaml
+
+        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
+        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+We offer three variants of the YOLO-NAS models: Small (s), Medium (m), and Large (l). Each variant is designed to cater to different computational and performance needs:
+
+- **YOLO-NAS-s**: Optimized for environments where computational resources are limited and efficiency is key.
+- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
+- **YOLO-NAS-l**: Tailored for scenarios requiring the highest accuracy, where computational resources are less of a constraint.
+
+Below is a detailed overview of each model, including links to their pre-trained weights, the tasks they support, and their compatibility with the various operating modes.
+
+| Model Type | Pre-trained Weights                                                                            | Supported Tasks                        | Inference | Validation | Training | Export |
+|------------|------------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
+| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
+| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
+
+## Citations and Acknowledgements
+
+If you employ YOLO-NAS in your research or development work, please cite SuperGradients:
+
Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +ู†ุนุจุฑ ุนู† ุงู…ุชู†ุงู†ู†ุง ู„ูุฑูŠู‚ [SuperGradients](https://github.com/Deci-AI/super-gradients/) ููŠ Deci AI ู„ุฌู‡ูˆุฏู‡ู… ููŠ ุฅู†ุดุงุก ูˆุตูŠุงู†ุฉ ู‡ุฐุฉ ุงู„ู…ูˆุงุฑุฏ ุงู„ู‚ูŠู…ุฉ ู„ู…ุฌุชู…ุน ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจ. ู†ุนุชู‚ุฏ ุฃู† YOLO-NAS ุŒ ุจุชุตู…ูŠู…ู‡ ุงู„ู…ุจุชูƒุฑ ูˆู‚ุฏุฑุชู‡ ุงู„ุงุณุชุดุนุงุฑ ุงู„ู…ุชููˆู‚ุฉ ู„ู„ูƒุงุฆู†ุงุช ุŒ ุณูŠุตุจุญ ุฃุฏุงุฉ ุญุงุณู…ุฉ ู„ู„ู…ุทูˆุฑูŠู† ูˆุงู„ุจุงุญุซูŠู† ุนู„ู‰ ุญุฏ ุณูˆุงุก. + +*keywords: YOLO-NAS, Deci AI, ูƒุดู ุงู„ูƒุงุฆู†ุงุช, deep learning, ุงู„ุจุญุซ ููŠ ุงู„ู‡ู†ุฏุณุฉ ุงู„ุนุตุจูŠุฉ, ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ Ultralytics Python, ู†ู…ูˆุฐุฌ YOLO, SuperGradients, ุงู„ุทุฑุฒ ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง, ูƒุชู„ุฉ ุฃุณุงุณูŠุฉ ูˆุฏูŠุฉ ู„ู„ุชู…ุซูŠู„ ุงู„ูƒู…ูˆู†ูŠ, ุฃู†ุธู…ุฉ ุชุฏุฑูŠุจ ู…ุชุทูˆุฑุฉ, ุชู…ุซูŠู„ ูƒู…ูŠ ุจุนุฏ ุงู„ุชุฏุฑูŠุจ, ุชุญุณูŠู† AutoNAC, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/ar/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/ar/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/models/yolov3.md b/ultralytics/docs/ar/models/yolov3.md new file mode 100755 index 0000000..d34e915 --- /dev/null +++ b/ultralytics/docs/ar/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: ุงุญุตู„ ุนู„ู‰ ู†ุธุฑุฉ ุนุงู…ุฉ ุญูˆู„ YOLOv3 ูˆ YOLOv3-Ultralytics ูˆ YOLOv3u. ุชุนุฑู ุนู„ู‰ ู…ูŠุฒุงุชู‡ุง ุงู„ุฑุฆูŠุณูŠุฉ ูˆุงุณุชุฎุฏุงู…ู‡ุง ูˆุงู„ู…ู‡ุงู… ุงู„ู…ุฏุนูˆู…ุฉ ู„ู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช. +keywords: YOLOv3ุŒ YOLOv3-UltralyticsุŒ YOLOv3uุŒ ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุชุŒ ุฅุฌุฑุงุกุŒ ุงู„ุชุฏุฑูŠุจุŒ Ultralytics +--- + +# YOLOv3 ูˆ YOLOv3-Ultralytics ูˆ YOLOv3u + +## ุงู„ู†ุธุฑุฉ ุงู„ุนุงู…ุฉ + +ูŠู‚ุฏู… ู‡ุฐุง ุงู„ูˆุซูŠู‚ุฉ ู†ุธุฑุฉ ุนุงู…ุฉ ุนู„ู‰ ุซู„ุงุซ ู†ู…ุงุฐุฌ ู…ุฑุชุจุทุฉ ุจูƒุดู ุงู„ูƒุงุฆู†ุงุช ุŒ ูˆู‡ูŠ [YOLOv3](https://pjreddie.com/darknet/yolo/) ูˆ [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) ูˆ [YOLOv3u](https://github.com/ultralytics/ultralytics). + +1. **YOLOv3:** ู‡ุฐู‡ ู‡ูŠ ุงู„ุฅุตุฏุงุฑ ุงู„ุซุงู„ุซ ู…ู† ุฎูˆุงุฑุฒู…ูŠุฉ You Only Look Once (YOLO) ู„ู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช. ู‚ุงู… ุฌูˆุฒูŠู ุฑูŠุฏู…ูˆู† ุจุชุทูˆูŠุฑู‡ุง ุจุงู„ุฃุตู„ ุŒ ูˆู‚ุฏ ู‚ุงู…ุช YOLOv3 ุจุชุญุณูŠู† ุณุงุจู‚ูŠู‡ุง ู…ู† ุฎู„ุงู„ ุฅุฏุฎุงู„ ู…ูŠุฒุงุช ู…ุซู„ ุงู„ุชู†ุจุคุงุช ู…ุชุนุฏุฏุฉ ุงู„ู…ู‚ูŠุงุณ ูˆุซู„ุงุซุฉ ุฃุญุฌุงู… ู…ุฎุชู„ูุฉ ู…ู† ู†ูˆู‰ ุงู„ูƒุดู. + +2. **YOLOv3-Ultralytics:** ู‡ุฐู‡ ู‡ูŠ ุชู†ููŠุฐ Ultralytics ู„ู†ู…ูˆุฐุฌ YOLOv3. ูŠู‚ูˆู… ุจุฅุนุงุฏุฉ ุฅู†ุชุงุฌ ุจู†ูŠุฉ YOLOv3 ุงู„ุฃุตู„ูŠุฉ ูˆูŠู‚ุฏู… ูˆุธุงุฆู ุฅุถุงููŠุฉ ุŒ ู…ุซู„ ุฏุนู… ุงู„ู…ุฒูŠุฏ ู…ู† ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ูˆุฎูŠุงุฑุงุช ุชุฎุตูŠุต ุฃุณู‡ู„. + +3. 
+3. **YOLOv3u:** This is the updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split detection head used in the YOLOv8 models. YOLOv3u maintains the same backbone and neck architecture as YOLOv3 but with the updated detection head from YOLOv8.
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three different scales for detection, leveraging three different sizes of detection kernels: 13x13, 26x26 and 52x52. This significantly improved detection accuracy for objects of different sizes. Additionally, YOLOv3 added features such as multi-label predictions for each bounding box and a better feature extractor network.
+
+- **YOLOv3-Ultralytics:** Ultralytics' implementation of YOLOv3 provides the same performance as the original model but comes with added support for more pre-trained models, additional training methods, and easier customization options. This makes it more versatile and user-friendly for practical applications.
+
+- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head from YOLOv8. By eliminating the need for pre-defined anchor boxes and objectness scores, this detection-head design can improve the model's ability to detect objects of varying sizes and shapes. This makes YOLOv3u more robust and accurate for object detection tasks.
+
+## Supported Tasks and Modes
+
+The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics, and YOLOv3u, is designed specifically for object detection tasks. These models are renowned for their effectiveness in various real-world scenarios, balancing detection accuracy and speed. Each variant offers unique features and optimizations, making them suitable for a range of applications.
+
+All three models support a comprehensive set of modes, ensuring versatility at various stages of model deployment and development. These modes include [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), providing users with a complete toolkit for effective object detection.
+
+| Model Type         | Supported Tasks                        | Inference | Validation | Training | Export |
+|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv3             | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3u            | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides an at-a-glance view of the capabilities of each YOLOv3 variant, highlighting their versatility and suitability for various tasks and operational modes in object detection workflows.
+
+## Usage Examples
+
+This example provides simple YOLOv3 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv3n model
+        model = YOLO('yolov3n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv3n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv3n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv3n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov3n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use YOLOv3 in your research, please cite the original YOLO papers and the Ultralytics YOLOv3 repository:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{redmon2018yolov3,
+          title={YOLOv3: An Incremental Improvement},
+          author={Redmon, Joseph and Farhadi, Ali},
+          journal={arXiv preprint arXiv:1804.02767},
+          year={2018}
+        }
+        ```
+
+Thank you to Joseph Redmon and Ali Farhadi for developing the original YOLOv3.
diff --git a/ultralytics/docs/ar/models/yolov3.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov3.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov3.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/yolov4.md b/ultralytics/docs/ar/models/yolov4.md
new file mode 100755
index 0000000..1cf5068
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov4.md
@@ -0,0 +1,72 @@
+---
+comments: true
+description: Explore our detailed guide to YOLOv4, a state-of-the-art real-time object detector. Understand its architectural highlights, innovative features, and application examples.
+keywords: ultralytics, yolo v4, object detection, neural network, real-time detection, object detector, machine learning
+---
+
+# YOLOv4: High-Speed and Accurate Object Detection
+
+Welcome to the Ultralytics documentation page for YOLOv4, a state-of-the-art real-time object detector released in 2020 by Alexey Bochkovskiy at [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 is designed to provide the optimal balance between speed and accuracy, making it an excellent choice for many applications.
+
+![YOLOv4 architecture diagram](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png)
+**YOLOv4 architecture diagram**. Showcasing the intricate network design of YOLOv4, including the backbone, neck, and head components and their interconnected layers for effective real-time object detection.
+
+## Introduction
+
+YOLOv4 stands for You Only Look Once version 4. It is a real-time object detection model developed to address the limitations of previous YOLO versions such as [YOLOv3](yolov3.md) and other object detection models. Unlike other convolutional neural network based object detectors, YOLOv4 is applicable to recommendation systems as well as standalone process management and human-input reduction. Its operation on conventional graphics processing units (GPUs) allows for mass usage at an affordable price, and it is designed to work in real time on a conventional GPU while requiring only one such GPU for training.
+
+## Architecture
+
+YOLOv4 makes use of several innovative features that work together to optimize its performance. These features include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish activation, Mosaic data augmentation, DropBlock regularization, and CIoU loss. These features are combined to achieve state-of-the-art results.
+
+A typical object detector is composed of several parts, including the input, the backbone, the neck, and the head. The backbone of YOLOv4 is pre-trained on ImageNet and is used to predict the classes and bounding boxes of objects. The backbone can come from several models, including VGG, ResNet, ResNeXt, or DenseNet. The neck part of the detector is used to collect feature maps from different stages and usually includes several bottom-up paths and several top-down paths. The head part is what is used for the final object detection and classification.
+
+## Bag of Freebies
+
+YOLOv4 also makes use of methods known as a "bag of freebies," techniques that improve the accuracy of the model during training without increasing the cost of inference. Data augmentation is a common bag-of-freebies technique used in object detection, increasing the variability of the input images to improve the robustness of the model. Some examples of data augmentation include photometric distortions (adjusting the brightness, contrast, hue, saturation, and noise of an image) and geometric distortions (adding random scaling, cropping, flipping, and rotating). These techniques help the model generalize better to different types of images.
+
+## Features and Performance
+
+YOLOv4 is designed for optimal speed and accuracy in object detection. Its architecture includes CSPDarknet53 as the backbone, PANet as the neck, and YOLOv3 as the detection head. This design allows YOLOv4 to perform object detection at an impressive speed, making it suitable for real-time applications. YOLOv4 also excels in accuracy, achieving state-of-the-art results on object detection benchmarks.
+
+## Usage Examples
+
+As of the time of writing, Ultralytics does not currently support YOLOv4 models. Therefore, any users interested in using YOLOv4 will need to refer directly to the YOLOv4 GitHub repository for installation and usage instructions.
+
+Here is a brief overview of the typical steps you might take to use YOLOv4:
+
+1. Visit the YOLOv4 GitHub repository: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet).
+
+2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing the necessary dependencies, and setting up any required environment variables.
+
+3. Once installation is complete, you can train and use the model as per the usage instructions provided in the repository. This usually involves preparing your dataset, configuring the model parameters, training the model, and then using the trained model to perform object detection.
+
+Please note that the specific steps may vary depending on your use case and the current state of the YOLOv4 repository. Therefore, it is strongly recommended to refer directly to the instructions provided in the YOLOv4 GitHub repository.
+
+We regret any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once support for YOLOv4 is implemented.
+
+## Conclusion
+
+YOLOv4 is a powerful and efficient object detection model that strikes a balance between speed and accuracy. Its use of unique features and bag-of-freebies techniques during training allows it to perform excellently in real-time object detection tasks. YOLOv4 can be trained and used by anyone with a conventional GPU, making it accessible and practical for a wide range of applications.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the YOLOv4 authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{bochkovskiy2020yolov4,
+          title={YOLOv4: Optimal Speed and Accuracy of Object Detection},
+          author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao},
+          year={2020},
+          eprint={2004.10934},
+          archivePrefix={arXiv},
+          primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv4 paper can be found on [arXiv](https://arxiv.org/abs/2004.10934). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/AlexeyAB/darknet). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/ar/models/yolov4.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov4.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov4.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/yolov5.md b/ultralytics/docs/ar/models/yolov5.md
new file mode 100755
index 0000000..32481a3
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov5.md
@@ -0,0 +1,107 @@
+---
+comments: true
+description: Discover YOLOv5u, an enhanced version of the YOLOv5 model featuring an improved accuracy-speed tradeoff and numerous pre-trained models for various object detection tasks.
+keywords: YOLOv5u, object detection, pre-trained models, Ultralytics, inference, validation, YOLOv5, YOLOv8, anchor-free, objectness-free, real-world applications, machine learning
+---
+
+# YOLOv5
+
+## Overview
+
+YOLOv5u represents an advancement in object detection methodologies. Originating from the foundational architecture of the [YOLOv5](https://github.com/ultralytics/yolov5) model developed by Ultralytics, YOLOv5u integrates the anchor-free, objectness-free split head that was introduced in the [YOLOv8](yolov8.md) models. This adaptation refines the model's architecture, leading to an improved accuracy-speed tradeoff in object detection tasks. Given the empirical results and its derived features, YOLOv5u provides an efficient alternative for those seeking robust solutions in both research and practical applications.
+
+![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png)
+
+## Key Features
+
+- **Anchor-free Split Ultralytics Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. YOLOv5u modernizes this approach: by adopting an anchor-free split Ultralytics head, it ensures a more flexible and adaptive detection mechanism, consequently enhancing performance in diverse scenarios.
+
+- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions, but YOLOv5u challenges this tradeoff. It offers a calibrated balance, ensuring real-time detections without compromising accuracy. This feature is particularly valuable for applications that demand swift responses, such as autonomous vehicles, robotics, and real-time video analytics.
+
+- **Variety of Pre-trained Models:** Understanding that different tasks call for different toolsets, YOLOv5u provides a wide range of pre-trained models. Whether you're focusing on Inference, Validation, or Training, there's a tailor-made model awaiting you. This variety ensures you're not using a one-size-fits-all solution, but a model fine-tuned to your unique needs.
+
+## Supported Tasks and Modes
+
+The YOLOv5u models, with various pre-trained weights, excel in [Object Detection](../tasks/detect.md) tasks. They support a comprehensive range of modes, making them suitable for diverse applications, from development to deployment.
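+
+As a minimal sketch of how two of the pre-trained variants differ in practice (illustrative only; it assumes the `yolov5nu.pt` and `yolov5n6u.pt` weights listed in the tables below are resolved automatically by the Ultralytics `YOLO()` API), note that the P6 models are trained at a 1280 input size:
+
+```python
+from ultralytics import YOLO
+
+# P5 variant, trained at an image size of 640
+model_p5 = YOLO('yolov5nu.pt')
+results = model_p5('path/to/bus.jpg', imgsz=640)
+
+# P6 variant, trained at an image size of 1280; pass the larger size at inference
+model_p6 = YOLO('yolov5n6u.pt')
+results = model_p6('path/to/bus.jpg', imgsz=1280)
+```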
+
+| Model Type | Pre-trained Weights                                                                                                         | Task                                   | Inference | Validation | Training | Export |
+|------------|-----------------------------------------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv5u    | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides a detailed overview of the YOLOv5u model variants, highlighting their applicability in object detection tasks and their support for various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv5u models in a wide range of object detection scenarios.
+
+## Performance
+
+!!! Performance
+
+    === "Detection"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | YAML | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 |
+        | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 |
+        | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 |
+        | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 |
+        | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 |
+        | | | | | | | | |
+        | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 |
+        | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 |
+        | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 |
+        | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 |
+        | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
+
+## Usage Examples
+
ูŠูู…ูƒู† ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ู…ุซูŠู„ ููŠ ุงู„ุจุฑู…ุฌุฉ ุจุงุณุชุฎุฏุงู… ู†ู…ุงุฐุฌ PyTorch ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ููŠ ุตูŠุบุฉ `*.pt` ูˆู…ู„ูุงุช ุงู„ุชูƒูˆูŠู† `*.yaml`: + +```python +from ultralytics import YOLO + +#ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv5n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO +model = YOLO('yolov5n.pt') + +# ู‚ู… ุจุนุฑุถ ู…ุนู„ูˆู…ุงุช ุงู„ู†ู…ูˆุฐุฌ (ุงุฎุชูŠุงุฑูŠ) +model.info() + +# ู‚ู… ุจุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช COCO8 ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ +results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + +# ู‚ู… ุจุชุดุบูŠู„ ุงู„ุชุดุฎูŠุต ุจู†ู…ูˆุฐุฌ YOLOv5n ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' +results = model('path/to/bus.jpg') +``` + +=== "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + + ูŠุชุงุญ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ู„ุชุดุบูŠู„ ุงู„ู†ู…ุงุฐุฌ ู…ุจุงุดุฑุฉ: + + ```bash + # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv5n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO8 ูˆู‚ู… ุจุชุฏุฑูŠุจู‡ ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv5n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO8 ูˆุชุดุบูŠู„ ุญุงู„ุฉ ุงู„ุชุดุฎูŠุต ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## ุงู„ุงุณุชุดู‡ุงุฏุงุช ูˆุงู„ุชู‚ุฏูŠุฑ + +ุฅุฐุง ู‚ู…ุช ุจุงุณุชุฎุฏุงู… YOLOv5 ุฃูˆ YOLOv5u ููŠ ุจุญุซูƒุŒ ูŠุฑุฌู‰ ุงุณุชุดู‡ุงุฏ ู†ู…ูˆุฐุฌ Ultralytics YOLOv5 ุจุทุฑูŠู‚ุฉ ุงู„ุงู‚ุชุจุงุณ ุงู„ุชุงู„ูŠุฉ: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +ูŠุฑุฌู‰ ู…ู„ุงุญุธุฉ ุฃู† ู†ู…ุงุฐุฌ YOLOv5 ู…ุชุงุญุฉ ุจุชุฑุฎูŠุต [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ูˆ[Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/ar/models/yolov5.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/models/yolov6.md b/ultralytics/docs/ar/models/yolov6.md new file mode 100755 index 0000000..12dd557 --- /dev/null +++ b/ultralytics/docs/ar/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: ุงุณุชูƒุดู ู†ู…ูˆุฐุฌ Meituan YOLOv6 ู„ู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช ุงู„ุญุฏูŠุซุฉุŒ ูˆุงู„ุฐูŠ ูŠูˆูุฑ ุชูˆุงุฒู†ู‹ุง ู…ุฐู‡ู„ุงู‹ ุจูŠู† ุงู„ุณุฑุนุฉ ูˆุงู„ุฏู‚ุฉุŒ ู…ู…ุง ูŠุฌุนู„ู‡ ุงู„ุฎูŠุงุฑ ุงู„ุฃู…ุซู„ ู„ุชุทุจูŠู‚ุงุช ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ. ุชุนุฑู‘ู ุนู„ู‰ ุงู„ู…ูŠุฒุงุช ูˆุงู„ู†ู…ุงุฐุฌ ุงู„ู…ูุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ูˆุงุณุชุฎุฏุงู… Python. +keywords: Meituan YOLOv6ุŒ ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุชุŒ UltralyticsุŒ YOLOv6 docsุŒ Bi-directional ConcatenationุŒ ุชุฏุฑูŠุจ ุจู…ุณุงุนุฏุฉ ุงู„ุนู†ุงุตุฑุŒ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ุงุŒ ุชุทุจูŠู‚ุงุช ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ +--- + +# Meituan YOLOv6 + +## ู†ุธุฑุฉ ุนุงู…ุฉ + +[Meituan](https://about.meituan.com/) YOLOv6 ู‡ูˆ ู…ู†ุธู‘ู ุงู„ูƒุงุฆู†ุงุช ุงู„ุญุฏูŠุซุฉ ุงู„ุญุฏูŠุซุฉ ุงู„ุฐูŠ ูŠูู‚ุฏู… ุชูˆุงุฒู†ู‹ุง ู…ู„ุญูˆุธู‹ุง ุจูŠู† ุงู„ุณุฑุนุฉ ูˆุงู„ุฏู‚ุฉุŒ ู…ู…ุง ูŠุฌุนู„ู‡ ุฎูŠุงุฑู‹ุง ุดุงุฆุนู‹ุง ู„ุชุทุจูŠู‚ุงุช ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ. 
ูŠูู‚ุฏู… ู‡ุฐุง ุงู„ู†ู…ูˆุฐุฌ ุงู„ุนุฏูŠุฏ ู…ู† ุงู„ุชุญุณูŠู†ุงุช ุงู„ู…ู„ุญูˆุธุฉ ููŠ ุจู†ูŠุชู‡ ูˆู†ุธุงู… ุงู„ุชุฏุฑูŠุจุŒ ุจู…ุง ููŠ ุฐู„ูƒ ุชุทุจูŠู‚ ูˆุญุฏุฉ Bi-directional Concatenation (BiC)ุŒ ูˆุงุณุชุฑุงุชูŠุฌูŠุฉ AAT (anchor-aided training) ุงู„ุชูŠ ุชุนุชู…ุฏ ุนู„ู‰ ุงู„ุนู†ุงุตุฑุŒ ูˆุชุตู…ูŠู… ู…ุญุณู‘ู† ู„ู„ุฃุณุงุณ ูˆุงู„ุฑู‚ุจุฉ ู„ุชุญู‚ูŠู‚ ุฃุฏุงุก ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO ูŠููˆู‚ ุฌู…ูŠุน ุงู„ู†ู…ุงุฐุฌ ุงู„ุฃุฎุฑู‰. + +![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**ู†ุธุฑุฉ ุนุงู…ุฉ ุนู„ู‰ YOLOv6.** ู…ุฎุทุท ุจู†ูŠุฉ ุงู„ู†ู…ูˆุฐุฌ ูŠูˆุถุญ ุงู„ู…ูƒูˆู†ุงุช ุงู„ู…ุนุงุฏ ุชุตู…ูŠู…ู‡ุง ูˆุงุณุชุฑุงุชูŠุฌูŠุงุช ุงู„ุชุฏุฑูŠุจ ุงู„ุชูŠ ุฃุฏุช ุฅู„ู‰ ุชุญุณูŠู†ุงุช ุฃุฏุงุก ูƒุจูŠุฑุฉ. (ุฃ) ุงู„ุฑู‚ุจุฉ ุงู„ุฎุงุตุฉ ุจู€ YOLOv6 (N ูˆ S ู…ุนุฑูˆุถุงู†). ู„ุงุญุธ ุฃู†ู‡ ุจุงู„ู†ุณุจุฉ ู„ู…/nุŒ ูŠุชู… ุงุณุชุจุฏุงู„ RepBlocks ุจู€ CSPStackRep. (ุจ) ู‡ูŠูƒู„ ูˆุญุฏุฉ BiC. (ุฌ) ู…ูƒูˆู† SimCSPSPPF. ([ุงู„ู…ุตุฏุฑ](https://arxiv.org/pdf/2301.05586.pdf)). + +### ู…ูŠุฒุงุช ุฑุฆูŠุณูŠุฉ + +- **ูˆุญุฏุฉ Bi-directional Concatenation (BiC):** ูŠู‚ุฏู… YOLOv6 ูˆุญุฏุฉ BiC ููŠ ุงู„ุฑู‚ุจุฉ ุงู„ุชุงุจุนุฉ ู„ู„ูƒุงุดูุŒ ู…ู…ุง ูŠุนุฒุฒ ุฅุดุงุฑุงุช ุงู„ุชุญุฏูŠุฏ ุงู„ู…ุญู„ูŠุฉ ูˆูŠุคุฏูŠ ุฅู„ู‰ ุฒูŠุงุฏุฉ ุงู„ุฃุฏุงุก ุฏูˆู† ุงู„ุชุฃุซูŠุฑ ุนู„ู‰ ุงู„ุณุฑุนุฉ. +- **ุงุณุชุฑุงุชูŠุฌูŠุฉ ุงู„ุชุฏุฑูŠุจ ุจู…ุณุงุนุฏุฉ ุงู„ุนู†ุงุตุฑ (AAT):** ูŠู‚ุฏู… ู‡ุฐุง ุงู„ู†ู…ูˆุฐุฌ ุงุณุชุฑุงุชูŠุฌูŠุฉ AAT ู„ู„ุงุณุชูุงุฏุฉ ู…ู† ููˆุงุฆุฏ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุณุชู†ุฏุฉ ุฅู„ู‰ ุงู„ุนู†ุงุตุฑ ูˆุบูŠุฑ ุงู„ู…ุณุชู†ุฏุฉ ุฅู„ูŠู‡ุง ุฏูˆู† ุงู„ุชุถุญูŠุฉ ููŠ ูƒูุงุกุฉ ุงู„ุงุณุชุฏู„ุงู„. +- **ุชุตู…ูŠู… ุฃุณุงุณ ูˆุฑู‚ุจุฉ ู…ุญุณู‘ู†:** ู…ู† ุฎู„ุงู„ ุชุนู…ูŠู‚ YOLOv6 ู„ุชุดู…ู„ ู…ุฑุญู„ุฉ ุฃุฎุฑู‰ ููŠ ุงู„ุฃุณุงุณ ูˆุงู„ุฑู‚ุจุฉุŒ ูŠุญู‚ู‚ ู‡ุฐุง ุงู„ู†ู…ูˆุฐุฌ ุฃุฏุงุกู‹ ูŠููˆู‚ ุฌู…ูŠุน ุงู„ู†ู…ุงุฐุฌ ุงู„ุฃุฎุฑู‰ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO ู„ุฅุฏุฎุงู„ ุนุงู„ูŠ ุงู„ุฏู‚ุฉ. +- **ุงุณุชุฑุงุชูŠุฌูŠุฉ ุงู„ุงุณุชู†ุจุงุท ุงู„ุฐุงุชูŠ:** ูŠุชู… ุชู†ููŠุฐ ุงุณุชุฑุงุชูŠุฌูŠุฉ ุงุณุชู†ุชุงุฌ ุฐุงุชูŠ ุฌุฏูŠุฏุฉ ู„ุชุนุฒูŠุฒ ุฃุฏุงุก ุงู„ู†ู…ุงุฐุฌ ุงู„ุตุบูŠุฑุฉ ู…ู† YOLOv6ุŒ ูˆุฐู„ูƒ ุนู† ุทุฑูŠู‚ ุชุนุฒูŠุฒ ูุฑุน ุงู„ุงู†ุญุฏุงุฑ ุงู„ู…ุณุงุนุฏ ุฎู„ุงู„ ุงู„ุชุฏุฑูŠุจ ูˆุฅุฒุงู„ุชู‡ ููŠ ุงู„ุงุณุชู†ุชุงุฌ ู„ุชุฌู†ุจ ุงู†ุฎูุงุถ ุงู„ุณุฑุนุฉ ุงู„ูˆุงุถุญ. + +## ู…ุนุงูŠูŠุฑ ุงู„ุฃุฏุงุก + +ูŠูˆูุฑ YOLOv6 ู…ุฌู…ูˆุนุฉ ู…ุชู†ูˆุนุฉ ู…ู† ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุจู…ู‚ุงูŠูŠุณ ู…ุฎุชู„ูุฉ: + +- YOLOv6-N: ูฃูง.ูฅูช AP ููŠ COCO val2017 ุนู†ุฏู…ุง ูŠุชู… ุงุณุชุฎุฏุงู… ุจุทุงู‚ุฉ NVIDIA Tesla T4 GPU ูˆุณุฑุนุฉ ูกูกูจูง ุฅุทุงุฑ ููŠ ุงู„ุซุงู†ูŠุฉ. +- YOLOv6-S: ูคูฅ.ู ูช AP ูˆุณุฑุนุฉ ูคูจูค ุฅุทุงุฑ ููŠ ุงู„ุซุงู†ูŠุฉ. +- YOLOv6-M: ูฅู .ู ูช AP ูˆุณุฑุนุฉ ูขูขูฆ ุฅุทุงุฑ ููŠ ุงู„ุซุงู†ูŠุฉ. +- YOLOv6-L: ูฅูข.ูจูช AP ูˆุณุฑุนุฉ ูกูกูฆ ุฅุทุงุฑ ููŠ ุงู„ุซุงู†ูŠุฉ. +- YOLOv6-L6: ุฏู‚ุฉ ุญุฏูŠุซุฉ ููŠ ุงู„ุฒู…ู† ุงู„ุญู‚ูŠู‚ูŠ. + +ูƒู…ุง ูŠูˆูุฑ YOLOv6 ู†ู…ุงุฐุฌ ู…ุคู†ู‚ุฉ (quantized models) ุจุฏู‚ุงุช ู…ุฎุชู„ูุฉ ูˆู†ู…ุงุฐุฌ ู…ุญุณู†ุฉ ู„ู„ู…ู†ุตุงุช ุงู„ู…ุญู…ูˆู„ุฉ. + +## ุฃู…ุซู„ุฉ ุนู† ุงู„ุงุณุชุฎุฏุงู… + +ูŠู‚ุฏู… ู‡ุฐุง ุงู„ู…ุซุงู„ ุฃู…ุซู„ุฉ ุจุณูŠุทุฉ ู„ุชุฏุฑูŠุจ YOLOv6 ูˆุงุณุชู†ุชุงุฌู‡. 
+This example provides simple YOLOv6 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models, as well as configuration `*.yaml` files, can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Build a YOLOv6n model from scratch
+        model = YOLO('yolov6n.yaml')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv6n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Build a YOLOv6n model from scratch and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640
+
+        # Build a YOLOv6n model from scratch and run inference on the 'bus.jpg' image
+        yolo predict model=yolov6n.yaml source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+The YOLOv6 series offers a range of models, each optimized for high-performance object detection. These models cater to varying computational needs and accuracy requirements, making them versatile for a wide array of applications.
+
+| Model Type | Pre-trained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv6-N   | `yolov6-n.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-S   | `yolov6-s.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-M   | `yolov6-m.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-L   | `yolov6-l.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-L6  | `yolov6-l6.pt`      | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides a detailed overview of the YOLOv6 model variants, highlighting their capabilities in object detection tasks and their compatibility with various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can take full advantage of the capabilities of YOLOv6 models in a wide range of object detection scenarios.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{li2023yolov6,
+          title={YOLOv6 v3.0: A Full-Scale Reloading},
+          author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu},
+          year={2023},
+          eprint={2301.05586},
+          archivePrefix={arXiv},
+          primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv6 paper can be found on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the code can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/ar/models/yolov6.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov6.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov6.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/models/yolov7.md b/ultralytics/docs/ar/models/yolov7.md
new file mode 100755
index 0000000..1cdcc63
--- /dev/null
+++ b/ultralytics/docs/ar/models/yolov7.md
@@ -0,0 +1,66 @@
+---
+comments: true
+description: Explore YOLOv7, a real-time object detector. Understand its superior speed, impressive accuracy, and unique trainable bag-of-freebies optimization focus.
+keywords: YOLOv7, real-time object detector, state-of-the-art, Ultralytics, MS COCO dataset, model re-parameterization, dynamic label assignment, extended scaling, compound scaling
+---
+
+# YOLOv7: Trainable Bag-of-Freebies
+
+YOLOv7 is a state-of-the-art real-time object detector that surpasses all known object detectors in both speed and accuracy in the range from 5 FPS to 160 FPS. It has the highest accuracy (56.8% AP) among all known real-time object detectors with 30 FPS or higher on GPU V100. Moreover, YOLOv7 outperforms other object detectors such as YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5, and many others in speed and accuracy. The model is trained on the MS COCO dataset from scratch, without using any other datasets or pre-trained weights. Source code for YOLOv7 is available on GitHub.
+
+![Comparison of YOLOv7 with state-of-the-art object detectors](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92)
+
+**Comparison of state-of-the-art object detectors.** From the results in Table 2 we know that the proposed method has the best speed-accuracy trade-off comprehensively. If we compare YOLOv7-tiny-SiLU with YOLOv5-N (r6.1), our method is 127 fps faster and 10.7% more accurate on AP. In addition, YOLOv7 has 51.4% AP at a frame rate of 161 fps, while PPYOLOE-L with the same AP has only a 78 fps frame rate. In terms of parameter usage, YOLOv7 uses 41% fewer parameters than PPYOLOE-L. If we compare YOLOv7-X with a 114 fps inference speed to YOLOv5-L (r6.1) with a 99 fps inference speed, YOLOv7-X can improve AP by 3.9%. If YOLOv7-X is compared with YOLOv5-X (r6.1) of a similar scale, the inference speed of YOLOv7-X is 31 fps faster. In addition, in terms of the number of parameters and computation, YOLOv7-X reduces parameters by 22% and computation by 8% compared to YOLOv5-X (r6.1), while improving AP by 2.2% ([source](https://arxiv.org/pdf/2207.02696.pdf)).
+
+## Overview
+
+Real-time object detection is an important component of many computer vision systems, including multi-object tracking, autonomous driving, robotics, and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving the inference speed of various CPUs, GPUs, and neural processing units (NPUs). YOLOv7 supports both mobile GPU and GPU devices, from the edge to the cloud.
+
+Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on optimizing the training process. This includes modules and optimization methods designed to improve the accuracy of object detection without increasing the inference cost, a concept known as the "trainable bag-of-freebies".
+
+## Key Features
+
+YOLOv7 introduces several key features:
+
+1. **Model Re-parameterization:** YOLOv7 proposes a planned re-parameterized model, a strategy applicable to layers in different networks based on the concept of the gradient propagation path.
+
+2. **Dynamic Label Assignment:** Training a model with multiple output layers raises a new issue: "How to assign dynamic targets for the outputs of different branches?" To solve this problem, YOLOv7 introduces a new label assignment method called coarse-to-fine lead guided label assignment.
+
+3. **Extended and Compound Scaling:** YOLOv7 proposes "extend" and "compound scaling" methods for the real-time object detector that can effectively utilize parameters and computation.
+
+4. **Efficiency:** The method proposed by YOLOv7 can effectively reduce about 40% of the parameters and 50% of the computation of the state-of-the-art real-time object detector while delivering faster inference and higher detection accuracy.
+
+## Usage Examples
+
+As of the time of writing, Ultralytics does not currently support YOLOv7 models. Therefore, any users interested in using YOLOv7 will need to refer directly to the YOLOv7 GitHub repository for installation and usage instructions.
+
+Here is a brief overview of the typical steps you might take to use YOLOv7:
+
+1. Visit the YOLOv7 GitHub repository: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7).
+
+2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing the necessary dependencies, and setting up any required environment variables.
+
+3. Once installation is complete, you can train and use the model as per the usage instructions provided in the repository. This usually involves preparing your dataset, configuring the model parameters, training the model, and then using the trained model to perform object detection.
+
+Please note that the specific steps may vary depending on your use case and the current state of the YOLOv7 repository. Therefore, it is strongly recommended to refer directly to the instructions provided in the YOLOv7 GitHub repository.
+
+We regret any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once support for YOLOv7 is implemented.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the YOLOv7 authors for their significant contributions in the field of real-time object detection:
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ูˆุฑู‚ุฉ YOLOv7 ุงู„ุฃุตู„ูŠุฉ ุนู„ู‰ [arXiv](https://arxiv.org/pdf/2207.02696.pdf). ู‚ุฏู… ุงู„ูƒุชุงุจ ุนู…ู„ู‡ู… ุนู„ู†ูŠุงู‹ุŒ ูˆูŠู…ูƒู† ุงู„ูˆุตูˆู„ ุฅู„ู‰ ู‚ุงุนุฏุฉ ุงู„ุดูŠูุฑุฉ ุนู„ู‰ [GitHub](https://github.com/WongKinYiu/yolov7). ู†ุญู† ู†ู‚ุฏุฑ ุฌู‡ูˆุฏู‡ู… ููŠ ุชู‚ุฏู… ุงู„ู…ุฌุงู„ ูˆุชูˆููŠุฑ ุนู…ู„ู‡ู… ู„ู„ู…ุฌุชู…ุน ุจุดูƒู„ ุนุงู…. diff --git a/ultralytics/docs/ar/models/yolov7.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/models/yolov8.md b/ultralytics/docs/ar/models/yolov8.md new file mode 100755 index 0000000..7b2082f --- /dev/null +++ b/ultralytics/docs/ar/models/yolov8.md @@ -0,0 +1,166 @@ +--- +comments: true +description: ุงุณุชูƒุดู ุงู„ู…ูŠุฒุงุช ุงู„ู…ุซูŠุฑุฉ ู„ู€ YOLOv8 ุŒ ุฃุญุฏุซ ุฅุตุฏุงุฑ ู…ู† ู…ูƒุชุดู ุงู„ูƒุงุฆู†ุงุช ุงู„ุญูŠุฉ ุงู„ุฎุงุต ุจู†ุง ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ! ุชุนุฑู‘ู ุนู„ู‰ ุงู„ุนู…ุงุฑุงุช ุงู„ู…ุชู‚ุฏู…ุฉ ูˆุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ูˆุงู„ุชูˆุงุฒู† ุงู„ู…ุซู„ู‰ ุจูŠู† ุงู„ุฏู‚ุฉ ูˆุงู„ุณุฑุนุฉ ุงู„ุชูŠ ุชุฌุนู„ YOLOv8 ุงู„ุฎูŠุงุฑ ุงู„ู…ุซุงู„ูŠ ู„ู…ู‡ุงู… ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช ุงู„ุฎุงุตุฉ ุจูƒ. +keywords: YOLOv8, Ultralytics, ู…ูƒุชุดู ุงู„ูƒุงุฆู†ุงุช ุงู„ุญูŠุฉ ุงู„ุฎุงุต ุจู†ุง ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ, ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง, ูˆุซุงุฆู‚, ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช, ุณู„ุณู„ุฉ YOLO, ุงู„ุนู…ุงุฑุงุช ุงู„ู…ุชู‚ุฏู…ุฉ, ุงู„ุฏู‚ุฉ, ุงู„ุณุฑุนุฉ +--- + +# YOLOv8 + +## ู†ุธุฑุฉ ุนุงู…ุฉ + +YOLOv8 ู‡ูˆ ุงู„ุชุทูˆุฑ ุงู„ุฃุฎูŠุฑ ููŠ ุณู„ุณู„ุฉ YOLO ู„ู…ูƒุชุดูุงุช ุงู„ูƒุงุฆู†ุงุช ุงู„ุญูŠุฉ ุงู„ุฎุงุตุฉ ุจู†ุง ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ุŒ ูˆุงู„ุฐูŠ ูŠู‚ุฏู… ุฃุฏุงุกู‹ ู…ุชู‚ุฏู…ู‹ุง ููŠ ู…ุฌุงู„ ุงู„ุฏู‚ุฉ ูˆุงู„ุณุฑุนุฉ. ุจู†ุงุกู‹ ุนู„ู‰ ุงู„ุชู‚ุฏู…ุงุช ุงู„ุชูŠ ุชู… ุฅุญุฑุงุฒู‡ุง ููŠ ุฅุตุฏุงุฑุงุช YOLO ุงู„ุณุงุจู‚ุฉ ุŒ ูŠู‚ุฏู… YOLOv8 ู…ูŠุฒุงุช ูˆุชุญุณูŠู†ุงุช ุฌุฏูŠุฏุฉ ุชุฌุนู„ู‡ ุงู„ุฎูŠุงุฑ ุงู„ู…ุซุงู„ูŠ ู„ู…ู‡ุงู… ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช ููŠ ู…ุฌู…ูˆุนุฉ ูˆุงุณุนุฉ ู…ู† ุงู„ุชุทุจูŠู‚ุงุช. + +![YOLOv8 ุงู„ู…ู‚ุฏู…ุฉ ู…ู† Ultralytics](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## ุงู„ู…ูŠุฒุงุช ุงู„ุฑุฆูŠุณูŠุฉ + +- **ุงู„ุนู…ุงุฑุงุช ุงู„ู…ุชู‚ุฏู…ุฉ ู„ู„ุธู‡ุฑ ูˆุงู„ุนู†ู‚:** ูŠุนุชู…ุฏ YOLOv8 ุนู„ู‰ ุนู…ุงุฑุงุช ุงู„ุธู‡ุฑ ูˆุงู„ุนู†ู‚ ุนู„ู‰ ุฃุญุฏุซ ุทุฑุงุฒ ุŒ ู…ู…ุง ูŠุคุฏูŠ ุฅู„ู‰ ุชุญุณูŠู† ุงุณุชุฎุฑุงุฌ ุงู„ู…ูŠุฒุงุช ูˆุฃุฏุงุก ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช. +- **Ultralytics Head ุจุฏูˆู† ุฅุซุจุงุช ุฎุทุงููŠ:** ูŠุนุชู…ุฏ YOLOv8 ุนู„ู‰ Ultralytics Head ุจุฏูˆู† ุฅุซุจุงุช ุฎุทุงููŠ ุŒ ู…ู…ุง ูŠุณู‡ู… ููŠ ุฒูŠุงุฏุฉ ุงู„ุฏู‚ุฉ ูˆุชูˆููŠุฑ ูˆู‚ุช ู…ูƒุดู ุฃูƒุซุฑ ูƒูุงุกุฉ ู…ู‚ุงุฑู†ุฉู‹ ุจุงู„ุทุฑู‚ ุงู„ุชูŠ ุชุนุชู…ุฏ ุนู„ู‰ ุงู„ุฅุซุจุงุช. 
+- **Optimized Accuracy-Speed Tradeoff:** With a focus on maintaining an optimal balance between accuracy and speed, YOLOv8 is suitable for real-time object detection tasks in diverse application areas.
+- **Variety of Pre-trained Models:** YOLOv8 offers a range of pre-trained models to cater to various tasks and performance requirements, making it easier to find the right model for your specific use case.
+
+## Supported Tasks and Modes
+
+The YOLOv8 series offers a diverse range of models, each specialized for specific tasks in computer vision. These models are designed to cater to various requirements, from object detection to more complex tasks like instance segmentation, keypoint detection, and classification.
+
+Each variant of the YOLOv8 series is optimized for its respective task, ensuring high performance and accuracy. Additionally, these models are compatible with various operational modes including [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), facilitating their use at different stages of development and deployment.
+
+| Model       | Filenames                                                                                                      | Task                                         | Inference | Validation | Training | Export |
+|-------------|----------------------------------------------------------------------------------------------------------------|----------------------------------------------|-----------|------------|----------|--------|
+| YOLOv8      | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt`                                                 | [Detection](../tasks/detect.md)              | ✅        | ✅         | ✅       | ✅     |
+| YOLOv8-seg  | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt`                             | [Instance Segmentation](../tasks/segment.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt`   | [Pose/Keypoints](../tasks/pose.md)           | ✅        | ✅         | ✅       | ✅     |
+| YOLOv8-cls  | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt`                             | [Classification](../tasks/classify.md)       | ✅        | ✅         | ✅       | ✅     |
+
+This table provides an overview of the YOLOv8 model variants, highlighting their applicability to specific tasks and their compatibility with various operational modes such as Inference, Validation, Training, and Export. It showcases the versatility and robustness of the YOLOv8 series, making them suitable for a variety of applications in computer vision.
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection (COCO)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+    === "Detection (Open Images V7)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |
+
+    === "Segmentation (COCO)"
+
+        See the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>box</sup><br>50-95 | mAP<sup>mask</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|-----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+        | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+        | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+        | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+        | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+    === "Classification (ImageNet)"
+
+        See the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pre-trained classes.
+
+        | Model | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 |
+        |-------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------|
+        | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+        | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+        | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+        | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+        | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+    === "Pose (COCO)"
+
+        See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include one pre-trained class, 'person'.
+
+        | Model | size<br>(pixels) | mAP<sup>pose</sup><br>50-95 | mAP<sup>pose</sup><br>50 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|-----------------------------|--------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+        | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+        | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+        | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+        | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+        | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+## Usage Examples
+
+This example provides simple YOLOv8 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+Note that the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.
Example "ู…ุซุงู„" + + === "Python" + + ูŠู…ูƒู† ุชู…ุฑูŠุฑ ู†ู…ุงุฐุฌ ุงู„ู€ PyTorch ุงู„ู…ูุฏุฑู‘ุจุฉ ุงู„ู…ูุญููˆุธุฉ ุจุงู„ุงู…ุชุฏุงุฏ `*.pt` ุจุงู„ุฅุถุงูุฉ ุฅู„ู‰ ู…ู„ูุงุช ุงู„ุชูƒูˆูŠู† ุจุงู…ุชุฏุงุฏ `*.yaml` ุฅู„ู‰ ูุฆุฉ `YOLO()` ู„ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ููŠ ู„ุบุฉ ุจุงูŠุซูˆู†: + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ COCO + model = YOLO('yolov8n.pt') + + # ุนุฑุถ ู…ุนู„ูˆู…ุงุช ุงู„ู†ู…ูˆุฐุฌ (ุงุฎุชูŠุงุฑูŠ) + model.info() + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ู…ูุฏุฎู„ุงุช coco8 ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ุชุดุบูŠู„ ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ YOLOv8n ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ุชุชูˆูุฑ ุฃูˆุงู…ุฑ CLI ู„ุชุดุบูŠู„ ุงู„ู†ู…ุงุฐุฌ ู…ุจุงุดุฑุฉ: + + ```bash + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ COCO ูˆุงุฎุชุจุงุฑู‡ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช coco8 + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง ุนู„ู‰ COCO ูˆุงู„ุชู†ุจุค ุจู‡ ุนู„ู‰ ุตูˆุฑุฉ 'bus.jpg' + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ุงู„ุงู‚ุชุจุงุณุงุช ูˆุงู„ุชู‚ุฏูŠุฑุงุช + +ุฅุฐุง ุงุณุชุฎุฏู…ุช ู†ู…ูˆุฐุฌ YOLOv8 ุฃูˆ ุฃูŠ ุจุฑู†ุงู…ุฌ ุขุฎุฑ ู…ู† ู‡ุฐุง ุงู„ู…ุณุชูˆุฏุน ููŠ ุนู…ู„ูƒ ุŒ ููŠุฑุฌู‰ ุงุณุชุดู‡ุงุฏู‡ ุจุงุณุชุฎุฏุงู… ุงู„ุชู†ุณูŠู‚ ุงู„ุชุงู„ูŠ: + +!!!ุŒ + +!!! quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +ูŠุฑุฌู‰ ู…ู„ุงุญุธุฉ ุฃู† ูˆุฌูˆุฏ ู…ุนุฑู ุงู„ูƒุงุฆู† ุงู„ุฑู‚ู…ูŠ (DOI) ู‚ูŠุฏ ุงู„ุงู†ุชุธุงุฑ ูˆุณูŠุชู… ุฅุถุงูุชู‡ ุฅู„ู‰ ุงู„ุงู‚ุชุจุงุณ ุจู…ุฌุฑุฏ ุชูˆูุฑู‡. ุชูู‚ุฏู… ู†ู…ุงุฐุฌ YOLOv8 ุจู…ูˆุฌุจ [ุฑุฎุตุฉ AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ูˆ [ุงู„ุฑุฎุตุฉ ุงู„ู…ุคุณุณูŠุฉ](https://ultralytics.com/license). diff --git a/ultralytics/docs/ar/models/yolov8.md:Zone.Identifier b/ultralytics/docs/ar/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/benchmark.md b/ultralytics/docs/ar/modes/benchmark.md new file mode 100755 index 0000000..506240a --- /dev/null +++ b/ultralytics/docs/ar/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: ุชุนุฑู ุนู„ู‰ ูƒูŠููŠุฉ ู‚ูŠุงุณ ุณุฑุนุฉ ูˆุฏู‚ุฉ YOLOv8 ุนุจุฑ ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฎุชู„ูุฉ. ุงุญุตู„ ุนู„ู‰ ุฑุคู‰ ุญูˆู„ ู…ู‚ุงูŠูŠุณ mAP50-95 ูˆaccuracy_top5 ูˆุงู„ู…ุฒูŠุฏ. 
+keywords: UltralyticsุŒ YOLOv8ุŒ ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุกุŒ ู‚ูŠุงุณ ุงู„ุณุฑุนุฉุŒ ู‚ูŠุงุณ ุงู„ุฏู‚ุฉุŒ ู…ู‚ุงูŠูŠุณ mAP50-95 ูˆaccuracy_top5ุŒ ONNXุŒ OpenVINOุŒ TensorRTุŒ ุชู†ุณูŠู‚ุงุช ุชุตุฏูŠุฑ YOLO +--- + +# ุงุฎุชุจุงุฑ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## ุงู„ู…ู‚ุฏู…ุฉ + +ุจู…ุฌุฑุฏ ุฃู† ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌูƒ ูˆุชุญู‚ู‚ ุตุญุชู‡ ุŒ ูุฅู† ุงู„ุฎุทูˆุฉ ุงู„ุชุงู„ูŠุฉ ุจุดูƒู„ ู…ู†ุทู‚ูŠ ู‡ูŠ ุชู‚ูŠูŠู… ุฃุฏุงุฆู‡ ููŠ ุณูŠู†ุงุฑูŠูˆู‡ุงุช ุงู„ุนุงู„ู… ุงู„ุญู‚ูŠู‚ูŠ ุงู„ู…ุฎุชู„ูุฉ. ูŠูˆูุฑ ูˆุถุน ุงู„ุงุฎุชุจุงุฑ ููŠ Ultralytics YOLOv8 ู‡ุฐุง ุงู„ู‡ุฏู ู…ู† ุฎู„ุงู„ ุชูˆููŠุฑ ุฅุทุงุฑ ู‚ูˆูŠ ู„ุชู‚ูŠูŠู… ุณุฑุนุฉ ูˆุฏู‚ุฉ ุงู„ู†ู…ูˆุฐุฌ ุนุจุฑ ู…ุฌู…ูˆุนุฉ ู…ู† ุตูŠุบ ุงู„ุชุตุฏูŠุฑ. + +## ู„ู…ุงุฐุง ู‡ูˆ ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก ู…ู‡ู…ุŸ + +- **ู‚ุฑุงุฑุงุช ู…ุณุชู†ูŠุฑุฉ:** ุงูƒุชุณุงุจ ุฑุคู‰ ุญูˆู„ ุงู„ุชู†ุงุฒู„ุงุช ุจูŠู† ุงู„ุณุฑุนุฉ ูˆุงู„ุฏู‚ุฉ. +- **ุชุฎุตูŠุต ุงู„ู…ูˆุงุฑุฏ:** ูู‡ู… ูƒูŠููŠุฉ ุฃุฏุงุก ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฎุชู„ูุฉ ุนู„ู‰ ุฃุฌู‡ุฒุฉ ู…ุฎุชู„ูุฉ. +- **ุชุญุณูŠู†:** ุชุนู„ู… ุฃูŠ ุชู†ุณูŠู‚ ุชุตุฏูŠุฑ ูŠู‚ุฏู… ุฃูุถู„ ุฃุฏุงุก ู„ุญุงู„ุชูƒ ุงู„ุงุณุชุฎุฏุงู…ูŠุฉ ุงู„ู…ุญุฏุฏุฉ. +- **ูƒูุงุกุฉ ุงู„ุชูƒู„ูุฉ:** ุงุณุชุฎุฏุงู… ุงู„ู…ูˆุงุฑุฏ ุงู„ุฃุฌู‡ุฒุฉ ุจุดูƒู„ ุฃูƒุซุฑ ูƒูุงุกุฉ ุจู†ุงุกู‹ ุนู„ู‰ ู†ุชุงุฆุฌ ุงู„ุงุฎุชุจุงุฑ. + +### ุงู„ู…ู‚ุงูŠูŠุณ ุงู„ุฑุฆูŠุณูŠุฉ ููŠ ูˆุถุน ุงู„ุงุฎุชุจุงุฑ + +- **mAP50-95:** ู„ูƒุดู ุงู„ูƒุงุฆู†ุงุช ูˆุชู‚ุณูŠู…ู‡ุง ูˆุชุญุฏูŠุฏ ุงู„ูˆุถุน. +- **accuracy_top5:** ู„ุชุตู†ูŠู ุงู„ุตูˆุฑ. +- **ูˆู‚ุช ุงู„ุชุชุจุน:** ุงู„ูˆู‚ุช ุงู„ู…ุณุชุบุฑู‚ ู„ูƒู„ ุตูˆุฑุฉ ุจุงู„ู…ูŠู„ูŠ ุซุงู†ูŠุฉ. + +### ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฏุนูˆู…ุฉ + +- **ONNX:** ู„ุฃูุถู„ ุฃุฏุงุก ุนู„ู‰ ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ. +- **TensorRT:** ู„ุฃู‚ุตู‰ ุงุณุชูุงุฏุฉ ู…ู† ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ูŠุฉ. +- **OpenVINO:** ู„ุชุญุณูŠู† ุงู„ุฃุฌู‡ุฒุฉ ู…ู† ุฅู†ุชู„. +- **CoreML ูˆ TensorFlow SavedModel ูˆู…ุง ุฅู„ู‰ ุฐู„ูƒ:** ู„ุชู„ุจูŠุฉ ุงุญุชูŠุงุฌุงุช ุงู„ู†ุดุฑ ุงู„ู…ุชู†ูˆุนุฉ. + +!!! Tip "ู†ุตูŠุญุฉ" + + * ู‚ู… ุจุชุตุฏูŠุฑ ุฅู„ู‰ ู†ู…ูˆุฐุฌ ONNX ุฃูˆ OpenVINO ู„ุฒูŠุงุฏุฉ ุณุฑุนุฉ ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ ุจู…ู‚ุฏุงุฑ 3 ู…ุฑุงุช. + * ู‚ู… ุจุชุตุฏูŠุฑ ุฅู„ู‰ ู†ู…ูˆุฐุฌ TensorRT ู„ุฒูŠุงุฏุฉ ุณุฑุนุฉ ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ูŠุฉ ุจู…ู‚ุฏุงุฑ 5 ู…ุฑุงุช. + +## ุฃู…ุซู„ุฉ ุนู„ู‰ ุงู„ุงุณุชุฎุฏุงู… + +ู‚ู… ุจุชุดุบูŠู„ ุงุฎุชุจุงุฑุงุช YOLOv8n ุนู„ู‰ ุฌู…ูŠุน ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฏุนูˆู…ุฉ ุจู…ุง ููŠ ุฐู„ูƒ ONNX ูˆ TensorRT ูˆู…ุง ุฅู„ู‰ ุฐู„ูƒ. ุงู†ุธุฑ ุงู„ู‚ุณู… ุงู„ู…ูˆุฌูˆุฏ ุฃุฏู†ุงู‡ ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ู…ู† ูˆุณูŠุทุงุช ุงู„ุชุตุฏูŠุฑ. + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # ุงุฎุชุจุงุฑ ุนู„ู‰ ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ูŠุฉ + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## ูˆุณูŠุทุงุช + +ุชูˆูุฑ ุงู„ูˆุณุงุฆุท ู…ุซู„ `model` ูˆ `data` ูˆ `imgsz` ูˆ `half` ูˆ `device` ูˆ `verbose` ู…ุฑูˆู†ุฉ ู„ู„ู…ุณุชุฎุฏู…ูŠู† ู„ุถุจุท ุงู„ุงุฎุชุจุงุฑุงุช ุญุณุจ ุงุญุชูŠุงุฌุงุชู‡ู… ุงู„ู…ุญุฏุฏุฉ ูˆู…ู‚ุงุฑู†ุฉ ุฃุฏุงุก ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฎุชู„ูุฉ ุจุณู‡ูˆู„ุฉ. 
+ +| ุงู„ู…ูุชุงุญ | ุงู„ู‚ูŠู…ุฉ | ุงู„ูˆุตู | +|-----------|---------|---------------------------------------------------------------------------------------------------| +| `model` | `None` | ู…ุณุงุฑ ุฅู„ู‰ ู…ู„ู ุงู„ู†ู…ูˆุฐุฌ ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ yolov8n.pt ุŒ yolov8n.yaml | +| `data` | `None` | ู…ุณุงุฑ ุฅู„ู‰ YAML ูŠุดูŠุฑ ุฅู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก (ุจุชุญุชูˆู‰ ุนู„ู‰ ุจูŠุงู†ุงุช `val`) | +| `imgsz` | `640` | ุญุฌู… ุงู„ุตูˆุฑุฉ ูƒุฑู‚ู… ุŒ ุฃูˆ ู‚ุงุฆู…ุฉ (h ุŒ w) ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ (640ุŒ 480) | +| `half` | `False` | ุชู‚ู„ูŠู„ ุฏู‚ุฉ ุงู„ุนุฏุฏ ุงู„ุนุดุฑู‰ ู„ู„ุฃุจุนุงุฏ (FP16 quantization) | +| `int8` | `False` | ุชู‚ู„ูŠู„ ุฏู‚ุฉ ุงู„ุนุฏุฏ ุงู„ุตุญูŠุญ 8 ุจุช (INT8 quantization) | +| `device` | `None` | ุงู„ุฌู‡ุงุฒ ุงู„ุฐู‰ ุณุชุนู…ู„ ุนู„ูŠู‡ ุงู„ุนู…ู„ูŠุฉ ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ cuda device=0 ุฃูˆ device=0,1,2,3 ุฃูˆ device=cpu | +| `verbose` | `False` | ุนุฏู… ุงู„ู…ุชุงุจุนุฉ ุนู†ุฏ ุญุฏูˆุซ ุฎุทุฃ (ู…ู‚ุฏุงุฑ ู…ู†ุทู‚ู‰)ุŒ ุฃูˆ ู…ุณุชูˆู‰ ุงู„ูƒุดู ุนู†ุฏ ุชุฌุงูˆุฒ ุญุฏ ุงู„ู‚ูŠู…ุฉ ุงู„ู…ุทู„ูˆุจุฉ (ู‚ูŠู…ุฉ ุนุงุฆู…ุฉ) | + +## ุตูŠุบ ุงู„ุชุตุฏูŠุฑ + +ุณูŠุญุงูˆู„ ุงู„ุชุทุจูŠู‚ ุชุดุบูŠู„ ุงู„ุงุฎุชุจุงุฑุงุช ุชู„ู‚ุงุฆูŠู‹ุง ุนู„ู‰ ุฌู…ูŠุน ุตูŠุบ ุงู„ุชุตุฏูŠุฑ ุงู„ู…ู…ูƒู†ุฉ ุงู„ู…ูˆุฌูˆุฏุฉ ุฃุฏู†ุงู‡. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +ุงู†ุธุฑ ุชูุงุตูŠู„ ุงู„ุชุตุฏูŠุฑ ุงู„ูƒุงู…ู„ุฉ ููŠ ุงู„ุตูุญุฉ [Export](https://docs.ultralytics.com/modes/export/) diff --git a/ultralytics/docs/ar/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/ar/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/export.md 
b/ultralytics/docs/ar/modes/export.md new file mode 100755 index 0000000..f555ec0 --- /dev/null +++ b/ultralytics/docs/ar/modes/export.md @@ -0,0 +1,108 @@ +--- +comments: true +description: ุฏู„ูŠู„ ุฎุทูˆุฉ ุจุฎุทูˆุฉ ุญูˆู„ ุชุตุฏูŠุฑ ู†ู…ุงุฐุฌ YOLOv8 ุงู„ุฎุงุตุฉ ุจูƒ ุฅู„ู‰ ุชู†ุณูŠู‚ุงุช ู…ุฎุชู„ูุฉ ู…ุซู„ ONNX ูˆ TensorRT ูˆ CoreML ูˆุบูŠุฑู‡ุง ู„ู„ู†ุดุฑ. ุงุณุชูƒุดู ุงู„ุขู†!. +keywords: YOLOุŒ YOLOv8ุŒ UltralyticsุŒ ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌุŒ ONNXุŒ TensorRTุŒ CoreMLุŒ TensorFlow SavedModelุŒ OpenVINOุŒ PyTorchุŒ ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ +--- + +# ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… ูŠูˆู„ูˆ ู…ู† Ultralytics + +ุฅูƒูˆุณูŠุณุชู… ูŠูˆู„ูˆ ู…ู† Ultralytics ูˆุงู„ุชูƒุงู…ู„ุงุช + +## ู…ู‚ุฏู…ุฉ + +ุงู„ู‡ุฏู ุงู„ู†ู‡ุงุฆูŠ ู„ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ ู‡ูˆ ู†ุดุฑู‡ ู„ุชุทุจูŠู‚ุงุช ุงู„ุนุงู„ู… ุงู„ุญู‚ูŠู‚ูŠ. ูŠูˆูุฑ ูˆุถุน ุงู„ุชุตุฏูŠุฑ ููŠ ูŠูˆู„ูˆ ู…ู† Ultralytics ู…ุฌู…ูˆุนุฉ ู…ุชู†ูˆุนุฉ ู…ู† ุงู„ุฎูŠุงุฑุงุช ู„ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฏุฑุจ ุฅู„ู‰ ุชู†ุณูŠู‚ุงุช ู…ุฎุชู„ูุฉุŒ ู…ู…ุง ูŠุฌุนู„ู‡ ูŠู…ูƒู† ุงุณุชุฎุฏุงู…ู‡ ููŠ ู…ุฎุชู„ู ุงู„ุฃู†ุธู…ุฉ ูˆุงู„ุฃุฌู‡ุฒุฉ. ูŠู‡ุฏู ู‡ุฐุง ุงู„ุฏู„ูŠู„ ุงู„ุดุงู…ู„ ุฅู„ู‰ ู…ุณุงุนุฏุชูƒ ููŠ ูู‡ู… ุชูุงุตูŠู„ ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌุŒ ูˆูŠุนุฑุถ ูƒูŠููŠุฉ ุชุญู‚ูŠู‚ ุฃู‚ุตู‰ ุชูˆุงูู‚ ูˆุฃุฏุงุก. + +

+
+ +
+ ุดุงู‡ุฏ: ูƒูŠููŠุฉ ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ Ultralytics YOLOv8 ุงู„ุชุฏุฑูŠุจ ุงู„ู…ุฎุตุต ูˆุชุดุบูŠู„ ุงู„ุงุณุชุฏู„ุงู„ ุงู„ู…ุจุงุดุฑ ุนู„ู‰ ูƒุงู…ูŠุฑุง ุงู„ูˆูŠุจ. +

+ +## ู„ู…ุงุฐุง ุงุฎุชูŠุงุฑ ูˆุถุน ุชุตุฏูŠุฑ YOLOv8ุŸ + +- **ุงู„ุชู†ูˆุน:** ุชุตุฏูŠุฑ ุฅู„ู‰ ุชู†ุณูŠู‚ุงุช ู…ุชุนุฏุฏุฉ ุจู…ุง ููŠ ุฐู„ูƒ ONNX ูˆ TensorRT ูˆ CoreML ุŒ ูˆุบูŠุฑู‡ุง. +- **ุงู„ุฃุฏุงุก:** ุงู„ุญุตูˆู„ ุนู„ู‰ ุณุฑุนุฉ ุชุณุฑูŠุน ุชุตู„ ุฅู„ู‰ 5 ุฃุถุนุงู ุจุงุณุชุฎุฏุงู… TensorRT ูˆุณุฑุนุฉ ุชุณุฑูŠุน ู…ุนุงู„ุฌ ุงู„ูƒู…ุจูŠูˆุชุฑ ุงู„ู…ุฑูƒุฒูŠ ุจู†ุณุจุฉ 3 ุฃุถุนุงู ุจุงุณุชุฎุฏุงู… ONNX ุฃูˆ OpenVINO. +- **ุงู„ุชูˆุงูู‚ูŠุฉ:** ุฌุนู„ ุงู„ู†ู…ูˆุฐุฌ ู‚ุงุจู„ุงู‹ ู„ู„ู†ุดุฑ ุนู„ู‰ ุงู„ุฃุฌู‡ุฒุฉ ูˆุงู„ุจุฑุงู…ุฌ ุงู„ู…ุฎุชู„ูุฉ. +- **ุณู‡ูˆู„ุฉ ุงู„ุงุณุชุฎุฏุงู…:** ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ุงู„ุจุณูŠุทุฉ ูˆูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ Python ู„ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุจุณุฑุนุฉ ูˆุณู‡ูˆู„ุฉ. + +### ุงู„ู…ูŠุฒุงุช ุงู„ุฑุฆูŠุณูŠุฉ ู„ูˆุถุน ุงู„ุชุตุฏูŠุฑ + +ุฅู„ูŠูƒ ุจุนุถ ู…ู† ุงู„ู…ูŠุฒุงุช ุงู„ู…ู…ูŠุฒุฉ: + +- **ุชุตุฏูŠุฑ ุจู†ู‚ุฑุฉ ูˆุงุญุฏุฉ:** ุฃูˆุงู…ุฑ ุจุณูŠุทุฉ ู„ุชุตุฏูŠุฑ ุฅู„ู‰ ุชู†ุณูŠู‚ุงุช ู…ุฎุชู„ูุฉ. +- **ุชุตุฏูŠุฑ ุงู„ุฏููุนุงุช:** ุชุตุฏูŠุฑ ู†ู…ุงุฐุฌ ู‚ุงุฏุฑุฉ ุนู„ู‰ ุงู„ุนู…ู„ ู…ุน ุงู„ุฏููุนุงุช. +- **ุชู†ููŠุฐ ู…ุญุณูŽู‘ู†:** ูŠุชู… ุชุญุณูŠู† ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุตุฏุฑุฉ ู„ุชูˆููŠุฑ ูˆู‚ุช ุชู†ููŠุฐ ุฃุณุฑุน. +- **ููŠุฏูŠูˆู‡ุงุช ุชุนู„ูŠู…ูŠุฉ:** ู…ุฑุดุฏูŠู† ูˆููŠุฏูŠูˆู‡ุงุช ุชุนู„ูŠู…ูŠุฉ ู„ุชุฌุฑุจุฉ ุชุตุฏูŠุฑ ุณู„ุณุฉ. + +!!! Tip "ู†ุตูŠุญุฉ" + + * ุตุฏู‘ุฑ ุฅู„ู‰ ONNX ุฃูˆ OpenVINO ู„ู„ุญุตูˆู„ ุนู„ู‰ ุชุณุฑูŠุน ู…ุนุงู„ุฌ ุงู„ูƒู…ุจูŠูˆุชุฑ ุงู„ู…ุฑูƒุฒูŠ ุจู†ุณุจุฉ 3 ุฃุถุนุงู. + * ุตุฏู‘ุฑ ุฅู„ู‰ TensorRT ู„ู„ุญุตูˆู„ ุนู„ู‰ ุชุณุฑูŠุน ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ูŠุฉ ุจู†ุณุจุฉ 5 ุฃุถุนุงู. + +## ุฃู…ุซู„ุฉ ู„ู„ุงุณุชุฎุฏุงู… + +ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8n ุฅู„ู‰ ุชู†ุณูŠู‚ ู…ุฎุชู„ู ู…ุซู„ ONNX ุฃูˆ TensorRT. ุงู†ุธุฑ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡ ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ู…ู† ูˆุณุงุฆุท ุงู„ุชุตุฏูŠุฑ. + +!!! Example "ู…ุซุงู„" + + === "ุจุงูŠุซูˆู†" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุฎุตุต + + # ู‚ู… ุจุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ + model.export(format='onnx') + ``` + === "ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + + ```bash + yolo export model=yolov8n.pt format=onnx # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + yolo export model=path/to/best.pt format=onnx # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุฎุตุต + ``` + +## ุงู„ูˆุณุงุฆุท + +ุชุดูŠุฑ ุฅุนุฏุงุฏุงุช ุชุตุฏูŠุฑ YOLO ุฅู„ู‰ ุงู„ุชูƒูˆูŠู†ุงุช ูˆุงู„ุฎูŠุงุฑุงุช ุงู„ู…ุฎุชู„ูุฉ ุงู„ู…ุณุชุฎุฏู…ุฉ ู„ุญูุธ ุฃูˆ ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ู„ู„ุงุณุชุฎุฏุงู… ููŠ ุจูŠุฆุงุช ุฃูˆ ู…ู†ุตุงุช ุฃุฎุฑู‰. ูŠู…ูƒู† ุฃู† ุชุคุซุฑ ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ุนู„ู‰ ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ูˆุญุฌู…ู‡ ูˆุชูˆุงูู‚ู‡ ู…ุน ุงู„ุฃู†ุธู…ุฉ ุงู„ู…ุฎุชู„ูุฉ. ุชุดู…ู„ ุจุนุถ ุฅุนุฏุงุฏุงุช ุชุตุฏูŠุฑ YOLO ุงู„ุดุงุฆุนุฉ ุชู†ุณูŠู‚ ู…ู„ู ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุตุฏุฑ (ู…ุซู„ ONNX ูˆุชู†ุณูŠู‚ TensorFlow SavedModel) ูˆุงู„ุฌู‡ุงุฒ ุงู„ุฐูŠ ุณูŠุชู… ุชุดุบูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุนู„ูŠู‡ (ู…ุซู„ ุงู„ู…ุนุงู„ุฌ ุงู„ู…ุฑูƒุฒูŠ ุฃูˆ ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุฑุณูˆู…ูŠุฉ) ูˆูˆุฌูˆุฏ ู…ูŠุฒุงุช ุฅุถุงููŠุฉ ู…ุซู„ ุงู„ุฃู‚ู†ุนุฉ ุฃูˆ ุงู„ุชุณู…ูŠุงุช ุงู„ู…ุชุนุฏุฏุฉ ู„ูƒู„ ู…ุฑุจุน. ู‚ุฏ ุชุคุซุฑ ุนูˆุงู…ู„ ุฃุฎุฑู‰ ู‚ุฏ ุชุคุซุฑ ุนู…ู„ูŠุฉ ุงู„ุชุตุฏูŠุฑ ุชุดู…ู„ ุงู„ู…ู‡ู…ุฉ ุงู„ู†ู…ูˆุฐุฌุฉ ุงู„ู…ุญุฏุฏุฉ ุงู„ุชูŠ ูŠุชู… ุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ู„ู‡ุง ูˆู…ุชุทู„ุจุงุช ุฃูˆ ู‚ูŠูˆุฏ ุงู„ุจูŠุฆุฉ ุฃูˆ ุงู„ู…ู†ุตุฉ ุงู„ู…ุณุชู‡ุฏูุฉ. 
ู…ู† ุงู„ู…ู‡ู… ุฃู† ู†ู†ุธุฑ ุจุนู†ุงูŠุฉ ูˆู†ู‚ูˆู… ุจุชูƒูˆูŠู† ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ู„ุถู…ุงู† ุฃู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุตุฏุฑ ู‡ูˆ ู…ุญุณูŽู‘ู† ู„ู„ุญุงู„ุฉ ุงู„ุงุณุชุฎุฏุงู… ุงู„ู…ู‚ุตูˆุฏุฉ ูˆูŠู…ูƒู† ุงุณุชุฎุฏุงู…ู‡ ุจุดูƒู„ ูุนุงู„ ููŠ ุงู„ุจูŠุฆุฉ ุงู„ู…ุณุชู‡ุฏูุฉ. + +| ุงู„ู…ูุชุงุญ | ุงู„ู‚ูŠู…ุฉ | ุงู„ูˆุตู | +|-------------|-----------------|-----------------------------------------------------------------------| +| `format` | `'torchscript'` | ุงู„ุชู†ุณูŠู‚ ุงู„ู…ุฑุงุฏ ุชุตุฏูŠุฑู‡ | +| `imgsz` | `640` | ุญุฌู… ุงู„ุตูˆุฑุฉ ูƒู…ู‚ุฏุงุฑ ุนู„ู…ูŠ ุฃูˆ ู‚ุงุฆู…ุฉ (h ุŒ w) ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ (640 ุŒ 480) | +| `keras` | `False` | ุงุณุชุฎุฏุงู… Keras ู„ุชุตุฏูŠุฑ TF SavedModel | +| `optimize` | `False` | TorchScript: ุงู„ุฃู…ุซู„ ู„ู„ุฌูˆุงู„ | +| `half` | `False` | ุชูƒู…ูŠู… FP16 | +| `int8` | `False` | ุชูƒู…ูŠู… INT8 | +| `dynamic` | `False` | ONNX/TensorRT: ุงู„ู…ุญุงูˆุฑ ุงู„ุฏูŠู†ุงู…ูŠูƒูŠุฉ | +| `simplify` | `False` | ONNX/TensorRT: ุชุจุณูŠุท ุงู„ู†ู…ูˆุฐุฌ | +| `opset` | `None` | ONNX: ุฅุตุฏุงุฑ opset (ุงุฎุชูŠุงุฑูŠ ุŒ ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ ุงู„ุฃุญุฏุซ) | +| `workspace` | `4` | TensorRT: ุญุฌู… ู…ุณุงุญุฉ ุงู„ุนู…ู„ (GB) | +| `nms` | `False` | CoreML: ุฅุถุงูุฉ NMS | + +## ุชู†ุณูŠู‚ุงุช ุงู„ุชุตุฏูŠุฑ + +ุตูŠุบ ุชุตุฏูŠุฑ YOLOv8 ุงู„ู…ุชุงุญุฉ ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ูŠู…ูƒู†ูƒ ุงู„ุชุตุฏูŠุฑ ุฅู„ู‰ ุฃูŠ ุชู†ุณูŠู‚ ุจุงุณุชุฎุฏุงู… ุงู„ูˆุณูŠุทุฉ `format` ุŒ ู…ุซู„ `format='onnx'` ุฃูˆ `format='engine'`. + +| ุงู„ุชู†ุณูŠู‚ | ูˆุณูŠุทุฉ format | ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุจูŠุงู†ุงุช ุงู„ูˆุตููŠุฉ | ุงู„ูˆุณุงุฆุท | +|--------------------------------------------------------------------|---------------|---------------------------|------------------|-----------------------------------------------------| +| [ุจุงูŠุซูˆุฑุด](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `ุชูˆุฑุชุดุณูŠุฑูŠุจุช` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | diff --git a/ultralytics/docs/ar/modes/export.md:Zone.Identifier b/ultralytics/docs/ar/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ 
+[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/index.md b/ultralytics/docs/ar/modes/index.md new file mode 100755 index 0000000..86e2c4d --- /dev/null +++ b/ultralytics/docs/ar/modes/index.md @@ -0,0 +1,77 @@ +--- +comments: true +description: ู…ู† ุงู„ุชุฏุฑูŠุจ ุฅู„ู‰ ุงู„ุชุชุจุนุŒ ุงุณุชูุฏ ู…ู† YOLOv8 ู…ุน Ultralytics. ุงุญุตู„ ุนู„ู‰ ู†ุตุงุฆุญ ูˆุฃู…ุซู„ุฉ ู„ูƒู„ ูˆุถุน ู…ุฏุนูˆู… ุจู…ุง ููŠ ุฐู„ูƒ ุงู„ุชุญู‚ู‚ ูˆุงู„ุชุตุฏูŠุฑ ูˆุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก. +keywords: Ultralytics, YOLOv8, ุงู„ุชุนู„ู… ุงู„ุขู„ูŠุŒ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ุงู„ุชุฏุฑูŠุจุŒ ุงู„ุชุญู‚ู‚ุŒ ุงู„ุชู†ุจุคุŒ ุงู„ุชุตุฏูŠุฑุŒ ุงู„ุชุชุจุนุŒ ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก +--- + +# ุฃูˆุถุงุน Ultralytics YOLOv8 + +ุจูŠุฆุฉ ุนู…ู„ Ultralytics YOLO ูˆุงู„ุชูƒุงู…ู„ุงุช + +## ุงู„ู…ู‚ุฏู…ุฉ + +YOLOv8 ู…ู† Ultralytics ู„ูŠุณุช ู…ุฌุฑุฏ ู†ู…ูˆุฐุฌ ู„ูƒุดู ุงู„ูƒุงุฆู†ุงุช ุขุฎุฑุ› ุฅู†ู‡ุง ุฅุทุงุฑ ู…ุชุนุฏุฏ ุงู„ุงุณุชุฎุฏุงู…ุงุช ู…ุตู…ู… ู„ุชุบุทูŠุฉ ุฏูˆุฑุฉ ุญูŠุงุฉ ู†ู…ุงุฐุฌ ุงู„ุชุนู„ู… ุงู„ุขู„ูŠ ุจุฃูƒู…ู„ู‡ุง - ู…ู† ุงู…ุชุตุงุต ุงู„ุจูŠุงู†ุงุช ูˆุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุฅู„ู‰ ุงู„ุชุญู‚ู‚ ูˆุงู„ู†ุดุฑ ูˆุชุชุจุน ุงู„ูˆุงู‚ุน ุงู„ุญู‚ูŠู‚ูŠ. ูŠุฎุฏู… ูƒู„ ูˆุถุน ุบุฑุถู‹ุง ู…ุญุฏุฏู‹ุง ูˆู‡ูˆ ู…ุตู…ู… ู„ุชูˆููŠุฑ ุงู„ู…ุฑูˆู†ุฉ ูˆุงู„ูƒูุงุกุฉ ุงู„ู…ุทู„ูˆุจุฉ ู„ู„ู…ู‡ุงู… ูˆุงู„ุญุงู„ุงุช ุงู„ุงุณุชุฎุฏุงู… ุงู„ู…ุฎุชู„ูุฉ. + +!!! Note "ู…ู„ุงุญุธุฉ" + + ๐Ÿšง ุชูˆุซูŠู‚ู†ุง ู…ุชุนุฏุฏ ุงู„ู„ุบุงุช ู‚ูŠุฏ ุงู„ุฅู†ุดุงุก ุญุงู„ูŠู‹ุงุŒ ูˆู†ุญู† ู†ุนู…ู„ ุจุฌู‡ุฏ ู„ุชุญุณูŠู†ู‡. ุดูƒุฑู‹ุง ู„ูƒ ุนู„ู‰ ุตุจุฑูƒ! ๐Ÿ™ + +

+
+ +
+ ุดุงู‡ุฏ: ุจุฑู†ุงู…ุฌ ุงู„ุชุนู„ูŠู… Ultralytics: ุชุฏุฑูŠุจุŒ ุงู„ุชุญู‚ู‚ุŒ ุงู„ุชู†ุจุคุŒ ุงู„ุชุตุฏูŠุฑุŒ ูˆุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก. +

+ +### ุฃูˆุถุงุน ู…ุฎุชุตุฑุฉ + +ูู‡ู… ุงู„**ุฃูˆุถุงุน** ุงู„ู…ุฎุชู„ูุฉ ุงู„ู…ุฏุนูˆู…ุฉ ุจูˆุงุณุทุฉ Ultralytics YOLOv8 ู…ู‡ู… ุฌุฏู‹ุง ู„ู„ุงุณุชูุงุฏุฉ ุงู„ู‚ุตูˆู‰ ู…ู† ุงู„ู†ู…ุงุฐุฌ ุงู„ุฎุงุตุฉ ุจูƒ: + +- ูˆุถุน **ุงู„ุชุฏุฑูŠุจ**: ู‚ู… ุจุถุจุท ู†ู…ูˆุฐุฌูƒ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู…ุฎุตุตุฉ ุฃูˆ ู…ุญู…ู„ุฉ ู…ุณุจู‚ู‹ุง. +- ูˆุถุน **ุงู„ุชุญู‚ู‚**: ู†ู‚ุทุฉ ูุญุต ุจุนุฏ ุงู„ุชุฏุฑูŠุจ ู„ุชู‚ูŠูŠู… ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ. +- ูˆุถุน **ุงู„ุชู†ุจุค**: ุงุทู„ู‚ ู‚ูˆุฉ ุงู„ุชู†ุจุค ุงู„ุฎุงุตุฉ ุจู†ู…ูˆุฐุฌูƒ ุนู„ู‰ ุงู„ุจูŠุงู†ุงุช ุงู„ุญู‚ูŠู‚ูŠุฉ. +- ูˆุถุน **ุงู„ุชุตุฏูŠุฑ**: ู‚ู… ุจุชุฌู‡ูŠุฒ ู†ู…ูˆุฐุฌูƒ ู„ู„ุงุณุชุฎุฏุงู… ููŠ ุตูŠุบ ู…ุฎุชู„ูุฉ. +- ูˆุถุน **ุงู„ุชุชุจุน**: ู‚ู… ุจุชูˆุณูŠุน ู†ู…ูˆุฐุฌ ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช ุงู„ุฎุงุต ุจูƒ ุฅู„ู‰ ุชุทุจูŠู‚ุงุช ุงู„ุชุชุจุน ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ. +- ูˆุถุน **ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก**: ู‚ู… ุจุชุญู„ูŠู„ ุณุฑุนุฉ ูˆุฏู‚ุฉ ู†ู…ูˆุฐุฌูƒ ููŠ ุจูŠุฆุงุช ู†ุดุฑ ู…ุชู†ูˆุนุฉ. + +ูŠู‡ุฏู ู‡ุฐุง ุงู„ุฏู„ูŠู„ ุงู„ุดุงู…ู„ ุฅู„ู‰ ุชู‚ุฏูŠู… ู„ู…ุญุฉ ุนุงู…ุฉ ูˆู†ุตุงุฆุญ ุนู…ู„ูŠุฉ ุญูˆู„ ูƒู„ ูˆุถุนุŒ ู„ู…ุณุงุนุฏุชูƒ ููŠ ุงุณุชุบู„ุงู„ ูƒุงู…ู„ ุฅู…ูƒุงู†ุงุช YOLOv8. + +## [ูˆุถุน ุงู„ุชุฏุฑูŠุจ](train.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชุฏุฑูŠุจ ู„ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ YOLOv8 ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู…ุฎุตุตุฉ. ููŠ ู‡ุฐุง ุงู„ูˆุถุนุŒ ูŠุชู… ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ูˆุงู„ู…ุนู„ู…ุงุช ุงู„ู‡ุงูŠุจุฑ ู„ู„ุญุตูˆู„ ุนู„ู‰ ุฏู‚ุฉ ููŠ ุชูˆู‚ุน ุงู„ูุฆุงุช ูˆู…ูˆุงู‚ุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ุตูˆุฑุฉ. + +[ุฃู…ุซู„ุฉ ุงู„ุชุฏุฑูŠุจ](train.md){ .md-button } + +## [ูˆุถุน ุงู„ุชุญู‚ู‚](val.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชุญู‚ู‚ ู„ู„ุชุญู‚ู‚ ู…ู† ู†ู…ูˆุฐุฌ YOLOv8 ุจุนุฏ ุชุฏุฑูŠุจู‡. ููŠ ู‡ุฐุง ุงู„ูˆุถุนุŒ ูŠุชู… ุชู‚ูŠูŠู… ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุชุญู‚ู‚ ู„ู‚ูŠุงุณ ุฏู‚ุชู‡ ูˆุฃุฏุงุก ุงู„ุชุนู…ูŠู…. ูŠู…ูƒู† ุงุณุชุฎุฏุงู… ู‡ุฐุง ุงู„ูˆุถุน ู„ุชุนุฏูŠู„ ุงู„ู…ุนู„ู…ุงุช ุงู„ู‡ุงูŠุจุฑ ู„ู„ู†ู…ูˆุฐุฌ ู„ุชุญุณูŠู† ุฃุฏุงุฆู‡. + +[ุฃู…ุซู„ุฉ ุงู„ุชุญู‚ู‚](val.md){ .md-button } + +## [ูˆุถุน ุงู„ุชู†ุจุค](predict.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชู†ุจุค ู„ุฅุฌุฑุงุก ุชู†ุจุคุงุช ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ YOLOv8 ุงู„ู…ุฏุฑุจ ุนู„ู‰ ุตูˆุฑ ุฃูˆ ููŠุฏูŠูˆู‡ุงุช ุฌุฏูŠุฏุฉ. ููŠ ู‡ุฐุง ุงู„ูˆุถุนุŒ ูŠุชู… ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ู…ู† ู…ู„ู ุงู„ูุญุตุŒ ูˆูŠู…ูƒู† ู„ู„ู…ุณุชุฎุฏู… ุชูˆููŠุฑ ุงู„ุตูˆุฑ ุฃูˆ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ู„ุฅุฌุฑุงุก ุงุณุชุฏู„ุงู„. ูŠู‚ูˆู… ุงู„ู†ู…ูˆุฐุฌ ุจุชูˆู‚ุน ุงู„ูุฆุงุช ูˆู…ูˆุงู‚ุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ุตูˆุฑ ุฃูˆ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุงู„ู…ุฏุฎู„ุฉ. + +[ุฃู…ุซู„ุฉ ุงู„ุชู†ุจุค](predict.md){ .md-button } + +## [ูˆุถุน ุงู„ุชุตุฏูŠุฑ](export.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชุตุฏูŠุฑ ู„ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8 ุฅู„ู‰ ุตูŠุบุฉ ูŠู…ูƒู† ุงุณุชุฎุฏุงู…ู‡ุง ู„ู„ู†ุดุฑ. ููŠ ู‡ุฐุง ุงู„ูˆุถุนุŒ ูŠุชู… ุชุญูˆูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุฅู„ู‰ ุตูŠุบุฉ ูŠู…ูƒู† ุงุณุชุฎุฏุงู…ู‡ุง ู…ู† ู‚ุจู„ ุชุทุจูŠู‚ุงุช ุงู„ุจุฑุงู…ุฌ ุงู„ุฃุฎุฑู‰ ุฃูˆ ุงู„ุฃุฌู‡ุฒุฉ ุงู„ุฃุฌู‡ุฒุฉ. ูŠูƒูˆู† ู‡ุฐุง ุงู„ูˆุถุน ู…ููŠุฏู‹ุง ุนู†ุฏ ู†ุดุฑ ุงู„ู†ู…ูˆุฐุฌ ููŠ ุจูŠุฆุงุช ุงู„ุฅู†ุชุงุฌ. + +[ุฃู…ุซู„ุฉ ุงู„ุชุตุฏูŠุฑ](export.md){ .md-button } + +## [ูˆุถุน ุงู„ุชุชุจุน](track.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชุชุจุน ู„ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ YOLOv8. 
ููŠ ู‡ุฐุง ุงู„ูˆุถุนุŒ ูŠุชู… ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ู…ู† ู…ู„ู ุงู„ูุญุตุŒ ูˆูŠู…ูƒู† ู„ู„ู…ุณุชุฎุฏู… ุชูˆููŠุฑ ุชูŠุงุฑ ููŠุฏูŠูˆ ู…ุจุงุดุฑ ู„ุฃุฏุงุก ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ. ูŠูƒูˆู† ู‡ุฐุง ุงู„ูˆุถุน ู…ููŠุฏู‹ุง ู„ุชุทุจูŠู‚ุงุช ู…ุซู„ ุฃู†ุธู…ุฉ ุงู„ู…ุฑุงู‚ุจุฉ ุฃูˆ ุงู„ุณูŠุงุฑุงุช ุฐุงุชูŠุฉ ุงู„ู‚ูŠุงุฏุฉ. + +[ุฃู…ุซู„ุฉ ุงู„ุชุชุจุน](track.md){ .md-button } + +## [ูˆุถุน ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก](benchmark.md) + +ูŠุชู… ุงุณุชุฎุฏุงู… ูˆุถุน ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก ู„ุชู‚ูŠูŠู… ุณุฑุนุฉ ูˆุฏู‚ุฉ ุตูŠุบ ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฎุชู„ูุฉ ู„ู€ YOLOv8. ุชู‚ุฏู… ุงู„ุงุฎุชุจุงุฑุงุช ู…ุนู„ูˆู…ุงุช ุญูˆู„ ุญุฌู… ุงู„ุตูŠุบุฉ ุงู„ู…ุตุฏุฑุŒ ู…ุนูŠุงุฑ ุงู„ุฃุฏุงุก `mAP50-95` (ู„ูƒุดู ุงู„ูƒุงุฆู†ุงุช ูˆุงู„ุชู‚ุณูŠู… ูˆุงู„ุชุตูˆูŠุฑ) ุฃูˆ ุงู„ู…ุนูŠุงุฑ `accuracy_top5` (ู„ู„ุชุตู†ูŠู)ุŒ ูˆูˆู‚ุช ุงู„ุงุณุชุฏู„ุงู„ ุจุงู„ู…ู„ูŠ ุซุงู†ูŠุฉ ู„ูƒู„ ุตูˆุฑุฉ ููŠ ุตูŠุบ ุงู„ุชุตุฏูŠุฑ ุงู„ู…ุฎุชู„ูุฉ ู…ุซู„ ONNX ูˆ OpenVINO ูˆ TensorRT ูˆุบูŠุฑู‡ุง. ูŠู…ูƒู† ู„ู‡ุฐู‡ ุงู„ู…ุนู„ูˆู…ุงุช ู…ุณุงุนุฏุฉ ุงู„ู…ุณุชุฎุฏู…ูŠู† ุนู„ู‰ ุงุฎุชูŠุงุฑ ุตูŠุบุฉ ุงู„ุชุตุฏูŠุฑ ุงู„ุฃู…ุซู„ ู„ุญุงู„ุชู‡ู… ุงู„ุงุณุชุฎุฏุงู…ูŠุฉ ุงู„ู…ุญุฏุฏุฉ ุจู†ุงุกู‹ ุนู„ู‰ ู…ุชุทู„ุจุงุช ุงู„ุณุฑุนุฉ ูˆุงู„ุฏู‚ุฉ. + +[ุฃู…ุซู„ุฉ ุงุฎุชุจุงุฑ ุงู„ุฃุฏุงุก](benchmark.md){ .md-button } diff --git a/ultralytics/docs/ar/modes/index.md:Zone.Identifier b/ultralytics/docs/ar/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/predict.md b/ultralytics/docs/ar/modes/predict.md new file mode 100755 index 0000000..69a9cb8 --- /dev/null +++ b/ultralytics/docs/ar/modes/predict.md @@ -0,0 +1,217 @@ +--- +comments: true +description: ุงูƒุชุดู ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ุชู†ุจุค YOLOv8 ู„ู…ู‡ุงู… ู…ุฎุชู„ูุฉ. ุชุนุฑู‘ู ุนู„ู‰ ู…ุตุงุฏุฑ ุงู„ุชู†ุจุค ุงู„ู…ุฎุชู„ูุฉ ู…ุซู„ ุงู„ุตูˆุฑ ูˆู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ูˆุชู†ุณูŠู‚ุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุฎุชู„ูุฉ. +keywords: UltralyticsุŒ YOLOv8ุŒ ูˆุถุน ุงู„ุชู†ุจุคุŒ ู…ุตุงุฏุฑ ุงู„ุชู†ุจุคุŒ ู…ู‡ุงู… ุงู„ุชู†ุจุคุŒ ูˆุถุน ุงู„ุชุฏูู‚ุŒ ู…ุนุงู„ุฌุฉ ุงู„ุตูˆุฑุŒ ู…ุนุงู„ุฌุฉ ุงู„ููŠุฏูŠูˆุŒ ุงู„ุชุนู„ู… ุงู„ุขู„ูŠุŒ ุงู„ุฐูƒุงุก ุงู„ุงุตุทู†ุงุนูŠ +--- + +# ุงู„ุชู†ุจุค ุจุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… Ultralytics YOLO + +ุงู„ุจูŠุฆุฉ ูˆุงู„ุชูƒุงู…ู„ ุงู„ุฎุงุตุฉ ุจู†ุธุงู… Ultralytics YOLO + +## ุงู„ู…ู‚ุฏู…ุฉ + +ููŠ ุนุงู„ู… ุงู„ุชุนู„ู… ุงู„ุขู„ูŠ ูˆุฑุคูŠุฉ ุงู„ุญุงุณูˆุจุŒ ูŠูุทู„ู‚ ุนู„ู‰ ุนู…ู„ูŠุฉ ุงุณุชุฎู„ุงุต ุงู„ู…ุนู†ู‰ ู…ู† ุงู„ุจูŠุงู†ุงุช ุงู„ุจุตุฑูŠุฉ ุงุณู… "ุงู„ุงุณุชุฏู„ุงู„" ุฃูˆ "ุงู„ุชู†ุจุค". ูŠูˆูุฑ YOLOv8 ู…ู† Ultralytics ู…ูŠุฒุฉ ู‚ูˆูŠุฉ ุชูุนุฑู ุจู€**ูˆุถุน ุงู„ุชู†ุจุค** ูˆุงู„ุชูŠ ุชู… ุชุตู…ูŠู…ู‡ุง ุฎุตูŠุตู‹ุง ู„ู„ุงุณุชุฏู„ุงู„ ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ูˆุจุฃุฏุงุก ุนุงู„ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ูˆุงุณุนุฉ ู…ู† ู…ุตุงุฏุฑ ุงู„ุจูŠุงู†ุงุช. + +

+
+ +
+ ุดุงู‡ุฏ: ูƒูŠููŠุฉ ุงุณุชุฎุฑุงุฌ ุงู„ู†ุชุงุฆุฌ ู…ู† ู†ู…ูˆุฐุฌ Ultralytics YOLOv8 ู„ู…ุดุงุฑูŠุน ู…ุฎุตุตุฉ. +

+ +## ุงู„ุชุทุจูŠู‚ุงุช ููŠ ุงู„ุนุงู„ู… ุงู„ุญู‚ูŠู‚ูŠ + +| ุงู„ุชุตู†ูŠุน | ุงู„ุฑูŠุงุถุฉ | ุงู„ุณู„ุงู…ุฉ | +|:-------------------------------------------------:|:----------------------------------------------------:|:-------------------------------------------:| +| ![Vehicle Spare Parts Detection][car spare parts] | ![Football Player Detection][football player detect] | ![People Fall Detection][human fall detect] | +| ูƒุดู ู‚ุทุน ุบูŠุงุฑ ุงู„ู…ุฑูƒุจุงุช | ูƒุดู ู„ุงุนุจูŠ ูƒุฑุฉ ุงู„ู‚ุฏู… | ูƒุดู ุณู‚ูˆุท ุงู„ุฃุดุฎุงุต | + +## ู„ู…ุงุฐุง ูŠุฌุจ ุงุณุชุฎุฏุงู… Ultralytics YOLO ู„ุนู…ู„ูŠุงุช ุงู„ุชู†ุจุคุŸ + +ููŠู…ุง ูŠู„ูŠ ุงู„ุฃุณุจุงุจ ุงู„ุชูŠ ูŠุฌุจ ุฃุฎุฐู‡ุง ููŠ ุงู„ุงุนุชุจุงุฑ ุนู†ุฏ ุงู„ุงุณุชูุงุฏุฉ ู…ู† ูˆุถุน ุงู„ุชู†ุจุค YOLOv8 ู„ุงุญุชูŠุงุฌุงุช ุงู„ุชู†ุจุค ุงู„ู…ุฎุชู„ูุฉ: + +- **ุงู„ุชู†ูˆุน:** ู‚ุงุฏุฑ ุนู„ู‰ ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ุตูˆุฑ ูˆู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆุŒ ูˆุญุชู‰ ุงู„ุชุฏูู‚ุงุช ุงู„ุญูŠุฉ. +- **ุงู„ุฃุฏุงุก:** ู…ุตู…ู… ู„ู„ุชุทุจูŠู‚ุงุช ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ูˆุงู„ู…ุนุงู„ุฌุฉ ุนุงู„ูŠุฉ ุงู„ุณุฑุนุฉ ุฏูˆู† ุงู„ุชุถุญูŠุฉ ุจุงู„ุฏู‚ุฉ. +- **ุณู‡ูˆู„ุฉ ุงู„ุงุณุชุฎุฏุงู…:** ูˆุงุฌู‡ุงุช Python ูˆุงู„ูˆุงุฌู‡ุฉ ุงู„ุณุทุฑูŠุฉ ู„ุชุณุฑูŠุน ุงู„ู†ุดุฑ ูˆุงู„ุงุฎุชุจุงุฑ. +- **ู‚ุงุจู„ูŠุฉ ุงู„ุชุฎุตูŠุต ุงู„ุนุงู„ูŠุฉ:** ุฅุนุฏุงุฏุงุช ูˆู…ุนู„ู…ุงุช ู…ุฎุชู„ูุฉ ู„ุถุจุท ุณู„ูˆูƒ ุงู„ุชู†ุจุค ุงู„ู†ู…ูˆุฐุฌ ูˆูู‚ู‹ุง ู„ู…ุชุทู„ุจุงุชูƒ ุงู„ู…ุญุฏุฏุฉ. + +### ุงู„ู…ูŠุฒุงุช ุงู„ุฑุฆูŠุณูŠุฉ ู„ูˆุถุน ุงู„ุชู†ุจุค + +ุชู… ุชุตู…ูŠู… ูˆุถุน ุงู„ุชู†ุจุค ุงู„ุฎุงุต ุจู€ YOLOv8 ู„ูŠูƒูˆู† ู‚ูˆูŠู‹ุง ูˆู…ุชุนุฏุฏ ุงู„ุงุณุชุฎุฏุงู…ุงุชุŒ ูˆูŠุชู…ูŠุฒ ุจู…ุง ูŠู„ูŠ: + +- **ุชูˆุงูู‚ ู…ุชุนุฏุฏ ู…ุตุงุฏุฑ ุงู„ุจูŠุงู†ุงุช:** ุณูˆุงุก ูƒุงู†ุช ุจูŠุงู†ุงุชูƒ ุนุจุงุฑุฉ ุนู† ุตูˆุฑ ูุฑุฏูŠุฉ ุฃูˆ ู…ุฌู…ูˆุนุฉ ู…ู† ุงู„ุตูˆุฑ ุฃูˆ ู…ู„ูุงุช ููŠุฏูŠูˆ ุฃูˆ ุชุฏูู‚ุงุช ููŠุฏูŠูˆ ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠุŒ ุณูŠุชู…ูƒู† ูˆุถุน ุงู„ุชู†ุจุค ู…ู† ุงู„ุชุนุงู…ู„ ู…ุนู‡ุง ุฌู…ูŠุนู‹ุง. +- **ูˆุถุน ุงู„ุชุฏูู‚ ุงู„ุญูŠ:** ุงุณุชุฎุฏู… ู…ูŠุฒุฉ ุงู„ุชุฏูู‚ ู„ุฅู†ุดุงุก ู…ูˆู„ุฏ ูุนู‘ุงู„ ู„ูƒุงุฆู†ุงุช "ุงู„ู†ุชุงุฆุฌ" ุจุงุณุชุฎุฏุงู… ุงู„ุฐุงูƒุฑุฉ. ู‚ู… ุจุชู…ูƒูŠู† ู‡ุฐุง ุจุชุนูŠูŠู† `stream=True` ููŠ ุทุฑูŠู‚ุฉ ุงุณุชุฏุนุงุก ุงู„ู…ุชู†ุจุฆ. +- **ู…ุนุงู„ุฌุฉ ุฏููุนุงุช:** ุงู„ู‚ุฏุฑุฉ ุนู„ู‰ ู…ุนุงู„ุฌุฉ ุงู„ุนุฏูŠุฏ ู…ู† ุงู„ุตูˆุฑ ุฃูˆ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ ููŠ ุฏููุนุฉ ูˆุงุญุฏุฉุŒ ู…ู…ุง ูŠุฒูŠุฏ ุฃูƒุซุฑ ู…ู† ุณุฑุนุฉ ุงู„ุชู†ุจุค. +- **ุณู‡ู„ ุงู„ุชูƒุงู…ู„:** ูŠุณู‡ู„ ุงู„ุฏู…ุฌ ู…ุน ุฎุทูˆุท ุงู„ุฃู†ุงุจูŠุจ ุงู„ุจูŠุงู†ูŠุฉ ุงู„ุญุงู„ูŠุฉ ูˆู…ูƒูˆู†ุงุช ุงู„ุจุฑุงู…ุฌ ุงู„ุฃุฎุฑู‰ ุจูุถู„ ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ ุงู„ุชุทุจูŠู‚ุงุช ุงู„ู…ุฑู†ุฉ. + +ุชูุฑุฌุน ู†ู…ุงุฐุฌ Ultralytics YOLO ุฅู…ุง ู‚ุงุฆู…ุฉ Python ู…ู† ูƒุงุฆู†ุงุช "ุงู„ู†ุชุงุฆุฌ" ุฃูˆ ู…ูู†ุดุฆ ุจุฑู…ุฌูŠุงู‹ ูุนู‘ุงู„ ู„ูƒุงุฆู†ุงุช ุงู„ู€ "ุงู„ู†ุชุงุฆุฌ" ููŠ ุญุงู„ ุชู… ุชู…ุฑูŠุฑ `stream=True` ุฅู„ู‰ ุงู„ู†ู…ูˆุฐุฌ ุฃุซู†ุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค: + +!!! 
Example "ุงู„ุชู†ุจุค" + + === "ุงู„ุนูˆุฏุฉ ุจู‚ุงุฆู…ุฉ ูˆุงุญุฏุฉ ุจุงุณุชุฎุฏุงู… `stream=False`" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ูุฏุฑูŽู‘ุจ ู…ุณุจู‚ู‹ุง + + # ุชุดุบูŠู„ ุงู„ุชู†ุจุค ุจุฏููุนุฉ ุนู„ู‰ ู‚ุงุฆู…ุฉ ู…ู† ุงู„ุตูˆุฑ + results = model(['im1.jpg', 'im2.jpg']) # ุงู„ุนูˆุฏุฉ ุจู‚ุงุฆู…ุฉ ู…ู† ูƒุงุฆู†ุงุช 'ุงู„ู†ุชุงุฆุฌ' + + # ู…ุนุงู„ุฌุฉ ู‚ุงุฆู…ุฉ ุงู„ู†ุชุงุฆุฌ + for result in results: + boxes = result.boxes # ูƒุงุฆู† Boxes ู„ู…ุฎุฑุฌุงุช bbox + masks = result.masks # ูƒุงุฆู† Masks ู„ู…ุฎุฑุฌุงุช ู‚ู†ูˆุงุช ุงู„ูุตู„ ุงู„ุนู†ู‚ูˆุฏูŠ + keypoints = result.keypoints # ูƒุงุฆู† Keypoints ู„ู…ุฎุฑุฌุงุช ุงู„ุงุชุฌุงู‡ุงุช + probs = result.probs # ูƒุงุฆู† Probs ู„ู…ุฎุฑุฌุงุช ุงู„ุชุตู†ูŠู + ``` + + === "ุงู„ุนูˆุฏุฉ ุจู…ูู†ุดุฆ ูุนุงู„ ู…ุน `stream=True`" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ูุฏุฑูŽู‘ุจ ู…ุณุจู‚ู‹ุง + + # ุชุดุบูŠู„ ุงู„ุชู†ุจุค ุจุฏููุนุฉ ุนู„ู‰ ู‚ุงุฆู…ุฉ ู…ู† ุงู„ุตูˆุฑ + results = model(['im1.jpg', 'im2.jpg'], stream=True) # ุงู„ุนูˆุฏุฉ ุจู…ูู†ุดุฆ ูุนุงู„ ู„ูƒุงุฆู†ุงุช 'ุงู„ู†ุชุงุฆุฌ' + + # ู…ุนุงู„ุฌุฉ ุงู„ู…ูู†ุดุฆ ุงู„ูุนุงู„ + for result in results: + boxes = result.boxes # ูƒุงุฆู† Boxes ู„ู…ุฎุฑุฌุงุช bbox + masks = result.masks # ูƒุงุฆู† Masks ู„ู…ุฎุฑุฌุงุช ู‚ู†ูˆุงุช ุงู„ูุตู„ ุงู„ุนู†ู‚ูˆุฏูŠ + keypoints = result.keypoints # ูƒุงุฆู† Keypoints ู„ู…ุฎุฑุฌุงุช ุงู„ุงุชุฌุงู‡ุงุช + probs = result.probs # ูƒุงุฆู† Probs ู„ู…ุฎุฑุฌุงุช ุงู„ุชุตู†ูŠู + ``` + +## ู…ุตุงุฏุฑ ุงู„ุชู†ุจุค + +ูŠู…ูƒู† ู„ู€ YOLOv8 ู…ุนุงู„ุฌุฉ ุฃู†ูˆุงุน ู…ุฎุชู„ูุฉ ู…ู† ู…ุตุงุฏุฑ ุงู„ุฅุฏุฎุงู„ ู„ุนู…ู„ูŠุฉ ุงู„ุงุณุชุฏู„ุงู„ุŒ ุนู„ู‰ ุงู„ู†ุญูˆ ุงู„ู…ูˆุถุญ ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ุชุดู…ู„ ุงู„ู…ุตุงุฏุฑ ุงู„ุตูˆุฑ ุงู„ุซุงุจุชุฉ ูˆุชูŠุงุฑุงุช ุงู„ููŠุฏูŠูˆ ูˆุชู†ุณูŠู‚ุงุช ู…ุฎุชู„ูุฉ ู„ู„ุจูŠุงู†ุงุช. ูŠุดูŠุฑ ุงู„ุฌุฏูˆู„ ุฃูŠุถู‹ุง ุฅู„ู‰ ู…ุง ุฅุฐุง ูƒุงู† ูŠู…ูƒู† ุงุณุชุฎุฏุงู… ูƒู„ ู…ุตุฏุฑ ููŠ ูˆุถุน ุงู„ุชุฏูู‚ ุจุงุณุชุฎุฏุงู… ุงู„ูˆุณูŠุท `stream=True` โœ…. ูŠุนุชุจุฑ ูˆุถุน ุงู„ุชุฏูู‚ ู…ููŠุฏู‹ุง ู„ู…ุนุงู„ุฌุฉ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุฃูˆ ุงู„ุชุฏูู‚ุงุช ุงู„ุญูŠุฉ ุญูŠุซ ูŠู‚ูˆู… ุจุฅู†ุดุงุก ู…ูู†ุดุฆ ู„ู„ู†ุชุงุฆุฌ ุจุฏู„ุงู‹ ู…ู† ุชุญู…ูŠู„ ุฌู…ูŠุน ุงู„ุฅุทุงุฑุงุช ููŠ ุงู„ุฐุงูƒุฑุฉ. + +!!! Tip "ุทุฑุงุฒ" + + ุงุณุชุฎุฏู… `stream=True` ู„ู…ุนุงู„ุฌุฉ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุงู„ุทูˆูŠู„ุฉ ุฃูˆ ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ูƒุจูŠุฑุฉ ู„ุฅุฏุงุฑุฉ ุงู„ุฐุงูƒุฑุฉ ุจูƒูุงุกุฉ. ุนู†ุฏู…ุง ุชูƒูˆู† ุงู„ู‚ูŠู…ุฉ ู…ุณุงูˆูŠุฉ ู„ู€ `stream=False`ุŒ ูŠุชู… ุชุฎุฒูŠู† ุงู„ู†ุชุงุฆุฌ ู„ุฌู…ูŠุน ุงู„ุฅุทุงุฑุงุช ุฃูˆ ู†ู‚ุงุท ุงู„ุจูŠุงู†ุงุช ููŠ ุงู„ุฐุงูƒุฑุฉุŒ ูˆุงู„ุชูŠ ูŠู…ูƒู† ุฃู† ุชุชุฑุงูƒู… ุจุณุฑุนุฉ ูˆุชูุณุจูู‘ุจ ุฃุฎุทุงุก ุงู„ุฐุงูƒุฑุฉ ุบูŠุฑ ุงู„ูƒุงููŠุฉ ู„ู„ู…ุฏุฎู„ุงุช ุงู„ูƒุจูŠุฑุฉ. ุนู„ู‰ ุงู„ู†ู‚ูŠุถ ู…ู† ุฐู„ูƒุŒ ูŠุณุชุฎุฏู… ุงู„ุชุฏูู‚ `stream=True` ู…ูˆู„ุฏู‹ุง ูŠูุจู‚ูŠ ู†ุชุงุฆุฌ ุงู„ุฅุทุงุฑ ุงู„ุญุงู„ูŠ ุฃูˆ ู†ู‚ุทุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุญุงู„ูŠุฉ ููŠ ุงู„ุฐุงูƒุฑุฉ ูู‚ุทุŒ ู…ู…ุง ูŠู‚ู„ู„ ุจุดูƒู„ ูƒุจูŠุฑ ู…ู† ุงุณุชู‡ู„ุงูƒ ุงู„ุฐุงูƒุฑุฉ ูˆูŠู…ู†ุน ู…ุดูƒู„ุงุช ุนุฏู… ูƒูุงูŠุฉ ุงู„ุฐุงูƒุฑุฉ. 
+ +| ู…ุตุฏุฑ | ุงู„ูˆุณูŠุท | ุงู„ู†ูˆุน | ุงู„ู…ู„ุงุญุธุงุช | +|------------------|--------------------------------------------|-----------------|----------------------------------------------------------------------------------------------| +| ุตูˆุฑุฉ | `'ุตูˆุฑุฉ.jpg'` | `str` or `Path` | ู…ู„ู ุตูˆุฑุฉ ูˆุงุญุฏุฉ. | +| ุฑุงุจุท URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | ุฑุงุจุท URL ู„ุตูˆุฑุฉ ู…ุง. | +| ู„ู‚ุทุฉ ุดุงุดุฉ ุจุฑู…ุฌูŠุฉ | `'ุงู„ุดุงุดุฉ'` | `str` | ู‚ู… ุจุงู„ุชู‚ุงุท ู„ู‚ุทุฉ ุดุงุดุฉ ุจุฑู…ุฌูŠุฉ. | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | ุงู„ุตูŠุบุฉ HWC ู…ุน ู‚ู†ูˆุงุช RGB. | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | ุงู„ุตูŠุบุฉ HWC ู…ุน ู‚ู†ูˆุงุช BGR `uint8 (0-255)`. | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | ุงู„ุตูŠุบุฉ HWC ู…ุน ู‚ู†ูˆุงุช BGR `uint8 (0-255)`. | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | ุงู„ุตูŠุบุฉ BCHW ู…ุน ู‚ู†ูˆุงุช RGB `float32 (0.0-1.0)`. | +| CSV | `'ุงู„ู…ุตุงุฏุฑ.csv'` | `str` or `Path` | ู…ู„ู CSV ูŠุญุชูˆูŠ ุนู„ู‰ ู…ุณุงุฑุงุช ุงู„ุตูˆุฑ ุฃูˆ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุฃูˆ ุงู„ู…ุฌู„ุฏุงุช. | +| ููŠุฏูŠูˆ โœ… | `'ุงู„ููŠุฏูŠูˆ.mp4'` | `str` or `Path` | ู…ู„ู ููŠุฏูŠูˆ ุจุชู†ุณูŠู‚ุงุช ู…ุซู„ MP4 ูˆ AVI ูˆู…ุง ุฅู„ู‰ ุฐู„ูƒ. | +| ุงู„ุฏู„ูŠู„ โœ… | `'ุงู„ู…ุณุงุฑ/'` | `str` or `Path` | ู…ุณุงุฑ ุฅู„ู‰ ู…ุฌู„ุฏ ูŠุญุชูˆูŠ ุนู„ู‰ ุตูˆุฑ ุฃูˆ ู…ู‚ุงุทุน ููŠุฏูŠูˆ. | +| glob โœ… | `'ุงู„ู…ุณุงุฑ/*.jpg'` | `str` | ู†ู…ุท glob ู„ู…ุทุงุจู‚ุฉ ุนุฏุฉ ู…ู„ูุงุช. ุงุณุชุฎุฏู… ุญุฑู `*` ูƒุญุฑุทูˆู…. | +| ูŠูˆุชูŠูˆุจ โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | ุฑุงุจุท URL ุฅู„ู‰ ููŠุฏูŠูˆ ูŠูˆุชูŠูˆุจ. | +| ุชุฏูู‚ โœ… | `'rtsp://example.com/media.mp4'` | `str` | ุนู†ูˆุงู† URL ู„ุจุฑูˆุชูˆูƒูˆู„ุงุช ุงู„ุชุฏูู‚ ู…ุซู„ RTSP ูˆ RTMP ูˆ TCP ุฃูˆ ุนู†ูˆุงู† IP. | +| ุชุฏูู‚ ู…ุชุนุฏุฏ โœ… | `'list.streams'` | `str` or `Path` | ู…ู„ู ู†ุตูŠ `*.streams` ู…ุน ุนู†ูˆุงู† ุชุฏูู‚ URL ููŠ ูƒู„ ุตูุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ 8 ุชุฏูู‚ุงุช ุณุชุนู…ู„ ุจุญุฌู… ุฏููุนุฉ 8. | + +ููŠู…ุง ูŠู„ูŠ ุฃู…ุซู„ุฉ ุชุนู„ูŠู…ุงุช ุจุฑู…ุฌูŠุฉ ู„ุงุณุชุฎุฏุงู… ูƒู„ ู†ูˆุน ู…ู† ู…ุตุฏุฑ: + +!!! Example "ู…ุตุงุฏุฑ ุงู„ุชู†ุจุค" + + === "ุงู„ุตูˆุฑุฉ" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ู…ู„ู ุตูˆุฑุฉ. + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ุชู†ุดูŠุท ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ู„ู…ู„ู ุงู„ุตูˆุฑุฉ + source = 'ุงู„ู…ุณุงุฑ/ุฅู„ู‰/ุงู„ุตูˆุฑุฉ.jpg' + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + + === "ู„ู‚ุทุฉ ุดุงุดุฉ ุจุฑู…ุฌูŠุฉ" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ู…ุญุชูˆู‰ ุงู„ุดุงุดุฉ ุงู„ุญุงู„ูŠ ูƒู„ู‚ุทุฉ ุดุงุดุฉ. + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ุชุนุฑูŠู ุงู„ู„ู‚ุทุฉ ุงู„ุญุงู„ูŠุฉ ูƒู…ุตุฏุฑ + source = 'ุงู„ุดุงุดุฉ' + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + + === "ุฑุงุจุท URL" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ ู…ูˆุฌูˆุฏุฉ ุนู„ู‰ ุงู„ุฅู†ุชุฑู†ุช ุฃูˆ ููŠุฏูŠูˆ. 
+ ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ุชุนุฑูŠู ุฑุงุจุท ุงู„ุตูˆุฑุฉ ุฃูˆ ุงู„ููŠุฏูŠูˆ ุนู„ู‰ ุงู„ุฅู†ุชุฑู†ุช + source = 'https://ultralytics.com/images/bus.jpg' + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + + === "PIL" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ ู…ูุชูˆุญุฉ ุจูˆุงุณุทุฉ ู…ูƒุชุจุฉ Python Imaging Library (PIL). + ```python + from PIL import Image + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ูุชุญ ุตูˆุฑุฉ ุจุงุณุชุฎุฏุงู… PIL + source = Image.open('ุงู„ู…ุณุงุฑ/ุฅู„ู‰/ุงู„ุตูˆุฑุฉ.jpg') + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + + === "OpenCV" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ ู…ูู‚ุฑูˆุกุฉ ุจูˆุงุณุทุฉ OpenCV. + ```python + import cv2 + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ู‚ุฑุงุกุฉ ุตูˆุฑุฉ ุจุงุณุชุฎุฏุงู… OpenCV + source = cv2.imread('ุงู„ู…ุณุงุฑ/ุฅู„ู‰/ุงู„ุตูˆุฑุฉ.jpg') + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + + === "numpy" + ู‚ู… ุจุฃุฌุฑุงุก ุนู…ู„ูŠุฉ ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ ู…ูู…ุซู„ุฉ ูƒู…ุตููˆูุฉ numpy. + ```python + import numpy as np + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + model = YOLO('yolov8n.pt') + + # ุฅู†ุดุงุก ู…ุตููˆูุฉ numpy ุนุดูˆุงุฆูŠุฉ ููŠ ุตูŠุบุฉ HWC (640, 640, 3) ุจู‚ูŠู… ุจูŠู† [0, 255] ูˆู†ูˆุน uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # ุงู„ุฌู…ุน ุจูŠู† ุงู„ุชู†ุจุค ุนู„ู‰ ุงู„ู…ุตุฏุฑ + results = model(source) # ู‚ุงุฆู…ุฉ ูƒุงุฆู†ุงุช ุงู„ู†ุชุงุฆุฌ + ``` + +[car spare parts]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1 + +[football player detect]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442 + +[human fall detect]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43 diff --git a/ultralytics/docs/ar/modes/predict.md:Zone.Identifier b/ultralytics/docs/ar/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/track.md b/ultralytics/docs/ar/modes/track.md new file mode 100755 index 0000000..5300ecf --- /dev/null +++ b/ultralytics/docs/ar/modes/track.md @@ -0,0 +1,360 @@ +--- +comments: true +description: ุชุนุฑู ุนู„ู‰ ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… Ultralytics YOLO ู„ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุชุฏูู‚ุงุช ุงู„ููŠุฏูŠูˆ. ุฃุฏู„ุฉ ู„ุงุณุชุฎุฏุงู… ู…ุฎุชู„ู ุงู„ู…ุชุชุจุนูŠู† ูˆุชุฎุตูŠุต ุฅุนุฏุงุฏุงุช ุงู„ู…ุชุชุจุน. 
+keywords: UltralyticsุŒ YOLOุŒ ุชุชุจุน ุงู„ูƒุงุฆู†ุงุชุŒ ุชุฏูู‚ุงุช ุงู„ููŠุฏูŠูˆุŒ BoT-SORTุŒ ByteTrackุŒ ุฏู„ูŠู„ PythonุŒ ุฏู„ูŠู„ ุฎุท ุงู„ุฃูˆุงู…ุฑ (CLI) +--- + +# ุชุชุจุน ุนุฏุฉ ูƒุงุฆู†ุงุช ุจุงุณุชุฎุฏุงู… Ultralytics YOLO + +Multi-object tracking examples + +ูŠุนุฏ ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ู…ุฌุงู„ ุชุญู„ูŠู„ ุงู„ููŠุฏูŠูˆ ู…ู‡ู…ุฉ ุญุฑุฌุฉ ู„ูŠุณ ูู‚ุท ููŠ ุชุญุฏูŠุฏ ู…ูˆู‚ุน ูˆูุฆุฉ ุงู„ูƒุงุฆู†ุงุช ุฏุงุฎู„ ุงู„ุฅุทุงุฑุŒ ูˆู„ูƒู† ุฃูŠุถู‹ุง ููŠ ุงู„ุญูุงุธ ุนู„ู‰ ู‡ูˆูŠุฉ ูุฑูŠุฏุฉ ู„ูƒู„ ูƒุงุฆู† ูŠุชู… ุงูƒุชุดุงูู‡ ู…ุน ุชู‚ุฏู… ุงู„ููŠุฏูŠูˆ. ุชูƒุงุฏ ุงู„ุชุทุจูŠู‚ุงุช ู„ุง ุชุนุฏ ูˆู„ุง ุชุญุตู‰ - ุชุชุฑุงูˆุญ ู…ู† ุงู„ู…ุฑุงู‚ุจุฉ ูˆุงู„ุฃู…ุงู† ุฅู„ู‰ ุชุญู„ูŠู„ ุงู„ุฑูŠุงุถุฉ ุงู„ููˆุฑูŠุฉ. + +## ู„ู…ุงุฐุง ูŠุฌุจ ุงุฎุชูŠุงุฑ Ultralytics YOLO ู„ุชุชุจุน ุงู„ูƒุงุฆู†ุงุชุŸ + +ุฅู† ู…ุฎุฑุฌุงุช ุงู„ู…ุชุชุจุนูŠู† ููŠ Ultralytics ู…ุชุณู‚ุฉ ู…ุน ูƒุดู ุงู„ูƒุงุฆู†ุงุช ุงู„ู‚ูŠุงุณูŠ ูˆู„ู‡ุง ู‚ูŠู…ุฉ ู…ุถุงูุฉ ู…ู† ู‡ูˆูŠุงุช ุงู„ูƒุงุฆู†ุงุช. ู‡ุฐุง ูŠุฌุนู„ ู…ู† ุงู„ุณู‡ู„ ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ุชุฏูู‚ุงุช ุงู„ููŠุฏูŠูˆ ูˆุฃุฏุงุก ุงู„ุชุญู„ูŠู„ุงุช ุงู„ุชุงู„ูŠุฉ. ุฅู„ูŠูƒ ู„ู…ุงุฐุง ูŠุฌุจ ุฃู† ุชููƒุฑ ููŠ ุงุณุชุฎุฏุงู… Ultralytics YOLO ู„ุชู„ุจูŠุฉ ุงุญุชูŠุงุฌุงุช ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ุงู„ุฎุงุตุฉ ุจูƒ: + +- **ุงู„ูƒูุงุกุฉ:** ู…ุนุงู„ุฌุฉ ุชุฏูู‚ุงุช ุงู„ููŠุฏูŠูˆ ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ุฏูˆู† ุงู„ู…ุณุงูˆู…ุฉ ุนู„ู‰ ุงู„ุฏู‚ุฉ. +- **ุงู„ู…ุฑูˆู†ุฉ:** ูŠุฏุนู… ุงู„ุนุฏูŠุฏ ู…ู† ุฎูˆุงุฑุฒู…ูŠุงุช ุงู„ุชุชุจุน ูˆุงู„ุชูƒูˆูŠู†ุงุช. +- **ุณู‡ูˆู„ุฉ ุงู„ุงุณุชุฎุฏุงู…:** ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ ุชุทุจูŠู‚ุงุช ุจุณูŠุทุฉ ู„ู„ุบุงูŠุฉ ูˆุฎูŠุงุฑุงุช ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ู„ู„ุงุณุชุฏู…ุงุฌ ุงู„ุณุฑูŠุน ูˆุงู„ู†ุดุฑ. +- **ุฅู…ูƒุงู†ูŠุฉ ุงู„ุชุฎุตูŠุต:** ุณู‡ู„ ุงู„ุงุณุชุฎุฏุงู… ู…ุน ู†ู…ุงุฐุฌ YOLO ู…ุฏุฑุจุฉ ู…ุฎุตุตุฉุŒ ู…ู…ุง ูŠุณู…ุญ ุจุงู„ุงูƒุชู…ุงู„ ููŠ ุงู„ุชุทุจูŠู‚ุงุช ุฐุงุช ุงู„ู†ุทุงู‚ ุงู„ุฎุงุต. + +

+
+ +
+ ุดุงู‡ุฏ: ูƒุดู ุงู„ูƒุงุฆู†ุงุช ูˆุชุชุจุนู‡ุง ุจุงุณุชุฎุฏุงู… Ultralytics YOLOv8. +

+ +## ุชุทุจูŠู‚ุงุช ููŠ ุงู„ุนุงู„ู… ุงู„ุญู‚ูŠู‚ูŠ + +| ุงู„ู†ู‚ู„ | ุงู„ุจูŠุน ุจุงู„ุชุฌุฒุฆุฉ | ุงู„ุงุณุชุฒุฑุงุน ุงู„ู…ุงุฆูŠ | +|:----------------------------------:|:--------------------------------:|:----------------------------:| +| ![Vehicle Tracking][vehicle track] | ![People Tracking][people track] | ![Fish Tracking][fish track] | +| ุชุชุจุน ุงู„ู…ุฑูƒุจุงุช | ุชุชุจุน ุงู„ุฃุดุฎุงุต | ุชุชุจุน ุงู„ุฃุณู…ุงูƒ | + +## ู…ู„ุงู…ุญ ุจู„ู…ุญุฉ + +ูŠูˆูุฑ Ultralytics YOLO ู…ูŠุฒุงุช ูƒุดู ุงู„ูƒุงุฆู†ุงุช ู„ุชูˆููŠุฑ ุชุชุจุน ูุนุงู„ ูˆู…ุชุนุฏุฏ ุงู„ุงุณุชุฎุฏุงู…ุงุช ู„ู„ูƒุงุฆู†ุงุช: + +- **ุชุชุจุน ููˆุฑูŠ:** ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ุจุณู„ุงุณุฉ ููŠ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุฐุงุช ู…ุนุฏู„ ุงู„ุฅุทุงุฑุงุช ุงู„ุนุงู„ูŠ. +- **ุฏุนู… ุนุฏุฉ ู…ุชุชุจุนูŠู†:** ุงุฎุชูŠุงุฑ ุจูŠู† ู…ุฌู…ูˆุนุฉ ู…ุชู†ูˆุนุฉ ู…ู† ุฎูˆุงุฑุฒู…ูŠุงุช ุงู„ุชุชุจุน ุงู„ู…ุนุชู…ุฏุฉ. +- **ุชุฎุตูŠุต ุชูƒูˆูŠู†ุงุช ุงู„ู…ุชุชุจุน ุงู„ู…ุชุงุญุฉ:** ุถุจุท ุฎูˆุงุฑุฒู…ูŠุฉ ุงู„ุชุชุจุน ู„ุชู„ุจูŠุฉ ุงู„ู…ุชุทู„ุจุงุช ุงู„ู…ุญุฏุฏุฉ ุนู† ุทุฑูŠู‚ ุถุจุท ู…ุฎุชู„ู ุงู„ู…ุนู„ู…ุงุช. + +## ู…ุชุชุจุนูˆู† ู…ุชุงุญูˆู† + +ูŠุฏุนู… Ultralytics YOLO ุงู„ุฎูˆุงุฑุฒู…ูŠุงุช ุงู„ุชุงู„ูŠุฉ ู„ู„ุชุชุจุน. ูŠู…ูƒู† ุชู…ูƒูŠู†ู‡ุง ุนู† ุทุฑูŠู‚ ุชู…ุฑูŠุฑ ู…ู„ู ุชูƒูˆูŠู† YAML ุฐูŠ ุงู„ุตู„ุฉ ู…ุซู„ "tracker=tracker_type.yaml": + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - ุงุณุชุฎุฏู… `botsort.yaml` ู„ุชู…ูƒูŠู† ู‡ุฐุง ุงู„ู…ุชุชุจุน. +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - ุงุณุชุฎุฏู… `bytetrack.yaml` ู„ุชู…ูƒูŠู† ู‡ุฐุง ุงู„ู…ุชุชุจุน. + +ุงู„ู…ุชุชุจุน ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ BoT-SORT. + +## ุชุชุจุน + +ู„ุชุดุบูŠู„ ุงู„ู…ุชุชุจุน ุนู„ู‰ ุชุฏูู‚ุงุช ุงู„ููŠุฏูŠูˆุŒ ุงุณุชุฎุฏู… ู†ู…ูˆุฐุฌ ุชุญุฏูŠุฏ (Detect) ุฃูˆ ู‚ุทุน (Segment) ุฃูˆ ูˆุถุน (Pose) ู…ุฏุฑุจ ู…ุซู„ YOLOv8n ูˆ YOLOv8n-seg ูˆ YOLOv8n-pose. + +!!! 
Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ ุฃูˆ ู…ุฎุตุต + model = YOLO('yolov8n.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ Detect + model = YOLO('yolov8n-seg.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ Segment + model = YOLO('yolov8n-pose.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ Pose + model = YOLO('path/to/best.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต ู…ุฏุฑุจ + + # ู‚ู… ุจุชู†ููŠุฐ ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True) # ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ุงู„ู…ุชุชุจุน ุงู„ุงูุชุฑุงุถูŠ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml") # ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ู…ุชุชุจุน ByteTrack + ``` + + === "CLI" + + ```bash + # ู‚ู… ุจุชู†ููŠุฐ ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ู…ุฎุชู„ู ุงู„ู†ู…ุงุฐุฌ ุจุงุณุชุฎุฏุงู… ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" # ู†ู…ูˆุฐุฌ Detect ุฑุณู…ูŠ + yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4" # ู†ู…ูˆุฐุฌ Segment ุฑุณู…ูŠ + yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4" # ู†ู…ูˆุฐุฌ Pose ุฑุณู…ูŠ + yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4" # ุชู… ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + + # ุชุชุจุน ุนู† ุทุฑูŠู‚ ByteTrack ู…ุชุชุจุน + yolo track model=path/to/best.pt tracker="bytetrack.yaml" + ``` + +ูƒู…ุง ูŠุธู‡ุฑ ููŠ ุงู„ุงุณุชุฎุฏุงู… ุฃุนู„ุงู‡ุŒ ูŠุชูˆูุฑ ุงู„ุชุชุจุน ู„ุฌู…ูŠุน ู†ู…ุงุฐุฌ Detect ูˆ Segment ูˆ Pose ุงู„ุชูŠ ุชุนู…ู„ ุนู„ู‰ ู…ู‚ุงุทุน ุงู„ููŠุฏูŠูˆ ุฃูˆ ู…ุตุงุฏุฑ ุงู„ุจุซ. + +## ุงู„ุงุนุฏุงุฏุงุช + +### ู…ุนุงู…ู„ุงุช ุงู„ุชุชุจุน + +ุชุชุดุงุฑูƒ ุฅุนุฏุงุฏุงุช ุงู„ุชุชุจุน ุงู„ุฎุตุงุฆุต ู…ุน ูˆุถุน ุงู„ุชูˆู‚ุนุงุช (Predict)ุŒ ู…ุซู„ `conf` ูˆ `iou` ูˆ `show`. ู„ู„ุญุตูˆู„ ุนู„ู‰ ู…ุฒูŠุฏ ู…ู† ุงู„ุชูƒูˆูŠู†ุงุชุŒ ุฑุงุฌุน ุตูุญุฉ ุงู„ู†ู…ูˆุฐุฌ [Predict](../modes/predict.md#inference-arguments). + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชูƒูˆูŠู† ู…ุนู„ู…ุงุช ุงู„ุชุชุจุน ูˆู‚ู… ุจุชุดุบูŠู„ ุงู„ุชุชุจุน + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True) + ``` + + === "CLI" + + ```bash + # ู‚ู… ุจุชูƒูˆูŠู† ู…ุนู„ู…ุงุช ุงู„ุชุชุจุน ูˆู‚ู… ุจุชุดุบูŠู„ ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3, iou=0.5 show + ``` + +### ุงุฎุชูŠุงุฑ ุงู„ู…ุชุชุจุน + +ูŠุชูŠุญ ู„ูƒ Ultralytics ุฃูŠุถู‹ุง ุงุณุชุฎุฏุงู… ู…ู„ู ุชูƒูˆูŠู† ู…ุชุชุจุน ู…ุนุฏู„. ู„ู„ู‚ูŠุงู… ุจุฐู„ูƒุŒ ุฃู†ู‚ู„ ู†ุณุฎุฉ ู…ู† ู…ู„ู ุชูƒูˆูŠู† ุงู„ู…ุชุชุจุน (ู…ุซู„ `custom_tracker.yaml`) ู…ู† [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) ูˆู‚ู… ุจุชุนุฏูŠู„ ุฃูŠ ุชูƒูˆูŠู†ุงุช (ุจุงุณุชุซู†ุงุก `tracker_type`) ุญุณุจ ุงุญุชูŠุงุฌุงุชูƒ. + +!!! 
Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ูˆุชุดุบูŠู„ ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ู…ู„ู ุชูƒูˆูŠู† ู…ุฎุตุต + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ูˆุชุดุบูŠู„ ุงู„ุชุชุจุน ุจุงุณุชุฎุฏุงู… ู…ู„ู ุชูƒูˆูŠู† ู…ุฎุตุต ุจุงุณุชุฎุฏุงู… ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ุดุงู…ู„ุฉ ู…ู† ูˆุณุงุฆุท ุชุชุจุนุŒ ุฑุงุฌุน ุงู„ุตูุญุฉ [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers). + +## ุฃู…ุซู„ุฉ Python + +### ุงู„ุญูุงุธ ุนู„ู‰ ุงู„ู…ุณุงุฑุงุช ุงู„ุชูƒุฑุงุฑูŠุฉ + +ููŠู…ุง ูŠู„ูŠ ุณูƒุฑูŠุจุช Python ุจุงุณุชุฎุฏุงู… OpenCV (cv2) ูˆ YOLOv8 ู„ุชุดุบูŠู„ ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ุนู„ู‰ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ. ูŠูุชุฑุถ ู‡ุฐุง ุงู„ุณูƒุฑูŠุจุช ุฃู†ูƒ ู‚ุฏ ู‚ู…ุช ุจุงู„ูุนู„ ุจุชุซุจูŠุช ุงู„ุญุฒู… ุงู„ู„ุงุฒู…ุฉ (opencv-python ูˆ ultralytics). ุงู„ู…ุนุงู…ู„ `persist=True` ูŠุฎุจุฑ ุงู„ู…ุชุชุจุน ุฃู† ุงู„ุตูˆุฑุฉ ุงู„ุญุงู„ูŠุฉ ุฃูˆ ุงู„ุฅุทุงุฑ ุงู„ุชุงู„ูŠ ููŠ ุงู„ุชุณู„ุณู„ ูˆู…ู† ุงู„ู…ุชูˆู‚ุน ุฃู† ูŠุชูˆูุฑ ู…ุณุงุฑุงุช ู…ู† ุงู„ุตูˆุฑุฉ ุงู„ุณุงุจู‚ุฉ ููŠ ุงู„ุตูˆุฑุฉ ุงู„ุญุงู„ูŠุฉ. + +!!! Example "For-loop ู„ู„ุชุฏูู‚ ู…ุน ุงู„ุชุชุจุน" + + ```python + import cv2 + from ultralytics import YOLO + + # ุญู…ู‘ู„ ู†ู…ูˆุฐุฌ YOLOv8 + model = YOLO('yolov8n.pt') + + # ุงูุชุญ ู…ู„ู ุงู„ููŠุฏูŠูˆ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ุชุญู„ู‚ ุนุจุฑ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ + while cap.isOpened(): + # ู‚ุฑุงุกุฉ ุงู„ุฅุทุงุฑ ู…ู† ุงู„ููŠุฏูŠูˆ + success, frame = cap.read() + + if success: + # ุชุดุบูŠู„ ุชุชุจุน YOLOv8 ุนู„ู‰ ุงู„ุฅุทุงุฑ ุŒ ูˆุญูุธ ุงู„ู…ุณุงุฑุงุช ุจูŠู† ุงู„ุฅุทุงุฑุงุช + results = model.track(frame, persist=True) + + # ุชุตูˆุฑ ุงู„ู†ุชุงุฆุฌ ุนู„ู‰ ุงู„ุฅุทุงุฑ + annotated_frame = results[0].plot() + + # ุนุฑุถ ุงู„ุฅุทุงุฑ ุงู„ู…ุนู„ู‚ + cv2.imshow("YOLOv8 Tracking", annotated_frame) + + # ูƒุณุฑ ุงู„ู„ูˆุจ ููŠ ุญุงู„ุฉ ุงู„ุถุบุท ุนู„ู‰ 'q' + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ูƒุณุฑ ุงู„ู„ูˆุจ ููŠ ู†ู‡ุงูŠุฉ ุงู„ููŠุฏูŠูˆ + break + + # ุฅุทู„ุงู‚ ูƒุงุฆู† ุงู„ุชู‚ุงุท ุงู„ููŠุฏูŠูˆ ูˆุฅุบู„ุงู‚ ู†ุงูุฐุฉ ุงู„ุนุฑุถ + cap.release() + cv2.destroyAllWindows() + ``` + +ูŠุฑุฌู‰ ู…ู„ุงุญุธุฉ ุงู„ุชุบูŠูŠุฑ ู…ู† `model(frame)` ุฅู„ู‰ `model.track(frame)` ุŒ ู…ู…ุง ูŠู…ูƒู† ุงู„ุชุชุจุน ุจุฏู„ุงู‹ ู…ู† ุงู„ูƒุดู ุงู„ุจุณูŠุท. ุณูŠุชู… ุชุดุบูŠู„ ุงู„ุจุฑู†ุงู…ุฌ ุงู„ู…ุนุฏู„ ุนู„ู‰ ูƒู„ ุฅุทุงุฑ ููŠุฏูŠูˆ ูˆุชุตูˆุฑ ุงู„ู†ุชุงุฆุฌ ูˆุนุฑุถู‡ุง ููŠ ู†ุงูุฐุฉ. ูŠู…ูƒู† ุงู„ุฎุฑูˆุฌ ู…ู† ุงู„ุญู„ู‚ุฉ ุนู† ุทุฑูŠู‚ ุงู„ุถุบุท ุนู„ู‰ 'q'. + +### ุฑุณู… ุงู„ู…ุณุงุฑุงุช ุนุจุฑ ุงู„ูˆู‚ุช + +ูŠู…ูƒู† ุฃู† ูŠูˆูุฑ ุฑุณู… ุงู„ู…ุณุงุฑุงุช ุงู„ูƒุงุฆู†ูŠุฉ ุนุจุฑ ุงู„ุฅุทุงุฑุงุช ุงู„ู…ุชุชุงู„ูŠุฉ ุฅุดุงุฑุงุช ู‚ูŠู…ุฉ ุญูˆู„ ุฃู†ู…ุงุท ุงู„ุญุฑูƒุฉ ูˆุงู„ุณู„ูˆูƒ ู„ู„ูƒุงุฆู†ุงุช ุงู„ู…ูƒุชุดูุฉ ููŠ ุงู„ููŠุฏูŠูˆ. ุจุงุณุชุฎุฏุงู… Ultralytics YOLOv8 ุŒ ูŠุนุฏ ุชุตูˆูŠุฑ ู‡ุฐู‡ ุงู„ู…ุณุงุฑุงุช ุนู…ู„ูŠุฉ ุณู„ุณุฉ ูˆูุนุงู„ุฉ. + +ููŠ ุงู„ู…ุซุงู„ ุงู„ุชุงู„ูŠ ุŒ ู†ูˆุถุญ ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… ู‚ุฏุฑุงุช ูŠูˆูƒูˆ 8 YOLO ู„ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ู„ุฑุณู… ุญุฑูƒุฉ ุงู„ูƒุงุฆู†ุงุช ุงู„ู…ูƒุชุดูุฉ ุนุจุฑ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ ุงู„ู…ุชุนุฏุฏุฉ. 
ูŠุชุถู…ู† ู‡ุฐุง ุงู„ุจุฑู†ุงู…ุฌ ูุชุญ ู…ู„ู ููŠุฏูŠูˆ ูˆู‚ุฑุงุกุชู‡ ุฅุทุงุฑู‹ุง ุจุฅุทุงุฑ ุŒ ูˆุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ YOLO ู„ุชุญุฏูŠุฏ ูˆุชุชุจุน ุงู„ุนุฏูŠุฏ ู…ู† ุงู„ูƒุงุฆู†ุงุช. ุนู† ุทุฑูŠู‚ ุงู„ุงุญุชูุงุธ ุจู†ู‚ุงุท ุงู„ูˆุณุท ู„ู…ุฑุจุนุงุช ุงู„ุญุฏูˆุฏ ุงู„ู…ูƒุชุดูุฉ ูˆุชูˆุตูŠู„ู‡ุง ุŒ ูŠู…ูƒู†ู†ุง ุฑุณู… ุฎุทูˆุท ุชู…ุซู„ ุงู„ู…ุณุงุฑุงุช ุงู„ุชูŠ ุชู… ุงุชุจุงุนู‡ุง ุจูˆุงุณุทุฉ ุงู„ูƒุงุฆู†ุงุช ุงู„ุชูŠ ุชู…ุช ู…ุชุงุจุนุชู‡ุง. + +!!! Example "ุฑุณู… ุงู„ู…ุณุงุฑุงุช ุนุจุฑ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ ุงู„ู…ุชุนุฏุฏุฉ" + + ```python + from collections import defaultdict + + import cv2 + import numpy as np + + from ultralytics import YOLO + + # ุญู…ู‘ู„ ู†ู…ูˆุฐุฌ YOLOv8 + model = YOLO('yolov8n.pt') + + # ุงูุชุญ ู…ู„ู ุงู„ููŠุฏูŠูˆ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ุงุญูุธ ุชุงุฑูŠุฎ ุงู„ู…ุณุงุฑุงุช + track_history = defaultdict(lambda: []) + + # ุชุญู„ู‚ ุนุจุฑ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ + while cap.isOpened(): + # ู‚ุฑุงุกุฉ ุงู„ุฅุทุงุฑ ู…ู† ุงู„ููŠุฏูŠูˆ + success, frame = cap.read() + + if success: + # ุชุดุบูŠู„ ุชุชุจุน YOLOv8 ุนู„ู‰ ุงู„ุฅุทุงุฑ ุŒ ูˆุญูุธ ุงู„ู…ุณุงุฑุงุช ุจูŠู† ุงู„ุฅุทุงุฑุงุช + results = model.track(frame, persist=True) + + # ุงู„ุญุตูˆู„ ุนู„ู‰ ุงู„ู…ุฑุจุนุงุช ูˆู…ุนุฑูุงุช ุงู„ู…ุณุงุฑ + boxes = results[0].boxes.xywh.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + + # ุชุตูˆุฑ ุงู„ู†ุชุงุฆุฌ ุนู„ู‰ ุงู„ุฅุทุงุฑ + annotated_frame = results[0].plot() + + # ุฑุณู… ุงู„ู…ุณุงุฑุงุช + for box, track_id in zip(boxes, track_ids): + x, y, w, h = box + track = track_history[track_id] + track.append((float(x), float(y))) # x, y ู†ู‚ุทุฉ ุงู„ูˆุณุท + if len(track) > 30: # ุงุญุชูุธ ุจู€ 90 ู…ุณุงุฑู‹ุง ู„ู€ 90 ุฅุทุงุฑู‹ุง + track.pop(0) + + # ุฑุณู… ุฎุทูˆุท ุงู„ุชุชุจุน + points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) + cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10) + + # ุนุฑุถ ุงู„ุฅุทุงุฑ ุงู„ู…ุนู„ู‚ + cv2.imshow("YOLOv8 Tracking", annotated_frame) + + # ูƒุณุฑ ุงู„ู„ูˆุจ ููŠ ุญุงู„ุฉ ุงู„ุถุบุท ุนู„ู‰ 'q' + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ูƒุณุฑ ุงู„ู„ูˆุจ ููŠ ู†ู‡ุงูŠุฉ ุงู„ููŠุฏูŠูˆ + break + + # ุฅุทู„ุงู‚ ูƒุงุฆู† ุงู„ุชู‚ุงุท ุงู„ููŠุฏูŠูˆ ูˆุฅุบู„ุงู‚ ู†ุงูุฐุฉ ุงู„ุนุฑุถ + cap.release() + cv2.destroyAllWindows() + ``` + +### ุงู„ุชุชุจุน ู…ุชุนุฏุฏ ุงู„ุฎูŠูˆุท + +ูŠูˆูุฑ ุงู„ุชุชุจุน ู…ุชุนุฏุฏ ุงู„ุฎูŠูˆุท ุงู„ู‚ุฏุฑุฉ ุนู„ู‰ ุชุดุบูŠู„ ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ุนู„ู‰ ุนุฏุฉ ุชุฏูู‚ุงุช ููŠุฏูŠูˆ ููŠ ูˆู‚ุช ูˆุงุญุฏ. ู‡ุฐุง ู…ููŠุฏ ุจุดูƒู„ ุฎุงุต ุนู†ุฏ ุงู„ุชุนุงู…ู„ ู…ุน ู…ุฏุฎู„ุงุช ููŠุฏูŠูˆ ู…ุชุนุฏุฏุฉ ุŒ ู…ุซู„ ู…ู† ูƒุงู…ูŠุฑุงุช ุงู„ู…ุฑุงู‚ุจุฉ ุงู„ู…ุชุนุฏุฏุฉ ุŒ ุญูŠุซ ูŠู…ูƒู† ุฃู† ูŠุนุฒุฒ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุชุฒุงู…ู†ุฉ ุงู„ูƒูุงุกุฉ ูˆุงู„ุฃุฏุงุก ุจุดูƒู„ ูƒุจูŠุฑ. + +ููŠ ุงู„ุณูƒุฑูŠุจุช ุงู„ุจุงูŠุซูˆู† ุงู„ู…ู‚ุฏู… ุŒ ู†ุณุชุฎุฏู… ูˆุญุฏุฉ `threading` ููŠ Python ู„ุชุดุบูŠู„ ุนุฏุฉ ู†ุณุฎ ู…ุชุฒุงู…ู†ุฉ ู…ู† ุงู„ู…ุชุชุจุน. ูŠูƒูˆู† ู„ูƒู„ ู…ูˆุถูˆุน ู…ุณุคูˆู„ูŠุฉ ุชุดุบูŠู„ ุงู„ู…ุชุชุจุน ุนู„ู‰ ู…ู„ู ููŠุฏูŠูˆ ูˆุงุญุฏ ุŒ ูˆุชุนู…ู„ ุฌู…ูŠุน ุงู„ุฎูŠูˆุท ููŠ ุงู„ุฎู„ููŠุฉ ููŠ ู†ูุณ ุงู„ูˆู‚ุช. + +ู„ู„ุชุฃูƒุฏ ู…ู† ุฃู† ูƒู„ ุฎูŠุท ูŠุชู„ู‚ู‰ ุงู„ู…ุนู„ู…ุงุช ุงู„ุตุญูŠุญุฉ (ู…ู„ู ุงู„ููŠุฏูŠูˆ ูˆุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุณุชุฎุฏู… ูˆูู‡ุฑุณ ุงู„ู…ู„ู) ุŒ ู†ุญุฏุฏ ูˆุธูŠูุฉ `run_tracker_in_thread` ุงู„ุชูŠ ุชู‚ุจู„ ู‡ุฐู‡ ุงู„ู…ุนู„ู…ุงุช ูˆุชุญุชูˆูŠ ุนู„ู‰ ุญู„ู‚ุฉ ุงู„ู…ุชุงุจุนุฉ ุงู„ุฑุฆูŠุณูŠุฉ. 
ู‡ุฐู‡ ุงู„ูˆุธูŠูุฉ ุชู‚ุฑุฃ ุฅุทุงุฑ ุงู„ููŠุฏูŠูˆ ุงู„ุฎุงุตุฉ ุจุงู„ููŠุฏูŠูˆ ู…ุจุงุดุฑุฉ ู…ู† ู…ุตุฏุฑ ุงู„ู…ู„ู ุงู„ูˆุงุญุฏ ุŒ ูˆุชุดุบูŠู„ ุงู„ู…ุชุชุจุน ุŒ ูˆุนุฑุถ ุงู„ู†ุชุงุฆุฌ. + +ุชุณุชุฎุฏู… ููŠ ู‡ุฐุง ุงู„ู…ุซุงู„ ู†ู…ูˆุฐุฌูŠู† ู…ุฎุชู„ููŠู†: 'yolov8n.pt' ูˆ 'yolov8n-seg.pt' ุŒ ูŠู‚ูˆู… ูƒู„ ู…ู†ู‡ู…ุง ุจุชุชุจุน ุงู„ูƒุงุฆู†ุงุช ููŠ ู…ู„ู ููŠุฏูŠูˆ ู…ุฎุชู„ู. ุชู… ุชุญุฏูŠุฏ ู…ู„ูุงุช ุงู„ููŠุฏูŠูˆ ููŠ `video_file1` ูˆ `video_file2`. + +ุชุนุฏูŠู„ ู…ุนู„ู…ุงุช `daemon=True` ููŠ `threading.Thread` ูŠุนู†ูŠ ุฃู† ู‡ุฐู‡ ุงู„ุฎูŠูˆุท ุณุชุชู… ุฅุบู„ุงู‚ู‡ุง ุจู…ุฌุฑุฏ ุงู†ุชู‡ุงุก ุงู„ุจุฑู†ุงู…ุฌ ุงู„ุฑุฆูŠุณูŠ. ุซู… ู†ุจุฏุฃ ุงู„ุฎูŠูˆุท ุจุงุณุชุฎุฏุงู… `start ()` ูˆุงุณุชุฎุฏู… `join ()` ู„ุฌุนู„ ุงู„ุฎูŠุท ุงู„ุฑุฆูŠุณูŠ ูŠู†ุชุธุฑ ุญุชู‰ ูŠู†ุชู‡ูŠ ุฎูŠุทูŠ ุงู„ู…ุชุชุจุน. + +ุฃุฎูŠุฑู‹ุง ุŒ ุจุนุฏ ุงูƒุชู…ุงู„ ุฌู…ูŠุน ุงู„ุฎูŠูˆุท ู„ู…ู‡ู…ุชู‡ุง ุŒ ูŠุชู… ุฅุบู„ุงู‚ ุงู„ู†ูˆุงูุฐ ุงู„ุชูŠ ุชุนุฑุถ ุงู„ู†ุชุงุฆุฌ ุจุงุณุชุฎุฏุงู… `cv2.destroyAllWindows()`. + +!!! Example "Streaming for-loop with tracking" + + ```python + import threading + import cv2 + from ultralytics import YOLO + + + def run_tracker_in_thread(filename, model, file_index): + """ + ูŠุดุบู„ ู…ู„ู ููŠุฏูŠูˆ ุฃูˆ ู…ุตุฏุฑ ุชูŠุงุฑ ุงู„ูƒุงู…ูŠุฑุง ุจุงู„ุชุฒุงู…ู† ู…ุน YOLOv8 ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… ุชุนุฏุฏ ุงู„ุฎูŠูˆุท. + + ู‡ุฐู‡ ุงู„ูˆุธูŠูุฉ ุชู„ุชู‚ุท ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ ู…ู† ู…ู„ู ุฃูˆ ู…ุตุฏุฑ ุงู„ูƒุงู…ูŠุฑุง ุงู„ู…ุนุฑูˆู ุŒ ูˆุชุณุชุฎุฏู… ู†ู…ูˆุฐุฌ YOLOv8 ู„ุชุชุจุน ุงู„ูƒุงุฆู†ุงุช. + ูŠุนู…ู„ ุงู„ุจุฑู†ุงู…ุฌ ููŠ ุฎูŠุทู‡ ุงู„ุฎุงุต ู„ู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุชุฒุงู…ู†ุฉ. + + Args: + filename (str): ู…ุณุงุฑ ู…ู„ู ุงู„ููŠุฏูŠูˆ ุฃูˆ ู…ุนุฑู ู…ุตุฏุฑ ูƒุงู…ูŠุฑุง ุงู„ูˆูŠุจ / ุฎุงุฑุฌูŠุฉ. + model (obj): ูƒุงุฆู† ู†ู…ูˆุฐุฌ YOLOv8. + file_index (int): ู…ุคุดุฑ ู„ุชุญุฏูŠุฏ ุงู„ู…ู„ู ุจุดูƒู„ ูุฑูŠุฏ ุŒ ูŠูุณุชุฎุฏู… ู„ุฃุบุฑุงุถ ุงู„ุนุฑุถ. + + ู…ู„ุงุญุธุฉ: + ุงุถุบุท ุนู„ู‰ 'q' ู„ุฅู†ู‡ุงุก ู†ุงูุฐุฉ ุนุฑุถ ุงู„ููŠุฏูŠูˆ. 
+ """ + video = cv2.VideoCapture(filename) # ู‚ุฑุงุกุฉ ู…ู„ู ุงู„ููŠุฏูŠูˆ + + while True: + ret, frame = video.read() # ู‚ุฑุงุกุฉ ุฅุทุงุฑุงุช ุงู„ููŠุฏูŠูˆ + + # ุฅู†ู‡ุงุก ุงู„ุฏูˆุฑุฉ ุฅุฐุง ู„ู… ูŠุชุจู‚ู‰ ุฅุทุงุฑุงุช ุนู„ู‰ ุงู„ููŠุฏูŠูˆูŠู† + if not ret: + break + + # ุชุชุจุน ูƒุงุฆู†ุงุช ููŠ ุงู„ุฅุทุงุฑุงุช ุฅุฐุง ุชูˆูุฑุช + results = model.track(frame, persist=True) + res_plotted = results[0].plot() + cv2.imshow(f"Tracking_Stream_{file_index}", res_plotted) + + key = cv2.waitKey(1) + if key == ord('q'): + break + + # ุฅุทู„ุงู‚ ู…ุตุฏุฑูŠ ุงู„ููŠุฏูŠูˆ + video.release() + + + # ุญู…ู‘ู„ ุงู„ู†ู…ุงุฐุฌ + model1 = YOLO('yolov8n.pt') + model2 = YOLO('yolov8n-seg.pt') + + # ุญุฏุฏ ู…ู„ูุงุช ุงู„ููŠุฏูŠูˆ ู„ู„ู…ุชุงุจุนูŠู† + video_file1 = "path/to/video1.mp4" # ู…ุณุงุฑ ู…ู„ู ุงู„ููŠุฏูŠูˆ ุŒ 0 ู„ูƒุงู…ูŠุฑุง ุงู„ูˆูŠุจ + video_file2 = 0 # ู…ุณุงุฑ ู…ู„ู ุงู„ููŠุฏูŠูˆ ุŒ 0 ู„ูƒุงู…ูŠุฑุง ุงู„ูˆูŠุจ ุŒ 1 ู„ูƒุงู…ูŠุฑุง ุฎุงุฑุฌูŠุฉ + + # ุฅู†ุดุงุก ุฎูŠูˆุท ุงู„ู…ุชุงุจุน + tracker_thread1 = threading.Thread(target=run_tracker_in_thread, args=(video_file1, model1 ,1), daemon=True) + tracker_thread2 = threading.Thread(target=run_tracker_in_thread, args=(video_file2, model2, 2), daemon=True) + + # ุจุฏุก ุฎูŠูˆุท ุงู„ู…ุชุงุจุน + tracker_thread1.start() + tracker_thread2.start() + + # ุงู†ุชุธุฑ ุญุชู‰ ูŠู†ุชู‡ูŠ ุฎูŠุท ุงู„ู…ุชุงุจุน + tracker_thread1.join() + tracker_thread2.join() + + # Clean up and close windows + cv2.destroyAllWindows() + ``` + +ูŠู…ูƒู† ุจุณู‡ูˆู„ุฉ ุชูˆุณูŠุน ู‡ุฐุง ุงู„ู…ุซุงู„ ู„ู„ุชุนุงู…ู„ ู…ุน ู…ู„ูุงุช ููŠุฏูŠูˆ ูˆู†ู…ุงุฐุฌ ุฃุฎุฑู‰ ู…ู† ุฎู„ุงู„ ุฅู†ุดุงุก ู…ุฒูŠุฏ ู…ู† ุงู„ุฎูŠูˆุท ูˆุชุทุจูŠู‚ ู†ูุณ ุงู„ู…ู†ู‡ุฌูŠุฉ. + +## ุงู„ู…ุณุงู‡ู…ุฉ ููŠ ุงู„ู…ุชุชุจุนูˆู† ุงู„ุฌุฏูŠุฏูˆู† + +ู‡ู„ ุฃู†ุช ู…ุงู‡ุฑ ููŠ ุงู„ุชุชุจุน ู…ุชุนุฏุฏ ุงู„ูƒุงุฆู†ุงุช ูˆู‚ุฏ ู†ูุฐุช ุฃูˆ ุตูŠุบุช ุจู†ุฌุงุญ ุฎูˆุงุฑุฒู…ูŠุฉ ุชุชุจุน ุจุงุณุชุฎุฏุงู… Ultralytics YOLOุŸ ู†ุฏุนูˆูƒ ู„ู„ู…ุดุงุฑูƒุฉ ููŠ ู‚ุณู… ุงู„ู…ุชุชุจุนูŠู† ู„ุฏูŠู†ุง ููŠ [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! ู‚ุฏ ุชูƒูˆู† ุงู„ุชุทุจูŠู‚ุงุช ููŠ ุงู„ุนุงู„ู… ุงู„ุญู‚ูŠู‚ูŠ ูˆุงู„ุญู„ูˆู„ ุงู„ุชูŠ ุชู‚ุฏู…ู‡ุง ู„ุง ุชู‚ุฏุฑ ุจุซู…ู† ู„ู„ู…ุณุชุฎุฏู…ูŠู† ุงู„ุนุงู…ู„ูŠู† ุนู„ู‰ ู…ู‡ุงู… ุงู„ุชุชุจุน. + +ู…ู† ุฎู„ุงู„ ุงู„ู…ุณุงู‡ู…ุฉ ููŠ ู‡ุฐุง ุงู„ู‚ุณู… ุŒ ุชุณุงุนุฏ ููŠ ุชูˆุณูŠุน ู†ุทุงู‚ ุญู„ูˆู„ ุงู„ุชุชุจุน ุงู„ู…ุชุงุญุฉ ููŠ ุฅุทุงุฑ Ultralytics YOLO ุŒ ู…ุถูŠูู‹ุง ุทุจู‚ุฉ ุฃุฎุฑู‰ ู…ู† ุงู„ูˆุธุงุฆู ูˆุงู„ูุนุงู„ูŠุฉ ู„ู„ู…ุฌุชู…ุน. + +ู„ุจุฏุก ุงู„ู…ุณุงู‡ู…ุฉ ุŒ ูŠุฑุฌู‰ ุงู„ุฑุฌูˆุน ุฅู„ู‰ [ุฏู„ูŠู„ ุงู„ู…ุณุงู‡ู…ุฉ ุงู„ุฎุงุต ุจู†ุง](https://docs.ultralytics.com/help/contributing) ู„ู„ุญุตูˆู„ ุนู„ู‰ ุชุนู„ูŠู…ุงุช ุดุงู…ู„ุฉ ุญูˆู„ ุชู‚ุฏูŠู… ุทู„ุจ ุณุญุจ (PR) ๐Ÿ› ๏ธ. ู†ุชุทู„ุน ุจุดูƒู„ ูƒุจูŠุฑ ุฅู„ู‰ ู…ุง ุณุชุฌู„ุจู‡ ู„ู„ุทุงูˆู„ุฉ! + +ู„ู†ุนุฒุฒ ู…ุนู‹ุง ู‚ุฏุฑุงุช ุนู…ู„ูŠุฉ ุงู„ุชุชุจุน ู„ุฃุฌู‡ุฒุฉ Ultralytics YOLO ๐Ÿ™! 
+ +[vehicle track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab + +[people track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527 + +[fish track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142 diff --git a/ultralytics/docs/ar/modes/track.md:Zone.Identifier b/ultralytics/docs/ar/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/train.md b/ultralytics/docs/ar/modes/train.md new file mode 100755 index 0000000..54881f1 --- /dev/null +++ b/ultralytics/docs/ar/modes/train.md @@ -0,0 +1,286 @@ +--- +comments: true +description: ุฏู„ูŠู„ ุฎุทูˆุฉ ุจุฎุทูˆุฉ ู„ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ YOLOv8 ุจุงุณุชุฎุฏุงู… Ultralytics YOLO ุจู…ุง ููŠ ุฐู„ูƒ ุฃู…ุซู„ุฉ ุนู„ู‰ ุงู„ุชุฏุฑูŠุจ ุจุงุณุชุฎุฏุงู… ุจุทุงู‚ุฉ ุฑุณูˆู…ุงุช ู…ู†ูุตู„ุฉ ูˆู…ุชุนุฏุฏุฉ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉ +keywords: UltralyticsุŒ YOLOv8ุŒ YOLOุŒ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ูˆุถุน ุชุฏุฑูŠุจุŒ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู…ุฎุตุตุฉุŒ ุชุฏุฑูŠุจ ุจุทุงู‚ุฉ ุฑุณูˆู…ุงุชุŒ ู…ุชุนุฏุฏุฉ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉุŒ ู…ุนู„ู…ุงุช ุชูƒุจูŠุฑุŒ ุฃู…ุซู„ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑุŒ ุฃู…ุซู„ุฉ ุจุงูŠุซูˆู† +--- + +# ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… Ultralytics YOLO + +ุจูŠุฆุฉ ูˆู…ุฏู…ุฌุงุช Ultralytics YOLO + +## ุงู„ู…ู‚ุฏู…ุฉ + +ูŠุชุถู…ู† ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ ุงู„ุชุนู„ู… ุงู„ุนู…ูŠู‚ ุชุฒูˆูŠุฏู‡ ุจุงู„ุจูŠุงู†ุงุช ูˆุถุจุท ู…ุนู„ู…ุงุชู‡ ุจุญูŠุซ ูŠุชู…ูƒู† ู…ู† ุฅุฌุฑุงุก ุชูˆู‚ุนุงุช ุฏู‚ูŠู‚ุฉ. ูŠุชู… ุชุตู…ูŠู… ูˆุถุน ุงู„ุชุฏุฑูŠุจ ููŠ Ultralytics YOLOv8 ู„ุชุฏุฑูŠุจ ูุนุงู„ ูˆูุนุงู„ ู„ู†ู…ุงุฐุฌ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ู…ุณุชุบู„ุงู‹ ุชู…ุงู…ู‹ุง ุฅู…ูƒุงู†ุงุช ุงู„ุฃุฌู‡ุฒุฉ ุงู„ุญุฏูŠุซุฉ. ูŠู‡ุฏู ู‡ุฐุง ุงู„ุฏู„ูŠู„ ุฅู„ู‰ ุดุฑุญ ุฌู…ูŠุน ุงู„ุชูุงุตูŠู„ ุงู„ุชูŠ ุชุญุชุงุฌ ุฅู„ู‰ ุงู„ุจุฏุก ููŠ ุชุฏุฑูŠุจ ุงู„ู†ู…ุงุฐุฌ ุงู„ุฎุงุตุฉ ุจูƒ ุจุงุณุชุฎุฏุงู… ู…ุฌู…ูˆุนุฉ ู…ุชูŠู†ุฉ ู…ู† ู…ูŠุฒุงุช YOLOv8. + +

+
+ +
+ ุดุงู‡ุฏ: ูƒูŠููŠุฉ ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ YOLOv8 ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุฎุตุตุฉ ุงู„ุฎุงุตุฉ ุจูƒ ููŠ Google Colab. +

+ +## ู„ู…ุงุฐุง ุงุฎุชูŠุงุฑ Ultralytics YOLO ู„ู„ุชุฏุฑูŠุจุŸ + +ุฅู„ูŠูƒ ุจุนุถ ุงู„ุฃุณุจุงุจ ุงู„ู…ู‚ู†ุนุฉ ู„ุงุฎุชูŠุงุฑ ูˆุถุน ุงู„ุชุฏุฑูŠุจ ููŠ YOLOv8: + +- **ุงู„ูƒูุงุกุฉ:** ุงุณุชูุฏ ุฅู„ู‰ ุฃู‚ุตู‰ ุญุฏ ู…ู† ุงู„ุฃุฌู‡ุฒุฉ ุงู„ุฎุงุตุฉ ุจูƒุŒ ุณูˆุงุก ูƒู†ุช ุชุณุชุฎุฏู… ุจุทุงู‚ุฉ ุฑุณูˆู…ุงุช ูˆุงุญุฏุฉ ุฃูˆ ุชูˆุณูŠุนู‡ุง ุนุจุฑ ุนุฏุฉ ุจุทุงู‚ุงุช ุฑุณูˆู…ุงุช. +- **ุชุนุฏุฏ ุงู„ุงุณุชุฎุฏุงู…ุงุช:** ู‚ู… ุจุงู„ุชุฏุฑูŠุจ ุนู„ู‰ ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุฎุตุตุฉ ุจุงู„ุฅุถุงูุฉ ุฅู„ู‰ ุงู„ู…ุฌู…ูˆุนุงุช ุงู„ู…ุชุงุญุฉ ุจุณู‡ูˆู„ุฉ ู…ุซู„ COCO ูˆ VOC ูˆ ImageNet. +- **ุณู‡ู„ ุงู„ุงุณุชุฎุฏุงู…:** ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ CLI ูˆูˆุงุฌู‡ุฉ Python ุงู„ุจุณูŠุทุฉ ูˆุงู„ู‚ูˆูŠุฉ ู„ุชุฌุฑุจุฉ ุชุฏุฑูŠุจ ู…ุจุงุดุฑุฉ. +- **ู…ุฑูˆู†ุฉ ุงู„ู…ุนู„ู…ุงุช:** ู…ุฌู…ูˆุนุฉ ูˆุงุณุนุฉ ู…ู† ุงู„ู…ุนู„ู…ุงุช ุงู„ู‚ุงุจู„ุฉ ู„ู„ุชุฎุตูŠุต ู„ุถุจุท ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ. + +### ุงู„ู…ูŠุฒุงุช ุงู„ุฑุฆูŠุณูŠุฉ ู„ูˆุถุน ุงู„ุชุฏุฑูŠุจ + +ุชุชู…ุซู„ ุงู„ู…ูŠุฒุงุช ุงู„ุจุงุฑุฒุฉ ู„ูˆุถุน ุงู„ุชุฏุฑูŠุจ ููŠ YOLOv8 ููŠ ู…ุง ูŠู„ูŠ: + +- **ุชู†ุฒูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุชู„ู‚ุงุฆูŠู‹ุง:** ุชู‚ูˆู… ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ู‚ูŠุงุณูŠุฉ ู…ุซู„ COCO ูˆ VOC ูˆ ImageNet ุจุงู„ุชู†ุฒูŠู„ ุชู„ู‚ุงุฆูŠู‹ุง ุนู†ุฏ ุฃูˆู„ ุงุณุชุฎุฏุงู…. +- **ุฏุนู… ู…ุชุนุฏุฏ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉ:** ู‚ู… ุจุชูˆุฒูŠุน ุงู„ุนู…ู„ูŠุงุช ุงู„ุชุฏุฑูŠุจูŠุฉ ุจุณู„ุงุณุฉ ุนุจุฑ ุนุฏุฉ ุจุทุงู‚ุงุช ุฑุณูˆู…ุงุช ู„ุชุณุฑูŠุน ุงู„ุนู…ู„ูŠุฉ. +- **ุถุจุท ุงู„ู…ุนู„ู…ุงุช:** ุงู„ุฎูŠุงุฑ ู„ุชุนุฏูŠู„ ุงู„ู…ุนู„ู…ุงุช ุงู„ุชูƒุจูŠุฑ ู…ู† ุฎู„ุงู„ ู…ู„ูุงุช ุชูƒูˆูŠู† YAML ุฃูˆ ูˆุณุงุฆุท ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ. +- **ู…ุฑุงู‚ุจุฉ ูˆุชุชุจุน:** ุชุชุจุน ููŠ ุงู„ูˆู‚ุช ุงู„ูุนู„ูŠ ู„ู…ู‚ุงูŠูŠุณ ุงู„ุชุฏุฑูŠุจ ูˆุชุตูˆุฑ ุนู…ู„ูŠุฉ ุงู„ุชุนู„ู… ู„ุชุญู‚ูŠู‚ ุฑุคู‰ ุฃูุถู„. + +!!! Example "ู†ุตูŠุญุฉ" + + * ูŠุชู… ุชู†ุฒูŠู„ ู…ุฌู…ูˆุนุงุช YOLOv8 ุงู„ู‚ูŠุงุณูŠุฉ ู…ุซู„ COCO ูˆ VOC ูˆ ImageNet ูˆุบูŠุฑู‡ุง ุชู„ู‚ุงุฆูŠู‹ุง ุนู†ุฏ ุงู„ุงุณุชุฎุฏุงู… ุงู„ุฃูˆู„ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„: `yolo train data=coco.yaml` + +## ุฃู…ุซู„ุฉ ุงุณุชุฎุฏุงู… + +ุชุฏุฑูŠุจ YOLOv8n ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128 ู„ู…ุฏุฉ 100 ุญู‚ุจุฉ ุจุญุฌู… ุตูˆุฑุฉ 640. ูŠู…ูƒู† ุชุญุฏูŠุฏ ุฌู‡ุงุฒ ุงู„ุชุฏุฑูŠุจ ุจุงุณุชุฎุฏุงู… ุงู„ูˆุณูŠุทุฉ `device`. ุฅุฐุง ู„ู… ูŠุชู… ุชู…ุฑูŠุฑ ูˆุณูŠุทุฉุŒ ุณูŠุชู… ุงุณุชุฎุฏุงู… ุงู„ุฌู‡ุงุฒ ุจุทุงู‚ุฉ ุงู„ุฑุณูˆู…ุงุช "device=0" ุฅุฐุง ูƒุงู†ุช ู…ุชุงุญุฉุŒ ูˆุฅู„ุง ุณูŠุชู… ุงุณุชุฎุฏุงู… `device=cpu`. ุงุณุชุนุฑุถ ุงู„ุฌุฏูˆู„ ุงู„ุฒู…ู†ูŠ ุฃุฏู†ุงู‡ ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ุจูˆุณุงุฆุท ุงู„ุชุฏุฑูŠุจ. + +!!! Example "ุฃู…ุซู„ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ู„ู„ุชุฏุฑูŠุจ ุจุงุณุชุฎุฏุงู… ุจุทุงู‚ุฉ ุฑุณูˆู…ุงุช ู…ุณุชู‚ู„ุฉ ูˆู…ุนุงู„ุฌ ู…ุฑูƒุฒูŠ" + + ูŠุชู… ุชุญุฏูŠุฏ ุงู„ุฌู‡ุงุฒ ุชู„ู‚ุงุฆูŠู‹ุง. ุฅุฐุง ูƒุงู†ุช ุจุทุงู‚ุฉ ุฑุณูˆู…ุงุช ู…ุชุงุญุฉุŒ ุณูŠุชู… ุงุณุชุฎุฏุงู…ู‡ุงุŒ ูˆุฅู„ุง ุณุชุจุฏุฃ ุงู„ุชุฏุฑูŠุจ ุนู„ู‰ ุงู„ู…ุนุงู„ุฌ ุงู„ู…ุฑูƒุฒูŠ. 
+ + === "ุจุงูŠุซูˆู†" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.yaml') # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML + model = YOLO('yolov8n.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง (ุงู„ุฃูƒุซุฑ ุชูˆุตูŠุฉ ู„ู„ุชุฏุฑูŠุจ) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # ุฅู†ุดุงุก ู…ู† ู…ู„ู YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุจุฏุงูŠุฉ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ *.pt ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุง ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ุงู„ุชุฏุฑูŠุจ ู…ุชุนุฏุฏ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉ + +ูŠุชูŠุญ ุงู„ุชุฏุฑูŠุจ ู…ุชุนุฏุฏ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉ ุงุณุชุฎุฏุงู… ุงู„ู…ูˆุงุฑุฏ ุงู„ุฃุฌู‡ุฒุฉ ุงู„ู…ุชุงุญุฉ ุจูƒูุงุกุฉ ุฃูƒุจุฑ ู…ู† ุฎู„ุงู„ ุชูˆุฒูŠุน ุฃุนุจุงุก ุงู„ุชุฏุฑูŠุจ ุนุจุฑ ุนุฏุฉ ุจุทุงู‚ุงุช ุฑุณูˆู…ูŠุฉ. ู‡ุฐู‡ ุงู„ู…ูŠุฒุฉ ู…ุชุงุญุฉ ู…ู† ุฎู„ุงู„ ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ ุงู„ุชุทุจูŠู‚ุงุช ุจุงุณุชุฎุฏุงู… Python ูˆุณุทุฑ ุงู„ุฃูˆุงู…ุฑ. ู„ุชู…ูƒูŠู† ุงู„ุชุฏุฑูŠุจ ู…ุชุนุฏุฏ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉุŒ ุญุฏุฏ ู…ุนุฑูุงุช ุฃุฌู‡ุฒุฉ GPU ุงู„ุชูŠ ุชุฑุบุจ ููŠ ุงุณุชุฎุฏุงู…ู‡ุง. + +!!! Example "ุฃู…ุซู„ุฉ ุนู„ู‰ ุงู„ุชุฏุฑูŠุจ ู…ุชุนุฏุฏ ุงู„ุจุทุงู‚ุงุช ุงู„ุฑุณูˆู…ูŠุฉ" + + ู„ู„ุชุฏุฑูŠุจ ุจุงุณุชุฎุฏุงู… ุฃุฌู‡ุฒุชูŠ GPUุŒ ุฌู‡ุงุฒ CUDA 0 ูˆ 1ุŒ ุงุณุชุฎุฏู… ุงู„ุฃูˆุงู…ุฑ ุงู„ุชุงู„ูŠุฉ. ู‚ู… ุจุชูˆุณูŠุนู‡ุง ู„ุงุณุชุฎุฏุงู… ุงู„ู…ุฒูŠุฏ ู…ู† ุงู„ุจุทุงู‚ุงุช. + + === "ุจุงูŠุซูˆู†" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง (ุงู„ุฃูƒุซุฑ ุชูˆุตูŠุฉ ู„ู„ุชุฏุฑูŠุจ) + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุจุฃุฌู‡ุฒุฉ GPU 2 + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + # ุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ *.pt ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ุจุงุณุชุฎุฏุงู… ุจุทุงู‚ุงุช GPU 0 ูˆ 1 + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### ุงู„ุชุฏุฑูŠุจ ุจุงุณุชุฎุฏุงู… Apple M1 ูˆ M2 MPS + +ู…ุน ุฏุนู… ุดุฑุงุฆุญ Apple M1 ูˆ M2 ุงู„ู…ุฏู…ุฌ ููŠ ู†ู…ุงุฐุฌ Ultralytics YOLOุŒ ูŠู…ูƒู†ูƒ ุงู„ุขู† ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌูƒ ุนู„ู‰ ุงู„ุฃุฌู‡ุฒุฉ ุงู„ุชูŠ ุชุณุชุฎุฏู… ู†ุธุงู… Metal Performance Shaders (MPS) ุงู„ู‚ูˆูŠ. ูŠูˆูุฑ MPS ุทุฑูŠู‚ุฉ ุนุงู„ูŠุฉ ุงู„ุฃุฏุงุก ู„ุชู†ููŠุฐ ุงู„ู…ู‡ุงู… ุงู„ุญุณุงุจูŠุฉ ูˆู…ุนุงู„ุฌุฉ ุงู„ุตูˆุฑ ุนู„ู‰ ุดุฑุงุฆุญ ุงู„ุณูŠู„ูŠูƒูˆู† ุงู„ู…ุฎุตุตุฉ ู„ุนุจุฉ Apple. + +ู„ุชู…ูƒูŠู† ุงู„ุชุฏุฑูŠุจ ุนู„ู‰ ุดุฑุงุฆุญ Apple M1 ูˆ M2ุŒ ูŠุฌุจ ุนู„ูŠูƒ ุชุญุฏูŠุฏ "mps" ูƒุฌู‡ุงุฒูƒ ุนู†ุฏ ุจุฏุก ุนู…ู„ูŠุฉ ุงู„ุชุฏุฑูŠุจ. ููŠู…ุง ูŠู„ูŠ ู…ุซุงู„ ู„ูƒูŠููŠุฉ ุงู„ู‚ูŠุงู… ุจุฐู„ูƒ ููŠ ุจุงูŠุซูˆู† ูˆุนุจุฑ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ: + +!!! 
Example "ู…ุซุงู„ ุนู„ู‰ ุงู„ุชุฏุฑูŠุจ ุจูˆุงุณุทุฉ MPS" + + === "ุจุงูŠุซูˆู†" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง (ุงู„ุฃูƒุซุฑ ุชูˆุตูŠุฉ ู„ู„ุชุฏุฑูŠุจ) + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… 2 ุจุทุงู‚ุงุช GPU + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + # ุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ *.pt ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง ุจุงุณุชุฎุฏุงู… ุจุทุงู‚ุงุช GPU 0 ูˆ 1 + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +ุนู†ุฏ ุงู„ุงุณุชูุงุฏุฉ ู…ู† ู‚ุฏุฑุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ุญุงุณูˆุจูŠุฉ ู„ุดุฑุงุฆุญ M1/M2ุŒ ูŠุชูŠุญ ู„ูƒ ู‡ุฐุง ุงู„ุญู…ู„ ุฃุฏุงุกู‹ ุฃูƒุซุฑ ูƒูุงุกุฉ ู„ู…ู‡ุงู… ุงู„ุชุฏุฑูŠุจ. ู„ู„ุญุตูˆู„ ุนู„ู‰ ุฅุฑุดุงุฏุงุช ุฃูƒุซุฑ ุชูุตูŠู„ุงู‹ ูˆุฎูŠุงุฑุงุช ุชูƒูˆูŠู† ู…ุชู‚ุฏู…ุฉุŒ ูŠุฑุฌู‰ ุงู„ุฑุฌูˆุน ุฅู„ู‰ [ูˆุซุงุฆู‚ PyTorch MPS](https://pytorch.org/docs/stable/notes/mps.html). + +### ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจ ุงู„ู…ู‚ุทูˆุน + +ูŠุนุชุจุฑ ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุญุงู„ุงุช ุงู„ุชุฎุฒูŠู† ุงู„ุณุงุจู‚ุฉ ู…ูŠุฒุฉ ุญุงุณู…ุฉ ุนู†ุฏ ุงู„ุนู…ู„ ู…ุน ู†ู…ุงุฐุฌ ุงู„ุชุนู„ู… ุงู„ุนู…ูŠู‚. ูŠู…ูƒู† ุฃู† ูŠูƒูˆู† ู‡ุฐุง ู…ููŠุฏู‹ุง ููŠ ุงู„ุนุฏูŠุฏ ู…ู† ุงู„ุณูŠู†ุงุฑูŠูˆู‡ุงุชุŒ ู…ุซู„ ุนู†ุฏ ุชุนุทู„ ุนู…ู„ูŠุฉ ุงู„ุชุฏุฑูŠุจ ุจุดูƒู„ ุบูŠุฑ ู…ุชูˆู‚ุนุŒ ุฃูˆ ุนู†ุฏ ุงู„ุฑุบุจุฉ ููŠ ู…ุชุงุจุนุฉ ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ ุจูŠุงู†ุงุช ุฌุฏูŠุฏุฉ ุฃูˆ ู„ูุชุฑุฉ ุฒู…ู†ูŠุฉ ุฃุทูˆู„. + +ุนู†ุฏ ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจุŒ ูŠู‚ูˆู… Ultralytics YOLO ุจุชุญู…ูŠู„ ุงู„ุฃูˆุฒุงู† ู…ู† ุขุฎุฑ ู†ู…ูˆุฐุฌ ู…ุญููˆุธ ูˆุฃูŠุถู‹ุง ุงุณุชุนุงุฏุฉ ุญุงู„ุฉ ุงู„ู…ุญุณู†ุŒ ูˆุฌุฏูˆู„ุฉ ู…ุนุฏู„ ุงู„ุชุนู„ู…ุŒ ูˆุนุฏุฏ ุงู„ุญู‚ุจุฉ. ู‡ุฐุง ูŠุชูŠุญ ู„ูƒ ู…ุชุงุจุนุฉ ุนู…ู„ูŠุฉ ุงู„ุชุฏุฑูŠุจ ุจุดูƒู„ ุณู„ุณ ู…ู† ุญูŠุซ ุชูˆู‚ูุช. + +ูŠู…ูƒู†ูƒ ุจุณู‡ูˆู„ุฉ ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจ ููŠ Ultralytics YOLO ุนู† ุทุฑูŠู‚ ุชุนูŠูŠู† ุงู„ูˆุณูŠุทุฉ `resume` ุฅู„ู‰ `True` ุนู†ุฏ ุงุณุชุฏุนุงุก ุทุฑูŠู‚ุฉ `train`ุŒ ูˆุชุญุฏูŠุฏ ุงู„ู…ุณุงุฑ ุฅู„ู‰ ู…ู„ู `.pt` ุงู„ุฐูŠ ูŠุญุชูˆูŠ ุนู„ู‰ ุฃูˆุฒุงู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฏุฑุจ ุฌุฒุฆูŠู‹ุง. + +ููŠู…ุง ูŠู„ูŠ ู…ุซุงู„ ู„ูƒูŠููŠุฉ ุงุณุชุฆู†ุงู ุชุฏุฑูŠุจ ู…ู‚ุทูˆุน ุจุงุณุชุฎุฏุงู… ุจุงูŠุซูˆู† ูˆุนุจุฑ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ: + +!!! Example "ู…ุซุงู„ ุนู„ู‰ ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจ" + + === "ุจุงูŠุซูˆู†" + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('path/to/last.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ุฌุฒุฆูŠู‹ุง + + # ุงุณุชุฆู†ุงู ุงู„ุชุฏุฑูŠุจ + results = model.train(resume=True) + ``` + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + # ุงุณุชุฆู†ุงู ุชุฏุฑูŠุจ ู…ุชู‚ุทุน + yolo train resume model=path/to/last.pt + ``` + +ู…ู† ุฎู„ุงู„ ุชุนูŠูŠู† `resume=True`ุŒ ุณุชูˆุงุตู„ ูˆุธูŠูุฉ 'train' ุงู„ุชุฏุฑูŠุจ ู…ู† ุญูŠุซ ุชูˆู‚ูุชุŒ ุจุงุณุชุฎุฏุงู… ุงู„ุญุงู„ุฉ ุงู„ู…ุฎุฒู†ุฉ ููŠ ู…ู„ู 'path/to/last.pt'. ุฅุฐุง ุชู… ุญุฐู ุงู„ูˆุณูŠุทุฉ `resume` ุฃูˆ ุชุนูŠูŠู†ู‡ุง ุนู„ู‰ `False`ุŒ ุณุชุจุฏุฃ ูˆุธูŠูุฉ 'train' ุฌู„ุณุฉ ุชุฏุฑูŠุจ ุฌุฏูŠุฏุฉ. + +ุชุฐูƒุฑ ุฃู†ู‡ ูŠุชู… ุญูุธ ู†ู‚ุงุท ุงู„ุชูุชูŠุด ููŠ ู†ู‡ุงูŠุฉ ูƒู„ ุญู‚ุจุฉ ุงูุชุฑุงุถูŠุงู‹ุŒ ุฃูˆ ููŠ ูุชุฑุฉ ุซุงุจุชุฉ ุจุงุณุชุฎุฏุงู… ูˆุณูŠุทุฉ 'save_period'ุŒ ู„ุฐุง ูŠุฌุจ ุนู„ูŠูƒ ุฅุชู…ุงู… ุญู‚ุจุฉ ูˆุงุญุฏุฉ ุนู„ู‰ ุงู„ุฃู‚ู„ ู„ุงุณุชุฆู†ุงู ุชุดุบูŠู„ ุชุฏุฑูŠุจ. 
+ +## ุงู„ูˆุณุงุฆุท + +ุชุชุนู„ู‚ ุฅุนุฏุงุฏุงุช ุงู„ุชุฏุฑูŠุจ ู„ู†ู…ุงุฐุฌ YOLO ุจุงู„ู…ุนู„ู…ุงุช ูˆุงู„ุชูƒูˆูŠู†ุงุช ุงู„ู…ุฎุชู„ูุฉ ุงู„ู…ุณุชุฎุฏู…ุฉ ู„ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู…ุง. ูŠู…ูƒู† ุฃู† ุชุคุซุฑ ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ุนู„ู‰ ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ูˆุณุฑุนุชู‡ ูˆุฏู‚ุชู‡. ุชุชุถู…ู† ุจุนุถ ุฅุนุฏุงุฏุงุช YOLO ุงู„ุชุฏุฑูŠุจ ุงู„ุดุงุฆุนุฉ ุญุฌู… ุงู„ุฏููุนุงุชุŒ ู…ุนุฏู„ ุงู„ุชุนู„ู…ุŒ ุงู„ุฒุฎู…ุŒ ูˆุงู„ุชู‚ู„ูŠู„ ุงู„ู‚ูŠู…ูŠ ู„ู„ุฃูˆุฒุงู†. ุงู„ุนูˆุงู…ู„ ุงู„ุฃุฎุฑู‰ ุงู„ุชูŠ ู‚ุฏ ุชุคุซุฑ ููŠ ุนู…ู„ูŠุฉ ุงู„ุชุฏุฑูŠุจ ุชุดู…ู„ ุงุฎุชูŠุงุฑ ุงู„ู…ุญุณู†ุŒ ุงุฎุชูŠุงุฑ ุฏุงู„ุฉ ุงู„ุฎุณุงุฑุฉุŒ ูˆุญุฌู… ูˆุชุฑูƒูŠุจ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุชุฏุฑูŠุจ. ู…ู† ุงู„ู…ู‡ู… ุถุจุท ูˆุชุฌุฑุจุฉ ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ุจุนู†ุงูŠุฉ ู„ุชุญู‚ูŠู‚ ุฃูุถู„ ุฃุฏุงุก ู…ู…ูƒู† ู„ู…ู‡ู…ุฉ ู…ุนูŠู†ุฉ. + +| ุงู„ู…ูุชุงุญ | ุงู„ู‚ูŠู…ุฉ | ุงู„ูˆุตู | +|-------------------|----------|---------------------------------------------------------------------------------------------------------------------------| +| `model` | `None` | ู…ุณุงุฑ ุฅู„ู‰ ู…ู„ู ุงู„ู†ู…ูˆุฐุฌุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ yolov8n.ptุŒ yolov8n.yaml | +| `data` | `None` | ู…ุณุงุฑ ุฅู„ู‰ ู…ู„ู ุงู„ุจูŠุงู†ุงุชุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ coco128.yaml | +| `epochs` | `100` | ุนุฏุฏ ุงู„ุญู‚ุจ ู„ู„ุชุฏุฑูŠุจ ู„ | +| `patience` | `50` | ุญู‚ุจ ู„ู„ุงู†ุชุธุงุฑ ุจุฏูˆู† ุชุญุณู† ุธุงู‡ุฑ ู„ุฅูŠู‚ุงู ุงู„ุชุฏุฑูŠุจ ู…ุจูƒุฑุง | +| `batch` | `16` | ุนุฏุฏ ุงู„ุตูˆุฑ ููŠ ูƒู„ ุฏููุนุฉ (-1 for AutoBatch) | +| `imgsz` | `640` | ุญุฌู… ุงู„ุตูˆุฑ ุงู„ุฏุฎู„ ุจุตูˆุฑุฉ ู…ุซุงู„ูŠุฉ | +| `save` | `True` | ุญุงู„ ุฅู†ู‚ุงุฐ ุงู„ู†ู‚ุงุท ุงู„ู…ูุชูˆุญุฉ ู„ู„ุชุฏุฑูŠุจ ูˆู†ุชุงุฆุฌ ุงู„ูƒุดู | +| `save_period` | `-1` | ุญูุธ ุงู„ู†ู‚ุทุฉ ุงู„ูุงุตู„ุฉ ูƒู„ x ุญู‚ุจุฉ (ุชูƒูˆู† ู…ุนุทู„ุฉ ุฅุฐุง ูƒุงู†ุช < 1) | +| `cache` | `False` | ุตุญูŠุญ / ุฐุงูƒุฑุฉ ุนุดูˆุงุฆูŠุฉ ุฃูˆ ู‚ุฑุต / ุบูŠุฑ ุตุญูŠุญ. 
ุงุณุชุฎุฏู… ุฐุงูƒุฑุฉ ุงู„ุชุฎุฒูŠู† ุงู„ู…ุคู‚ุช ููŠ ุชุญู…ูŠู„ ุงู„ุจูŠุงู†ุงุช | +| `device` | `None` | ุงู„ุฌู‡ุงุฒ ู„ุชุดุบูŠู„ ุงู„ุชุฏุฑูŠุจ ุนู„ูŠู‡ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ ุฌู‡ุงุฒ ุงู„ุฑุณูˆู…ุงุช cuda=0 ุฃูˆ ุฌู‡ุงุฒ ุงู„ุฑุณูˆู…ุงุช cuda=0,1,2,3 ุฃูˆ ุฌู‡ุงุฒ ุงู„ู…ุนุงู„ุฌ ุงู„ู…ุฑูƒุฒูŠcpu | +| `workers` | `8` | ุนุฏุฏ ุฎูŠูˆุท ุงู„ุนุงู…ู„ุฉ ู„ุชุญู…ูŠู„ ุงู„ุจูŠุงู†ุงุช (ู„ูƒู„ RANK ุฅุฐุง ูƒุงู† DDP) | +| `project` | `None` | ุงุณู… ุงู„ู…ุดุฑูˆุน | +| `name` | `None` | ุงุณู… ุงู„ุชุฌุฑุจุฉ | +| `exist_ok` | `False` | ู…ุง ุฅุฐุง ูƒุงู† ุณูŠุชู… ุงู„ูƒุชุงุจุฉ ููˆู‚ ุชุฌุฑุจุฉ ู…ูˆุฌูˆุฏุฉ | +| `pretrained` | `True` | (bool ุฃูˆ str) ู…ุง ุฅุฐุง ูƒุงู† ุณูŠุชู… ุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ ู…ุชุฏุฑุจ ู…ุณุจู‚ู‹ุง (bool) ุฃูˆ ู†ู…ูˆุฐุฌ ู„ุชุญู…ูŠู„ ุงู„ุฃูˆุฒุงู† ู…ู†ู‡ (str) | +| `optimizer` | `'auto'` | ุงู„ู…ุญุณู† ู„ุงุณุชุฎุฏุงู…ู‡ุŒ ุงู„ุฎูŠุงุฑุงุช=[SGDุŒ AdamุŒ AdamaxุŒ AdamWุŒ NAdamุŒ RAdamุŒ RMSPropุŒ Auto] | +| `verbose` | `False` | ู…ุง ุฅุฐุง ูƒุงู† ุณูŠุชู… ุทุจุงุนุฉ ู…ุฎุฑุฌุงุช ู…ูุตู„ุฉ | +| `seed` | `0` | ุงู„ุจุฐุฑุฉ ุงู„ุนุดูˆุงุฆูŠุฉ ู„ุฅุนุงุฏุฉ ุงู„ุฅู†ุชุงุฌูŠุฉ | +| `deterministic` | `True` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุชู…ูƒูŠู† ุงู„ูˆุถุน ุงู„ู…ุญุฏุฏ | +| `single_cls` | `False` | ูŠุฌุจ ุชุฏุฑูŠุจ ุจูŠุงู†ุงุช ู…ุชุนุฏุฏุฉ ุงู„ูุฆุงุช ูƒูุฆุฉ ูˆุงุญุฏุฉ | +| `rect` | `False` | ุชุฏุฑูŠุจ ู…ุณุชุทูŠู„ ุจุงุณุชุฎุฏุงู… ุชุฌู…ูŠุน ุงู„ุฏููุนุงุช ู„ู„ุญุฏ ุงู„ุฃุฏู†ู‰ ู…ู† ุงู„ุญุดูˆ | +| `cos_lr` | `False` | ุงุณุชุฎุฏู… ุฌุฏูˆู„ุฉ ู…ุนุฏู„ ุงู„ุชุนู„ู… ุจุชูˆู‚ูŠุช ุงู„ูƒูˆุณุง | +| `close_mosaic` | `10` | (int) ุชุนุทูŠู„ ุงู„ุชูƒุจูŠุฑ ุงู„ุชุฌุงู†ุจูŠ ู„ู„ุญุฌู… ู„ู„ุญู‚ุจ ุงู„ู†ู‡ุงุฆูŠุฉ (0 ู„ู„ุชุนุทูŠู„) | +| `resume` | `False` | ุงุณุชุฃู†ู ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ู†ู‚ุทุฉ ุงู„ุฃุฎูŠุฑุฉ | +| `amp` | `True` | ุชุฏุฑูŠุจ ุฏู‚ุฉ ู…ุฎุชู„ุทุฉ ุชู„ู‚ุงุฆูŠุฉ (AMP)ุŒ ุงู„ุฎูŠุงุฑุงุช=[TrueุŒ False] | +| `fraction` | `1.0` | ู†ุณุจุฉ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุฑุงุฏ ุชุฏุฑูŠุจู‡ุง (ุงู„ุงูุชุฑุงุถูŠ ู‡ูˆ 1.0ุŒ ุฌู…ูŠุน ุงู„ุตูˆุฑ ููŠ ู…ุฌู…ูˆุนุฉ ุงู„ุชุฏุฑูŠุจ) | +| `profile` | `False` | ู‚ู… ุจุชุดุบูŠู„ ุจุฑูˆูุงูŠู„ ุงู„ุณุฑุนุฉ ู„ู…ุดุบู„ุงุช ONNX ูˆ TensorRT ุฃุซู†ุงุก ุงู„ุชุฏุฑูŠุจ ู„ู„ู…ุณุฌู„ุงุช | +| `freeze` | `None` | (int ุฃูˆ listุŒ ุงุฎุชูŠุงุฑูŠ) ุชุฌู…ูŠุฏ ุฃูˆู„ n ุทุจู‚ุฉุŒ ุฃูˆ ู‚ุงุฆู…ุฉ ุทุจู‚ุงุช ุงู„ูู‡ุฑุณ ุฎู„ุงู„ ุงู„ุชุฏุฑูŠุจ | +| `lr0` | `0.01` | ู…ุนุฏู„ ุงู„ุชุนู„ู… ุงู„ุฃูˆู„ูŠ (ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ SGD=1E-2ุŒ Adam=1E-3) | +| `lrf` | `0.01` | ู…ุนุฏู„ ุงู„ุชุนู„ู… ุงู„ู†ู‡ุงุฆูŠ (lr0 * lrf) | +| `momentum` | `0.937` | ุงู„ุฒุฎู… SGD / Adam beta1 | +| `weight_decay` | `0.0005` | ุชู‚ู„ูŠู„ ุงู„ุฃูˆุฒุงู† ู„ู„ู…ุญุณู† (5e-4) | +| `warmup_epochs` | `3.0` | ุญู‚ุจ ุงู„ุงุญู…ุงุก (ุงู„ุฃุฌุฒุงุก ุงู„ู…ุฆูˆูŠุฉ ู…ู‚ุจูˆู„ุฉ) | +| `warmup_momentum` | `0.8` | ุงู„ุฒุฎู… ุงู„ุฃูˆู„ูŠ ู„ู„ุชุฏูู‚ ุงู„ุฃุนู„ู‰ | +| `warmup_bias_lr` | `0.1` | ู†ุณุจุฉ ุชุนู„ู… ุงู„ุงู†ุญูŠุงุฒ ุงู„ุฃูˆู„ูŠ ู„ู„ุชุฏูู‚ ุงู„ุนู„ูˆูŠ | +| `box` | `7.5` | ูˆุฒู† ูุงู‚ุฏ ุงู„ุตู†ุฏูˆู‚ | +| `cls` | `0.5` | ูˆุฒู† ูุงู‚ุฏ ุงู„ุชุตู†ูŠู (ุชู†ุงุณุจ ู…ุน ุงู„ุจูƒุณู„) | +| `dfl` | `1.5` | ูˆุฒู† ุงู„ุฎุณุงุฑุฉ ุงู„ุฃู…ุงู…ูŠุฉ ู„ู„ุชุตู†ูŠู ูˆุงู„ุตู†ุฏูˆู‚ | +| `pose` | `12.0` | ูˆุฒู† ูุงู‚ุฏ ุงู„ูˆุถุน (ุงู„ูˆุถุน ูู‚ุท) | +| `kobj` | `2.0` | ูˆุฒู† ูุงู‚ุฏ ู†ู‚ุทุฉ ุงู„ู…ูุชุงุญ (ุงู„ูˆุถุน ูู‚ุท) | +| `label_smoothing` | `0.0` | ุงู„ุชุณูˆูŠุฉ ุงู„ุบู…ูˆุถ (ูƒุณุฑ) | +| `nbs` | `64` | ุญุฌู… ุงู„ุฏููุนุฉ ุงู„ุงุณู…ูŠ | +| `overlap_mask` | `True` | ุงู„ุชุญุฌูŠู… ูŠุฌุจ ุฃู† ูŠุชุฏุงุฎู„ 
ุฃู‚ู†ุนุฉ ุงู„ุชุฏุฑูŠุจ (ุงู„ุชุฏุฑูŠุจ ุงู„ูุตู„ูŠ ูู‚ุท) | +| `mask_ratio` | `4` | ู…ุนุฏู„ ุชุญุฌูŠู… ุฃู‚ู†ุนุฉ (ุงู„ุชุฏุฑูŠุจ ุงู„ูุตู„ูŠ ูู‚ุท) | +| `dropout` | `0.0` | ุงุณุชุฎุฏุงู… ุชู†ุธูŠู… ุงู„ุฅุณู‚ุงุท (ุงู„ุชุฏุฑูŠุจ ุงู„ุชุทุจูŠู‚ูŠ ูู‚ุท) | +| `val` | `True` | ุงู„ุชุญู‚ู‚/ุงู„ุงุฎุชุจุงุฑ ุฎู„ุงู„ ุงู„ุชุฏุฑูŠุจ | + +## ุชุณุฌูŠู„ + +ุนู†ุฏ ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ YOLOv8ุŒ ู‚ุฏ ุชุฌุฏ ุฃู†ู‡ ู…ู† ุงู„ู…ููŠุฏ ุชุชุจุน ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ู…ุน ู…ุฑูˆุฑ ุงู„ูˆู‚ุช. ู‡ู†ุง ูŠุฃุชูŠ ุฏูˆุฑ ุชุณุฌูŠู„. ูŠูˆูุฑ Ultralytics' YOLO ุฏุนู…ู‹ุง ู„ุซู„ุงุซุฉ ุฃู†ูˆุงุน ู…ู† ุฃุฌู‡ุฒุฉ ุงู„ุณุฌู„ - Comet ูˆ ClearML ูˆ TensorBoard. + +ู„ุงุณุชุฎุฏุงู… ุณุฌู„ุŒ ุญุฏุฏู‡ ู…ู† ู‚ุงุฆู…ุฉ ุงู„ุณุญุจ ุฃุณูู„ ุงู„ูƒูˆุฏ ูˆู‚ู… ุจุชุดุบูŠู„ู‡. ุณูŠุชู… ุชุซุจูŠุช ุงู„ุณุฌู„ ุงู„ู…ุฎุชุงุฑ ูˆุชู‡ูŠุฆุชู‡. + +### Comet + +[Comet](../../../integrations/comet.md) ู‡ูˆ ู…ู†ุตุฉ ุชุณู…ุญ ู„ุนู„ู…ุงุก ุงู„ุจูŠุงู†ุงุช ูˆุงู„ู…ุทูˆุฑูŠู† ุจู…ุชุงุจุนุฉ ูˆู…ู‚ุงุฑู†ุฉ ูˆุดุฑุญ ูˆุชุญุณูŠู† ุงู„ุชุฌุงุฑุจ ูˆุงู„ู†ู…ุงุฐุฌ. ูŠูˆูุฑ ูˆุธุงุฆู ู…ุซู„ ุงู„ู…ู‚ุงูŠูŠุณ ุงู„ุฒู…ู†ูŠุฉ ููŠ ุงู„ูˆู‚ุช ุงู„ุญู‚ูŠู‚ูŠ ูˆูุฑูˆู‚ุงุช ุงู„ุดูุฑุฉ ูˆุชุชุจุน ุงู„ู…ุนู„ู…ุงุช. + +ู„ุงุณุชุฎุฏุงู… Comet: + +!!! Example "ุฃู…ุซู„ุฉ ุจุงูŠุซูˆู†" + + === "ุจุงูŠุซูˆู†" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +ุชุฐูƒุฑ ุชุณุฌูŠู„ ุงู„ุฏุฎูˆู„ ุฅู„ู‰ ุญุณุงุจูƒ ููŠ Comet ุนู„ู‰ ู…ูˆู‚ุนู‡ู… ุนู„ู‰ ุงู„ูˆูŠุจ ูˆุงู„ุญุตูˆู„ ุนู„ู‰ ู…ูุชุงุญ API ุงู„ุฎุงุต ุจูƒ. ุณุชุญุชุงุฌ ุฅู„ู‰ ุฅุถุงูุชู‡ ุฅู„ู‰ ุงู„ุฅุนุฏุงุฏุงุช ุงู„ู…ุชุบูŠุฑุฉ ููŠ ุงู„ุจูŠุฆุฉ ุงู„ุฎุงุตุฉ ุจูƒ ุฃูˆ ุจุฑู†ุงู…ุฌ ุงู„ู†ุต ุงู„ุฎุงุต ุจูƒ ู„ุชุณุฌูŠู„ ุงู„ุชุฌุงุฑุจ ุงู„ุฎุงุตุฉ ุจูƒ. + +### ClearML + +[ClearML](https://www.clear.ml/) ู‡ูŠ ู…ู†ุตุฉ ู…ูุชูˆุญุฉ ุงู„ู…ุตุฏุฑ ุชุนู…ู„ ุนู„ู‰ ุชุชุจุน ุงู„ุชุฌุงุฑุจ ูˆุชุณู‡ูŠู„ ู…ุดุงุฑูƒุฉ ุงู„ู…ูˆุงุฑุฏ ุจูƒูุงุกุฉ. ุชู… ุชุตู…ูŠู…ู‡ ู„ู…ุณุงุนุฏุฉ ุงู„ูุฑู‚ ููŠ ุฅุฏุงุฑุฉ ูˆุชู†ููŠุฐ ูˆุฅุนุงุฏุฉ ุฅู†ุชุงุฌ ุนู…ู„ู‡ู… ููŠ ู…ุฌุงู„ ุชุนู„ู… ุงู„ุขู„ุฉ ุจูƒูุงุกุฉ ุฃูƒุจุฑ. + +ู„ุงุณุชุฎุฏุงู… ClearML: + +!!! Example "ุฃู…ุซู„ุฉ ุจุงูŠุซูˆู†" + + === "ุจุงูŠุซูˆู†" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +ุจุนุฏ ุชุดุบูŠู„ ู‡ุฐุง ุงู„ุณูƒุฑูŠุจุชุŒ ุณุชุญุชุงุฌ ุฅู„ู‰ ุชุณุฌูŠู„ ุงู„ุฏุฎูˆู„ ุฅู„ู‰ ุญุณุงุจ ClearML ุงู„ุฎุงุต ุจูƒ ุนู„ู‰ ุงู„ู…ุณุชุนุฑุถ ูˆู…ุตุงุฏู‚ุฉ ุฌู„ุณุชูƒ. + +## TensorBoard + +[TensorBoard](https://www.tensorflow.org/tensorboard) ู‡ูŠ ู…ุฌู…ูˆุนุฉ ุฃุฏูˆุงุช ู„ุชุตูˆุฑ TensorFlow ุŒ ุชุณู…ุญ ู„ูƒ ุจุชุตูˆุฑ ู†ู…ูˆุฐุฌ TensorFlow ุงู„ุฎุงุต ุจูƒ ุŒ ูˆุฑุณู… ุงู„ู…ู‚ุงูŠูŠุณ ุงู„ูƒู…ูŠุฉ ุญูˆู„ ุชู†ููŠุฐ ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฎุงุต ุจูƒ ุŒ ูˆุนุฑุถ ุจูŠุงู†ุงุช ุฅุถุงููŠุฉ ู…ุซู„ ุงู„ุตูˆุฑ ุงู„ุชูŠ ุชู…ุฑ ุนุจุฑู‡ุง. + +ู„ู„ุงุณุชูุงุฏุฉ ู…ู† TensorBoard ููŠ [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb): + +!!! Example "ุฃู…ุซู„ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # ุงุณุชุจุฏู„ ุจุงู„ุฏู„ูŠู„ 'runs' + ``` + +ู„ุงุณุชุฎุฏุงู… TensorBoard ู…ุญู„ูŠู‹ุงุŒ ู‚ู… ุจุชุดุบูŠู„ ุงู„ุฃู…ุฑ ุฃุฏู†ุงู‡ ูˆุงุนุฑุถ ุงู„ู†ุชุงุฆุฌ ุนู„ู‰ ุงู„ุฑุงุจุท http://localhost:6006/. + +!!! 
Example "ุฃู…ุซู„ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + + === "ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + ```bash + tensorboard --logdir ultralytics/runs # ุงุณุชุจุฏู„ ุจุงู„ุฏู„ูŠู„ 'runs' + ``` + +ุณูŠุชู… ุชุญู…ูŠู„ TensorBoard ูˆุชูˆุฌูŠู‡ู‡ ุฅู„ู‰ ุงู„ุฏู„ูŠู„ ุงู„ุฐูŠ ูŠุชู… ุญูุธ ุณุฌู„ุงุช ุงู„ุชุฏุฑูŠุจ ููŠู‡. + +ุจุนุฏ ุฅุนุฏุงุฏ ุงู„ุณุฌู„ ุงู„ุฎุงุต ุจูƒุŒ ูŠู…ูƒู†ูƒ ุงู„ุงุณุชู…ุฑุงุฑ ููŠ ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ. ุณูŠุชู… ุณุฌู„ ุฌู…ูŠุน ู…ู‚ุงูŠูŠุณ ุงู„ุชุฏุฑูŠุจ ุชู„ู‚ุงุฆูŠู‹ุง ููŠ ุงู„ู…ู†ุตุฉ ุงู„ุชูŠ ุงุฎุชุฑุชู‡ุงุŒ ูˆูŠู…ูƒู†ูƒ ุงู„ูˆุตูˆู„ ุฅู„ู‰ ู‡ุฐู‡ ุงู„ุณุฌู„ุงุช ู„ู…ุฑุงู‚ุจุฉ ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฎุงุต ุจูƒ ู…ุน ู…ุฑูˆุฑ ุงู„ูˆู‚ุช ูˆู…ู‚ุงุฑู†ุฉ ู†ู…ุงุฐุฌ ู…ุฎุชู„ูุฉ ูˆุชุญุฏูŠุฏ ุงู„ู…ุฌุงู„ุงุช ุงู„ุชูŠ ูŠู…ูƒู† ุชุญุณูŠู†ู‡ุง. diff --git a/ultralytics/docs/ar/modes/train.md:Zone.Identifier b/ultralytics/docs/ar/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/modes/val.md b/ultralytics/docs/ar/modes/val.md new file mode 100755 index 0000000..ed95432 --- /dev/null +++ b/ultralytics/docs/ar/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: ุฏู„ูŠู„ ู„ุงุฎุชุจุงุฑ ู†ู…ุงุฐุฌ YOLOv8 ุงู„ุตุญูŠุญุฉ. ุชุนุฑู ุนู„ู‰ ูƒูŠููŠุฉ ุชู‚ูŠูŠู… ุฃุฏุงุก ู†ู…ุงุฐุฌ YOLO ุงู„ุฎุงุตุฉ ุจูƒ ุจุงุณุชุฎุฏุงู… ุฅุนุฏุงุฏุงุช ูˆู…ู‚ุงูŠูŠุณ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ ู…ุน ุฃู…ุซู„ุฉ ุจุฑู…ุฌูŠุฉ ุจุงู„ู„ุบุฉ ุงู„ุจุงูŠุซูˆู† ูˆูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ. +keywords: Ultralytics, YOLO Docs, YOLOv8, ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ, ุชู‚ูŠูŠู… ุงู„ู†ู…ูˆุฐุฌ, ุงู„ู…ุนู„ู…ุงุช ุงู„ูุฑุนูŠุฉ, ุงู„ุฏู‚ุฉ, ุงู„ู…ู‚ุงูŠูŠุณ, ุงู„ุจุงูŠุซูˆู†, ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ +--- + +# ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ุงุฐุฌ ุจุงุณุชุฎุฏุงู… Ultralytics YOLO + +ุจูŠุฆุฉ ุงู„ู†ุธุงู… ุงู„ุจูŠุฆูŠ ูˆุงู„ุชูƒุงู…ู„ุงุช ู„ู€ Ultralytics YOLO + +## ู…ู‚ุฏู…ุฉ + +ูŠุนุชุจุฑ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุฎุทูˆุฉ ุญุงุณู…ุฉ ููŠ ุฎุท ุฃู†ุงุจูŠุจ ุงู„ุชุนู„ู… ุงู„ุขู„ูŠุŒ ุญูŠุซ ูŠุชูŠุญ ู„ูƒ ุชู‚ูŠูŠู… ุฌูˆุฏุฉ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑุจุฉ. ูŠูˆูุฑ ูˆุถุน ุงู„ู€ Val ููŠ Ultralytics YOLOv8 ู…ุฌู…ูˆุนุฉ ุฃุฏูˆุงุช ูˆู…ู‚ุงูŠูŠุณ ู‚ูˆูŠุฉ ู„ุชู‚ูŠูŠู… ุฃุฏุงุก ู†ู…ุงุฐุฌ ุงู„ูƒุดู ุนู† ุงู„ูƒุงุฆู†ุงุช ุงู„ุฎุงุตุฉ ุจูƒ. ูŠุนู…ู„ ู‡ุฐุง ุงู„ุฏู„ูŠู„ ูƒู…ุตุฏุฑ ูƒุงู…ู„ ู„ูู‡ู… ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ู€ Val ุจุดูƒู„ ูุนุงู„ ู„ุถู…ุงู† ุฃู† ู†ู…ุงุฐุฌูƒ ุฏู‚ูŠู‚ุฉ ูˆู…ูˆุซูˆู‚ุฉ. + +## ู„ู…ุงุฐุง ูŠูˆูุฑ Ultralytics YOLO ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ + +ู‡ู†ุง ู‡ูŠ ุงู„ุฃุณุจุงุจ ุงู„ุชูŠ ุชุฌุนู„ ุงุณุชุฎุฏุงู… ูˆุถุน ุงู„ู€ Val ููŠ YOLOv8 ู…ููŠุฏู‹ุง: + +- **ุงู„ุฏู‚ุฉ:** ุงู„ุญุตูˆู„ ุนู„ู‰ ู…ู‚ุงูŠูŠุณ ุฏู‚ูŠู‚ุฉ ู…ุซู„ mAP50 ูˆ mAP75 ูˆ mAP50-95 ู„ุชู‚ูŠูŠู… ู†ู…ูˆุฐุฌูƒ ุจุดูƒู„ ุดุงู…ู„. +- **ุงู„ุฑุงุญุฉ:** ุงุณุชุฎุฏู… ุงู„ู…ูŠุฒุงุช ุงู„ู…ุฏู…ุฌุฉ ุงู„ุชูŠ ุชุชุฐูƒุฑ ุฅุนุฏุงุฏุงุช ุงู„ุชุฏุฑูŠุจุŒ ู…ู…ุง ูŠุจุณุท ุนู…ู„ูŠุฉ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ. +- **ู…ุฑูˆู†ุฉ:** ู‚ู… ุจุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… ู†ูุณ ุงู„ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ูˆุฃุญุฌุงู… ุงู„ุตูˆุฑ ุฃูˆ ู…ุฌู…ูˆุนุงุช ุจูŠุงู†ุงุช ูˆุฃุญุฌุงู… ุตูˆุฑ ู…ุฎุชู„ูุฉ. +- **ุถุจุท ุงู„ู…ุนู„ู…ุงุช ุงู„ูุฑุนูŠุฉ:** ุงุณุชุฎุฏู… ุงู„ู…ู‚ุงูŠูŠุณ ุงู„ุชุญู‚ู‚ ู„ุถุจุท ู†ู…ูˆุฐุฌูƒ ู„ุชุญุณูŠู† ุงู„ุฃุฏุงุก. 
+ +### ุงู„ู…ูŠุฒุงุช ุงู„ุฑุฆูŠุณูŠุฉ ู„ูˆุถุน ุงู„ู€ Val + +ู‡ุฐู‡ ู‡ูŠ ุงู„ูˆุธุงุฆู ุงู„ู…ู…ูŠุฒุฉ ุงู„ุชูŠ ูŠูˆูุฑู‡ุง ูˆุถุน ุงู„ู€ Val ููŠ YOLOv8: + +- **ุงู„ุฅุนุฏุงุฏุงุช ุงู„ุชู„ู‚ุงุฆูŠุฉ:** ูŠุชุฐูƒุฑ ุงู„ู†ู…ุงุฐุฌ ุฅุนุฏุงุฏุงุช ุงู„ุชุฏุฑูŠุจ ุงู„ุฎุงุตุฉ ุจู‡ุง ู„ู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ ุจุณู‡ูˆู„ุฉ. +- **ุฏุนู… ู…ุชุนุฏุฏ ุงู„ู…ู‚ุงูŠูŠุณ:** ู‚ูŠู… ู†ู…ูˆุฐุฌูƒ ุจู†ุงุกู‹ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ู…ู† ู…ู‚ุงูŠูŠุณ ุงู„ุฏู‚ุฉ. +- **ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ูˆูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ Python:** ุงุฎุชุฑ ุจูŠู† ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ุฃูˆ ูˆุงุฌู‡ุฉ ุจุฑู…ุฌุฉ Python ุญุณุจ ุชูุถูŠู„ูƒ ู„ู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ. +- **ุชูˆุงูู‚ ุงู„ุจูŠุงู†ุงุช:** ูŠุนู…ู„ ุจุณู„ุงุณุฉ ู…ุน ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุณุชุฎุฏู…ุฉ ุฎู„ุงู„ ู…ุฑุญู„ุฉ ุงู„ุชุฏุฑูŠุจ ุจุงู„ุฅุถุงูุฉ ุฅู„ู‰ ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช ุงู„ู…ุฎุตุตุฉ. + +!!! Tip "ู†ุตูŠุญุฉ" + + * ุชุชุฐูƒุฑ ู†ู…ุงุฐุฌ YOLOv8 ุฅุนุฏุงุฏุงุช ุงู„ุชุฏุฑูŠุจ ุชู„ู‚ุงุฆูŠู‹ุงุŒ ู„ุฐุง ูŠู…ูƒู†ูƒ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุจู†ูุณ ุญุฌู… ุงู„ุตูˆุฑุฉ ูˆุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุฃุตู„ูŠุฉ ุจุณู‡ูˆู„ุฉ ุจุงุณุชุฎุฏุงู… "yolo val model=yolov8n.pt" ุฃูˆ "model('yolov8n.pt').val()" + +## ุฃู…ุซู„ุฉ ุงู„ุงุณุชุฎุฏุงู… + +ุชุญู‚ู‚ ู…ู† ุฏู‚ุฉ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฏุฑุจ YOLOv8n ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128. ู„ุง ูŠู„ุฒู… ุชู…ุฑูŠุฑ ุฃูŠ ูˆุณูŠุทุฉ ูƒูˆุณูŠุทุฉ ูŠุชุฐูƒุฑ ุงู„ู€ model ุงู„ุชุฏุฑูŠุจ ูˆุงู„ูˆุณูŠุทุงุช ูƒุณู…ุงุช ุงู„ู†ู…ูˆุฐุฌ. ุงู†ุธุฑ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡ ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ู…ู† ูˆุณูŠุทุงุช ุงู„ุชุตุฏูŠุฑ. + +!!! Example "ู…ุซุงู„" + + === "ุงู„ุจุงูŠุซูˆู†" + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n.pt') # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + + # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ + metrics = model.val() # ู„ุง ูŠู„ุฒู… ุฃูŠ ูˆุณูŠุทุงุชุŒ ูŠุชุฐูƒุฑ ุงู„ุชูƒูˆูŠู† ูˆุงู„ูˆุณูŠุทุงุช + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ู‚ุงุฆู…ุฉ ุชุญุชูˆูŠ ุนู„ู‰ map50-95 ู„ูƒู„ ูุฆุฉ + ``` + === "ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ" + + ```bash + yolo detect val model=yolov8n.pt # ุชุฌุฑูŠุจ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + yolo detect val model=path/to/best.pt # ุชุฌูŽูŒุฑุจ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + ``` + +## ุงู„ูˆุณูŠุทุงุช + +ุชุดูŠุฑ ุฅุนุฏุงุฏุงุช ุงู„ุชุญู‚ู‚ ุจุงู„ู†ุณุจุฉ ู„ู†ู…ุงุฐุฌ YOLO ุฅู„ู‰ ุงู„ู…ุนู„ู…ุงุช ุงู„ูุฑุนูŠุฉ ูˆุงู„ุชูƒูˆูŠู†ุงุช ุงู„ู…ุฎุชู„ูุฉ ุงู„ู…ุณุชุฎุฏู…ุฉ ู„ุชู‚ูŠูŠู… ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ุชุญู‚ู‚. ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ูŠู…ูƒู† ุฃู† ุชุคุซุฑ ุนู„ู‰ ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ูˆุณุฑุนุชู‡ ูˆุฏู‚ุชู‡. ุชุดู…ู„ ุจุนุถ ุฅุนุฏุงุฏุงุช ุงู„ุชุญู‚ู‚ ุงู„ุดุงุฆุนุฉ ููŠ YOLO ุญุฌู… ุงู„ุฏูุนุฉ ูˆุชูƒุฑุงุฑุงุช ุชู†ููŠุฐ ุงู„ุชุญู‚ู‚ ุฃุซู†ุงุก ุงู„ุชุฏุฑูŠุจ ูˆุงู„ู…ู‚ุงูŠูŠุณ ุงู„ู…ุณุชุฎุฏู…ุฉ ู„ุชู‚ูŠูŠู… ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ. ุงู„ุนูˆุงู…ู„ ุงู„ุฃุฎุฑู‰ ุงู„ุชูŠ ู‚ุฏ ุชุคุซุฑ ุนู„ู‰ ุงู„ุนู…ู„ูŠุฉ ุงู„ุฎุงุตุฉ ุจุงู„ุชุญู‚ู‚ ุชุดู…ู„ ุญุฌู… ูˆุชุฑูƒูŠุจ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุชุญู‚ู‚ ูˆุงู„ู…ู‡ู…ุฉ ุงู„ู…ุญุฏุฏุฉ ุงู„ุชูŠ ูŠุชู… ุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ููŠู‡ุง. ู…ู† ุงู„ู…ู‡ู… ุถุจุท ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ูˆุชุฌุฑุจุชู‡ุง ุจุนู†ุงูŠุฉ ู„ุถู…ุงู† ุฃุฏุงุก ุฌูŠุฏ ู„ู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ุชุญู‚ู‚ ูˆูƒุดู ูˆู…ู†ุน ุงู„ุญุงู„ุฉ ุงู„ุชูŠ ูŠุชู… ููŠู‡ุง ุถุจุท ุงู„ุทุฑุงุฒ ุจุดูƒู„ ุฌูŠุฏ. 
+ +| ู…ูุชุงุญ | ุงู„ู‚ูŠู…ุฉ | ุงู„ูˆุตู | +|---------------|---------|------------------------------------------------------------------------------------| +| `data` | `None` | ู…ุณุงุฑ ุฅู„ู‰ ู…ู„ู ุงู„ุจูŠุงู†ุงุชุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ coco128.yaml | +| `imgsz` | `640` | ุญุฌู… ุงู„ุตูˆุฑ ุงู„ุฏุงุฎู„ูŠุฉ ุจุงุนุชุจุงุฑู‡ุง ุนุฏุฏ ุตุญูŠุญ | +| `batch` | `16` | ุนุฏุฏ ุงู„ุตูˆุฑ ู„ูƒู„ ุฏูุนุฉ (-1 ู„ู„ุฏูุน ุงู„ุขู„ูŠ) | +| `save_json` | `False` | ุญูุธ ุงู„ู†ุชุงุฆุฌ ููŠ ู…ู„ู JSON | +| `save_hybrid` | `False` | ุญูุธ ุงู„ู†ุณุฎุฉ ุงู„ู…ุฎุชู„ุทุฉ ู„ู„ุชุณู…ูŠุงุช (ุงู„ุชุณู…ูŠุงุช + ุงู„ุชู†ุจุคุงุช ุงู„ุฅุถุงููŠุฉ) | +| `conf` | `0.001` | ุญุฏ ุงู„ุซู‚ุฉ ููŠ ูƒุดู ุงู„ูƒุงุฆู† | +| `iou` | `0.6` | ุญุฏ ุชุฏุงุฎู„ ุนู„ู‰ ุงู„ู…ุชุญุฏุฉ (IoU) ู„ุนู…ู„ูŠุฉ ุงู„ุฌู…ุน ูˆุงู„ุทุฑุญ | +| `max_det` | `300` | ุงู„ุนุฏุฏ ุงู„ุฃู‚ุตู‰ ู…ู† ุงู„ูƒุดูุงุช ู„ูƒู„ ุตูˆุฑุฉ | +| `half` | `True` | ุงุณุชุฎุฏู… ุงู„ุชู†ุตุช ู†ุตู ุงู„ุฏู‚ุฉ (FP16) | +| `device` | `None` | ุงู„ุฌู‡ุงุฒ ุงู„ุฐูŠ ูŠุชู… ุชุดุบูŠู„ู‡ ุนู„ูŠู‡ุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ ุฌู‡ุงุฒ Cuda=0/1/2/3 ุฃูˆ ุฌู‡ุงุฒ=ู…ุนุงู„ุฌ (CPU) | +| `dnn` | `False` | ุงุณุชุฎุฏู… OpenCV DNN ู„ุนู…ู„ูŠุฉ ุงู„ุชู†ุตุช ุงู„ุฃู…ุซู„ | +| `plots` | `False` | ุฅุธู‡ุงุฑ ุงู„ุฑุณูˆู… ุงู„ุจูŠุงู†ูŠุฉ ุฃุซู†ุงุก ุงู„ุชุฏุฑูŠุจ | +| `rect` | `False` | ุชุญู‚ู‚ ุตูŠุบุฉ *rectangular* ู…ุน ุชุฌู…ูŠุน ูƒู„ ุฏูุนุฉ ู„ู„ุญุตูˆู„ ุนู„ู‰ ุงู„ุญุฏ ุงู„ุฃุฏู†ู‰ ู…ู† ุงู„ุชุนุจุฆุฉ | +| `split` | `val` | ุงุฎุชุฑ ุชู‚ุณูŠู… ุงู„ุจูŠุงู†ุงุช ู„ู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ "val"ุŒ "test" ุฃูˆ "train" | +| diff --git a/ultralytics/docs/ar/modes/val.md:Zone.Identifier b/ultralytics/docs/ar/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/quickstart.md b/ultralytics/docs/ar/quickstart.md new file mode 100755 index 0000000..2364115 --- /dev/null +++ b/ultralytics/docs/ar/quickstart.md @@ -0,0 +1,326 @@ +--- +comments: true +description: ุงุณุชูƒุดู ุฃุณุงู„ูŠุจ ู…ุฎุชู„ูุฉ ู„ุชุซุจูŠุช Ultralytics ุจุงุณุชุฎุฏุงู… pip ูˆ conda ูˆ git ูˆ Docker. ุชุนุฑู‘ู ุนู„ู‰ ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… Ultralytics ู…ุน ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ุฃูˆ ุถู…ู† ู…ุดุงุฑูŠุน Python ุงู„ุฎุงุตุฉ ุจูƒ. +keywords: ุชุซุจูŠุช Ultralytics, pip install Ultralytics, Docker install Ultralytics, Ultralytics command line interface, Ultralytics Python interface +--- + +## ุชุซุจูŠุช Ultralytics + +ูŠูˆูุฑ Ultralytics ุทุฑู‚ ุชุซุจูŠุช ู…ุฎุชู„ูุฉ ุจู…ุง ููŠ ุฐู„ูƒ pip ูˆ conda ูˆ Docker. ูŠู…ูƒู†ูƒ ุชุซุจูŠุช YOLOv8 ุนู† ุทุฑูŠู‚ ุญุฒู…ุฉ `ultralytics` ู…ู† ุฎู„ุงู„ pip ู„ู„ุฅุตุฏุงุฑ ุงู„ุฃุญุฏุซ ูˆุงู„ู…ุณุชู‚ุฑ ุฃูˆ ู…ู† ุฎู„ุงู„ ุงุณุชู†ุณุงุฎ [ู…ุณุชูˆุฏุน Ultralytics ุนู„ู‰ GitHub](https://github.com/ultralytics/ultralytics) ู„ู„ุญุตูˆู„ ุนู„ู‰ ุงู„ุฅุตุฏุงุฑ ุงู„ุฃุญุฏุซ. ูŠู…ูƒู† ุงุณุชุฎุฏุงู… Docker ู„ุชู†ููŠุฐ ุงู„ุญุฒู…ุฉ ููŠ ุญุงูˆูŠุฉ ู…ุนุฒูˆู„ุฉุŒ ูˆุชุฌู†ุจ ุงู„ุชุซุจูŠุช ุงู„ู…ุญู„ูŠ. + +!!! Note "ู…ู„ุงุญุธุฉ" + + ๐Ÿšง ุชู… ุจู†ุงุก ูˆุซุงุฆู‚ู†ุง ู…ุชุนุฏุฏุฉ ุงู„ู„ุบุงุช ุญุงู„ูŠู‹ุงุŒ ูˆู†ุนู…ู„ ุจุฌุฏ ู„ุชุญุณูŠู†ู‡ุง. ุดูƒุฑู‹ุง ู„ูƒ ุนู„ู‰ ุตุจุฑูƒ! ๐Ÿ™ + +!!! Example "ุชุซุจูŠุช" + + === "ุชุซุจูŠุช ุจุงุณุชุฎุฏุงู… pip (ุงู„ู…ูˆุตูŽู‰ ุจู‡)" + ู‚ู… ุจุชุซุจูŠุช ุญุฒู…ุฉ `ultralytics` ุจุงุณุชุฎุฏุงู… pipุŒ ุฃูˆ ู‚ู… ุจุชุญุฏูŠุซ ุงู„ุชุซุจูŠุช ุงู„ุญุงู„ูŠ ุนู† ุทุฑูŠู‚ ุชุดุบูŠู„ `pip install -U ultralytics`. 
ู‚ู… ุจุฒูŠุงุฑุฉ ู…ุคุดุฑ Python Package Index (PyPI) ู„ู„ุญุตูˆู„ ุนู„ู‰ ู…ุฒูŠุฏ ู…ู† ุงู„ุชูุงุตูŠู„ ุญูˆู„ ุญุฒู…ุฉ `ultralytics`: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/). + + [![ู†ุณุฎุฉ PyPI](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![ุงู„ุชู†ุฒูŠู„ุงุช](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # ู‚ู… ุจุชุซุจูŠุช ุญุฒู…ุฉ ultralytics ู…ู† PyPI + pip install ultralytics + ``` + + ูŠู…ูƒู†ูƒ ุฃูŠุถู‹ุง ุชุซุจูŠุช ุญุฒู…ุฉ `ultralytics` ู…ุจุงุดุฑุฉ ู…ู† ู…ุณุชูˆุฏุน GitHub [repository](https://github.com/ultralytics/ultralytics). ู‚ุฏ ูŠูƒูˆู† ุฐู„ูƒ ู…ููŠุฏู‹ุง ุฅุฐุง ูƒู†ุช ุชุฑุบุจ ููŠ ุงู„ุญุตูˆู„ ุนู„ู‰ ุงู„ุฅุตุฏุงุฑ ุงู„ุชุฌุฑูŠุจูŠ ุงู„ุฃุญุฏุซ. ุชุฃูƒุฏ ู…ู† ุชุซุจูŠุช ุฃุฏุงุฉ ุงู„ุฃูˆุงู…ุฑ Git ุนู„ู‰ ู†ุธุงู…ูƒ. ูŠูุซุจู‘ุช ุงู„ุฃู…ุฑ `@main` ุงู„ูุฑุน `main` ูˆูŠู…ูƒู† ุชุนุฏูŠู„ู‡ ุฅู„ู‰ ูุฑุน ุขุฎุฑุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ `@my-branch`ุŒ ุฃูˆ ูŠู…ูƒู† ุฅุฒุงู„ุชู‡ ุชู…ุงู…ู‹ุง ู„ู„ุงู†ุชู‚ุงู„ ุฅู„ู‰ ุงู„ูุฑุน ุงู„ุฑุฆูŠุณูŠ `main`. + + ```bash + # ู‚ู… ุจุชุซุจูŠุช ุญุฒู…ุฉ ultralytics ู…ู† GitHub + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "ุชุซุจูŠุช ุจุงุณุชุฎุฏุงู… conda" + Conda ู‡ูˆ ู…ุฏูŠุฑ ุญุฒู… ุจุฏูŠู„ ู„ู€ pip ูˆูŠู…ูƒู† ุงุณุชุฎุฏุงู…ู‡ ุฃูŠุถู‹ุง ู„ู„ุชุซุจูŠุช. ู‚ู… ุจุฒูŠุงุฑุฉ Anaconda ู„ู„ุญุตูˆู„ ุนู„ู‰ ู…ุฒูŠุฏ ู…ู† ุงู„ุชูุงุตูŠู„ ุนู„ู‰ [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ู…ุณุชูˆุฏุน Ultralytics feedstock ู„ุชุญุฏูŠุซ ุญุฒู…ุฉ conda ุนู„ู‰ [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/). + + + [![ูˆุตูุฉ conda](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![ุชู†ุฒูŠู„ุงุช conda](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![ุฅุตุฏุงุฑ conda](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![ู…ู†ุตุงุช conda](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # ู‚ู… ุจุชุซุจูŠุช ุญุฒู…ุฉ ultralytics ุจุงุณุชุฎุฏุงู… conda + conda install -c conda-forge ultralytics + ``` + + !!! Note "ู…ู„ุงุญุธุฉ" + + ุฅุฐุง ูƒู†ุช ุชู‚ูˆู… ุจุงู„ุชุซุจูŠุช ููŠ ุจูŠุฆุฉ CUDAุŒ ูุฅู† ุงู„ู…ู…ุงุฑุณุฉ ุงู„ุฌูŠุฏุฉ ู‡ูŠ ุชุซุจูŠุช `ultralytics`, `pytorch` ูˆ `pytorch-cuda` ููŠ ู†ูุณ ุงู„ุฃู…ุฑ ู„ู„ุณู…ุงุญ ู„ู…ุฏูŠุฑ ุญุฒู… conda ุจุญู„ ุฃูŠ ุชุนุงุฑุถุงุชุŒ ุฃูˆ ูˆุฅู„ุง ูู‚ูˆู… ุจุชุซุจูŠุช `pytorch-cuda` ููŠ ู†ู‡ุงูŠุฉ ุงู„ุฃู…ุฑ ู„ู„ุณู…ุงุญ ู„ู‡ ุจุชุฌุงูˆุฒ ุญุฒู…ุฉ `pytorch` ุงู„ู…ุญุฏุฏุฉ ู„ูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ ุฅุฐุง ู„ุฒู… ุงู„ุฃู…ุฑ. + ```bash + # ู‚ู… ุจุชุซุจูŠุช ูƒุงูุฉ ุงู„ุญุฒู… ู…ุนู‹ุง ุจุงุณุชุฎุฏุงู… conda + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### ุตูˆุฑุฉ Docker ููŠ Conda + + ุชุชูˆูุฑ ุฃูŠุถู‹ุง ุตูˆุฑ Docker ู„ู€ Conda ู„ู€ Ultralytics ู…ู† [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). ุชุณุชู†ุฏ ู‡ุฐู‡ ุงู„ุตูˆุฑ ุฅู„ู‰ [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) ูˆู‡ูŠ ูˆุณูŠู„ุฉ ุจุณูŠุทุฉ ู„ุจุฏุก ุงุณุชุฎุฏุงู… `ultralytics` ููŠ ุจูŠุฆุฉ Conda. 
+ + ```bash + # ู‚ู… ุจุชุนูŠูŠู† ุงุณู… ุงู„ุตูˆุฑุฉ ุจูˆุตูู‡ ู…ุชุบูŠุฑ + t=ultralytics/ultralytics:latest-conda + + # ุงุณุญุจ ุฃุญุฏุซ ุตูˆุฑุฉ ultralytics ู…ู† Docker Hub + sudo docker pull $t + + # ู‚ู… ุจุชุดุบูŠู„ ุตูˆุฑุฉ ultralytics ููŠ ุญุงูˆูŠุฉ ู…ุน ุฏุนู… GPU + sudo docker run -it --ipc=host --gpus all $t # all GPUs + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ู‚ุฏ ูŠุชู… ุชุญุฏูŠุฏ GPUs + ``` + + === "ุงุณุชู†ุณุงุฎ Git" + ู‚ู… ุจู†ุณุฎ ู…ุณุชูˆุฏุน `ultralytics` ุฅุฐุง ูƒู†ุช ู…ู‡ุชู…ู‹ุง ุจุงู„ู…ุณุงู‡ู…ุฉ ููŠ ุงู„ุชุทูˆูŠุฑ ุฃูˆ ุชุฑุบุจ ููŠ ุชุฌุฑุจุฉ ุงู„ุดูุฑุฉ ุงู„ู…ุตุฏุฑูŠุฉ ุงู„ุฃุญุฏุซ. ุจุนุฏ ุงู„ุงุณุชู†ุณุงุฎุŒ ุงู†ุชู‚ู„ ุฅู„ู‰ ุงู„ุฏู„ูŠู„ ูˆู‚ู… ุจุชุซุจูŠุช ุงู„ุญุฒู…ุฉ ููŠ ูˆุถุน ุงู„ุชุญุฑูŠุฑ `-e` ุจุงุณุชุฎุฏุงู… pip. + ```bash + # ู‚ู… ุจู†ุณุฎ ู…ุณุชูˆุฏุน ultralytics + git clone https://github.com/ultralytics/ultralytics + + # ุงู†ุชู‚ู„ ุฅู„ู‰ ุงู„ุฏู„ูŠู„ ุงู„ู…ู†ุณูˆุฎ + cd ultralytics + + # ู‚ู… ุจุชุซุจูŠุช ุงู„ุญุฒู…ุฉ ููŠ ูˆุถุน ุงู„ุชุญุฑูŠุฑ + pip install -e . + ``` + + === "Docker" + + ุชู…ูƒู†ูƒ ู…ู† ุงุณุชุฎุฏุงู… Docker ุจุณู‡ูˆู„ุฉ ู„ุชู†ููŠุฐ ุญุฒู…ุฉ `ultralytics` ููŠ ุญุงูˆูŠุฉ ู…ุนุฒูˆู„ุฉุŒ ู…ู…ุง ูŠุถู…ู† ุฃุฏุงุกู‹ ุณู„ุณู‹ุง ูˆู…ุชุณู‚ู‹ุง ููŠ ู…ุฎุชู„ู ุงู„ุจูŠุฆุงุช. ุนู† ุทุฑูŠู‚ ุงุฎุชูŠุงุฑ ุฅุญุฏู‰ ุตูˆุฑ Docker ุงู„ุฃุตู„ูŠุฉ ู„ู€ `ultralytics` ู…ู† [Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics)ุŒ ู„ู† ุชุชุฌู†ุจ ูู‚ุท ุชุนู‚ูŠุฏ ุงู„ุชุซุจูŠุช ุงู„ู…ุญู„ูŠ ูˆู„ูƒู†ูƒ ุณุชุณุชููŠุฏ ุฃูŠุถู‹ุง ู…ู† ูˆุตูˆู„ ุฅู„ู‰ ุจูŠุฆุฉ ุนู…ู„ ู…ุชุญู‚ู‚ุฉ ูˆูุนุงู„ุฉ. ูŠู‚ุฏู… Ultralytics 5 ุตูˆุฑ Docker ู…ุฏุนูˆู…ุฉ ุฑุฆูŠุณูŠุฉุŒ ูŠุชู… ุชุตู…ูŠู… ูƒู„ ู…ู†ู‡ุง ู„ุชูˆููŠุฑ ุชูˆุงูู‚ ุนุงู„ูŠ ูˆูƒูุงุกุฉ ู„ู…ู†ุตุงุช ูˆุญุงู„ุงุช ุงุณุชุฎุฏุงู… ู…ุฎุชู„ูุฉ: + + Docker Pulls + + - **Dockerfile:** ุตูˆุฑุฉ GPU ุงู„ู…ูˆุตู‰ ุจู‡ุง ู„ู„ุชุฏุฑูŠุจ. + - **Dockerfile-arm64:** ู…ุญุณู‘ู† ู„ุจู†ูŠุฉ ARM64ุŒ ู…ู…ุง ูŠุชูŠุญ ุงู„ู†ุดุฑ ุนู„ู‰ ุฃุฌู‡ุฒุฉ ู…ุซู„ Raspberry Pi ูˆู…ู†ุตุงุช ุฃุฎุฑู‰ ุชุนุชู…ุฏ ุนู„ู‰ ARM64. + - **Dockerfile-cpu:** ุฅุตุฏุงุฑ ู…ู†ุงุณุจ ู„ู„ุชุญูƒู… ุจูˆุญุฏุฉ ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ ูู‚ุท ุจุฏูˆู† ุฏุนู… ู„ู„ GPU. + - **Dockerfile-jetson:** ู…ุตู…ู… ุฎุตูŠุตู‹ุง ู„ุฃุฌู‡ุฒุฉ NVIDIA JetsonุŒ ูˆูŠุฏู…ุฌ ุฏุนู…ู‹ุง ู„ู„ GPU ุงู„ู…ุญุณู† ู„ู‡ุฐู‡ ุงู„ู…ู†ุตุงุช. + - **Dockerfile-python:** ุตูˆุฑุฉ ุตุบูŠุฑุฉ ุจู‡ุง ูู‚ุท Python ูˆุงู„ุชุจุนูŠุงุช ุงู„ุถุฑูˆุฑูŠุฉุŒ ู…ุซุงู„ูŠุฉ ู„ู„ุชุทุจูŠู‚ุงุช ูˆุงู„ุชุทูˆูŠุฑ ุงู„ุฎููŠู. + - **Dockerfile-conda:** ู‚ุงุฆู…ุฉ ุนู„ู‰ Miniconda3 ู…ุน ุชุซุจูŠุช conda ู„ุญุฒู…ุฉ ultralytics. + + ููŠู…ุง ูŠู„ูŠ ุงู„ุฃูˆุงู…ุฑ ู„ู„ุญุตูˆู„ ุนู„ู‰ ุฃุญุฏุซ ุตูˆุฑุฉ ูˆุชุดุบูŠู„ู‡ุง: + + ```bash + # ู‚ู… ุจุชุนูŠูŠู† ุงุณู… ุงู„ุตูˆุฑุฉ ุจูˆุตูู‡ ู…ุชุบูŠุฑ + t=ultralytics/ultralytics:latest + + # ุงุณุญุจ ุฃุญุฏุซ ุตูˆุฑุฉ ultralytics ู…ู† Docker Hub + sudo docker pull $t + + # ู‚ู… ุจุชุดุบูŠู„ ุตูˆุฑุฉ ultralytics ููŠ ุญุงูˆูŠุฉ ู…ุน ุฏุนู… GPU + sudo docker run -it --ipc=host --gpus all $t # all GPUs + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ู‚ุฏ ูŠุชู… ุชุญุฏูŠุฏ GPUs + ``` + + ูŠู‚ูˆู… ุงู„ุฃู…ุฑ ุฃุนู„ุงู‡ ุจุชู‡ูŠุฆุฉ ุญุงูˆูŠุฉ Docker ุจุฃุญุฏุซ ุตูˆุฑุฉ `ultralytics`. ูŠูุณู†ุฏ ุงู„ุนู„ุงู…ุฉ `-it` ุฌู‡ุงุฒู‹ุง ุงูุชุฑุงุถูŠู‹ุง TTY ูˆูŠุญุงูุธ ุนู„ู‰ ูุชุญ stdin ู„ุชู…ูƒูŠู†ูƒ ู…ู† ุงู„ุชูุงุนู„ ู…ุน ุงู„ุญุงูˆูŠุฉ. 
ุชุนูŠูŠู† ุงู„ุนู„ุงู…ุฉ `--ipc=host` ู…ุณุงุญุฉ ุงุณู… IPC (Inter-Process Communication) ุฅู„ู‰ ุงู„ู…ุถูŠูุŒ ูˆู‡ูˆ ุฃู…ุฑ ุถุฑูˆุฑูŠ ู„ู…ุดุงุฑูƒุฉ ุงู„ุฐุงูƒุฑุฉ ุจูŠู† ุงู„ุนู…ู„ูŠุงุช. ุชูู…ูƒู‘ู† ุงู„ุนู„ุงู…ุฉ `--gpus all` ุงู„ูˆุตูˆู„ ุฅู„ู‰ ูƒู„ ูˆุญุฏุงุช ุงู„ู…ุนุงู„ุฌุฉ ุงู„ู…ุฑูƒุฒูŠุฉ ุงู„ุฑุณูˆู…ูŠุฉ ุงู„ู…ุชุงุญุฉ ุฏุงุฎู„ ุงู„ุญุงูˆูŠุฉุŒ ู…ู…ุง ู‡ูˆ ุฃู…ุฑ ุญุงุณู… ู„ู„ู…ู‡ุงู… ุงู„ุชูŠ ุชุชุทู„ุจ ุญุณุงุจุงุช GPU. + + ู…ู„ุงุญุธุฉ: ู„ู„ุนู…ู„ ู…ุน ุงู„ู…ู„ูุงุช ุนู„ู‰ ุฌู‡ุงุฒูƒ ุงู„ู…ุญู„ูŠ ุฏุงุฎู„ ุงู„ุญุงูˆูŠุฉุŒ ุงุณุชุฎุฏู… ู…ุฌู„ุฏุงุช Docker ู„ุชูˆุตูŠู„ ุฏู„ูŠู„ ู…ุญู„ูŠ ุจุงู„ุญุงูˆูŠุฉ: + + ```bash + # ู…ุฌู„ุฏ ุงู„ุฏู„ูŠู„ ุงู„ู…ุญู„ูŠ ุจุงู„ุญุงูˆูŠุฉ + sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t + ``` + + ู‚ู… ุจุชุบูŠูŠุฑ `/path/on/host` ุจู…ุณุงุฑ ุงู„ุฏู„ูŠู„ ุนู„ู‰ ุฌู‡ุงุฒูƒ ุงู„ู…ุญู„ูŠุŒ ูˆ `/path/in/container` ุจุงุงู„ู…ุณุงุฑ ุงู„ู…ุทู„ูˆุจ ุฏุงุฎู„ ุญุงูˆูŠุฉ Docker ู„ู„ูˆุตูˆู„ ุฅู„ูŠู‡. + + ู„ู„ุงุณุชูุงุฏุฉ ุงู„ู‚ุตูˆู‰ ู…ู† ุงุณุชุฎุฏุงู… Docker ุงู„ู…ุชู‚ุฏู…ุŒ ู„ุง ุชุชุฑุฏุฏ ููŠ ุงุณุชูƒุดุงู [ุฏู„ูŠู„ Ultralytics Docker](https://docs.ultralytics.com/guides/docker-quickstart/). + +ุฑุงุฌุน ู…ู„ู `requirements.txt` ุงู„ุฎุงุต ุจู€ `ultralytics` [ู‡ู†ุง](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ุงู„ู…ุชุทู„ุจุงุช. ูŠูุฑุฌู‰ ู…ู„ุงุญุธุฉ ุฃู† ุฌู…ูŠุน ุงู„ุฃู…ุซู„ุฉ ุฃุนู„ุงู‡ ูŠุชู… ุชุซุจูŠุช ุฌู…ูŠุน ุงู„ู…ุชุทู„ุจุงุช ุงู„ู…ุทู„ูˆุจุฉ. + +

+
+ +
+ ุดุงู‡ุฏ: ุฏู„ูŠู„ ูุชุน Ultralytics YOLO ุงู„ุณุฑูŠุน +

+ +!!! Tip "ู†ุตูŠุญุฉ" + + ูŠุฎุชู„ู ู…ุชุทู„ุจุงุช PyTorch ุญุณุจ ู†ุธุงู… ุงู„ุชุดุบูŠู„ ูˆู…ุชุทู„ุจุงุช CUDAุŒ ู„ุฐุง ูŠููˆุตูŽู‰ ุจุชุซุจูŠุช PyTorch ุฃูˆู„ุงู‹ ุจุงุณุชุฎุฏุงู… ุงู„ุชุนู„ูŠู…ุงุช ุงู„ู…ูˆุฌูˆุฏุฉ ููŠ [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally). + + + PyTorch ุชุนู„ูŠู…ุงุช ุงู„ุชุซุจูŠุช + + +## ุงุณุชุฎุฏู… Ultralytics ู…ุน ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ (CLI) + +ุชุชูŠุญ ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ (CLI) ููŠ Ultralytics ุชุดุบูŠู„ ุฃูˆุงู…ุฑ ุจุณูŠุทุฉ ุจุฏูˆู† ุงู„ุญุงุฌุฉ ุฅู„ู‰ ุจูŠุฆุฉ Python. ู„ุง ุชุญุชุงุฌ CLI ุฅู„ู‰ ุฃูŠ ุชุฎุตูŠุต ุฃูˆ ูƒูˆุฏ Python. ูŠู…ูƒู†ูƒ ุจุจุณุงุทุฉ ุชุดุบูŠู„ ุฌู…ูŠุน ุงู„ู…ู‡ุงู… ู…ู† ุงู„ุทุฑููŠุฉ ุจุงุณุชุฎุฏุงู… ุงู„ุฃู…ุฑ `yolo`. ุชุญู‚ู‚ ู…ู† [ุฏู„ูŠู„ CLI](/../usage/cli.md) ู„ู…ุนุฑูุฉ ุงู„ู…ุฒูŠุฏ ุญูˆู„ ุงุณุชุฎุฏุงู… YOLOv8 ู…ู† ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ. + +!!! Example "ู…ุซุงู„" + + === "ุงู„ุตูŠุบุฉ" + ุชุณุชุฎุฏู… ุฃูˆุงู…ุฑ Ultralytics `yolo` ุงู„ุตูŠุบุฉ ุงู„ุชุงู„ูŠุฉ: + ```bash + yolo TASK MODE ARGS + ``` + + - `TASK` (ุงุฎุชูŠุงุฑูŠ) ุฃุญุฏ ุงู„ุชุงู„ูŠ ([detect](tasks/detect.md), [segment](tasks/segment.md), [classify](tasks/classify.md), [pose](tasks/pose.md)) + - `MODE` (ู…ุทู„ูˆุจ) ูˆุงุญุฏ ู…ู† ([train](modes/train.md), [val](modes/val.md), [predict](modes/predict.md), [export](modes/export.md), [track](modes/track.md)) + - `ARGS` (ุงุฎุชูŠุงุฑูŠ) ุฃุฒูˆุงุฌ "arg=value" ู…ุซู„ `imgsz=640` ุงู„ุชูŠ ุชุณุชุจุฏู„ ุงู„ู‚ูŠู… ุงู„ุงูุชุฑุงุถูŠุฉ. + + ุฑุงุฌุน ุฌู…ูŠุน `ARGS` [ู‡ู†ุง](/../usage/cfg.md) ุฃูˆ ุจุงุณุชุฎุฏุงู… ุงู„ุฃู…ุฑ `yolo cfg` ููŠ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ. + + === "ุงู„ุชุฏุฑูŠุจ" + ู‚ู… ุจุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ ุงูƒุชุดุงู ู„ู…ุฏุฉ 10 ุญู„ู‚ุงุช ู…ุน ุณุนุฑ ุชุนู„ู… ุจุฏุกูŠ 0.01 + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "ุงู„ุชู†ุจุค" + ุชู†ุจุค ุจููŠุฏูŠูˆ YouTube ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ ุชุฌุฒุฆุฉ ู…ุนุชู…ุฏ ู…ุณุจู‚ู‹ุง ุนู†ุฏ ุญุฌู… ุงู„ุตูˆุฑุฉ 320: + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "ุงู„ุชุญู‚ู‚" + ุงู„ุชุญู‚ู‚ ู…ู† ู†ู…ูˆุฐุฌ ุงูƒุชุดุงู ู…ุนุชู…ุฏ ู…ุณุจู‚ู‹ุง ุนู„ู‰ ุฏููุนูŽุฉ ูˆุงุญุฏุฉ ูˆุญุฌู… ุตูˆุฑุฉ ู‚ุฏุฑู‡ 640: + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "ุงู„ุชุตุฏูŠุฑ" + ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ูุฆุฉ YOLOv8n ุฅู„ู‰ ุชู†ุณูŠู‚ ONNX ุนู„ู‰ ุญุฌู… ุตูˆุฑุฉ 224 ุจูˆุงุณุทุฉ 128 (ู„ุง ูŠู„ุฒู… TASK) + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "ุฎุงุต" + ู‚ู… ุจุชุดุบูŠู„ ุฃูˆุงู…ุฑ ุฎุงุตุฉ ู„ุนุฑุถ ุงู„ุฅุตุฏุงุฑุฉ ูˆุนุฑุถ ุงู„ุฅุนุฏุงุฏุงุช ูˆุชุดุบูŠู„ ุนู…ู„ูŠุงุช ุงู„ุชุญู‚ู‚ ูˆุงู„ู…ุฒูŠุฏ: + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "ุชุญุฐูŠุฑ" +ูŠุฌุจ ุชู…ุฑูŠุฑ ุงู„ูˆุณูˆู… ูƒุฃุฒูˆุงุฌ "arg=val"ุŒ ูˆุฃู† ุชููุตู„ ุจุนู„ุงู…ุฉ ุชุณุงูˆูŠ `=` ูˆุฃู† ุชููุตู„ ุจู…ุณุงูุงุช ุจูŠู† ุงู„ุฃุฒูˆุงุฌ. ู„ุง ุชุณุชุฎุฏู… ุจุงุฏุฆุงุช ุงู„ูˆุณูˆู… `--` ุฃูˆ ููˆุงุตู„ `,` ุจูŠู† ุงู„ูˆุณูˆู…. 
+ + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25` โŒ (ู…ูู‚ูˆุฏ ุงู„ุนู„ุงู…ุฉ ุงู„ู…ุณุงูˆุงุฉ) + - `yolo predict model=yolov8n.pt, imgsz=640, conf=0.25` โŒ (ู„ุง ุชุณุชุฎุฏู… `,`) + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` โŒ (ู„ุง ุชุณุชุฎุฏู… `--`) + +[ุฏู„ูŠู„ CLI](/../usage/cli.md){ .md-button } + +## ุงุณุชุฎุฏู… Ultralytics ู…ุน Python + +ุชุณู…ุญ ูˆุงุฌู‡ุฉ Python ููŠ YOLOv8 ุจุงู„ุชูƒุงู…ู„ ุงู„ุณู„ุณ ููŠ ู…ุดุงุฑูŠุน Python ุงู„ุฎุงุตุฉ ุจูƒุŒ ู…ู…ุง ูŠุฌุนู„ ู…ู† ุงู„ุณู‡ู„ ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ูˆุชุดุบูŠู„ู‡ ูˆู…ุนุงู„ุฌุฉ ู†ุชุงุฆุฌู‡. ุงู„ู…ุตู…ู…ุฉ ุจุจุณุงุทุฉ ูˆุณู‡ูˆู„ุฉ ุงู„ุงุณุชุฎุฏุงู… ููŠ ุงู„ุงุนุชุจุงุฑุŒ ุชู…ูƒู† ูˆุงุฌู‡ุฉ Python ุงู„ู…ุณุชุฎุฏู…ูŠู† ู…ู† ุชู†ููŠุฐ ุงู„ูƒุดู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช ูˆุงู„ุชุฌุฒุฆุฉ ูˆุงู„ุชุตู†ูŠู ููŠ ู…ุดุงุฑูŠุนู‡ู…. ูŠุฌุนู„ ู‡ุฐุง ูˆุงุฌู‡ุฉ YOLOv8 Python ุฃุฏุงุฉ ู‚ูŠู…ุฉ ู„ุฃูŠ ุดุฎุต ูŠุฑุบุจ ููŠ ุฏู…ุฌ ู‡ุฐู‡ ุงู„ูˆุธุงุฆู ููŠ ู…ุดุงุฑูŠุนู‡ู… ุจุงุณูŠุงุชูˆ. + +ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ุŒ ูŠู…ูƒู† ู„ู„ู…ุณุชุฎุฏู…ูŠู† ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌุŒ ุชุฏุฑูŠุจู‡ุŒ ุชู‚ูŠูŠู… ุฃุฏุงุฆู‡ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุชุญู‚ู‚ุŒ ูˆุญุชู‰ ุชุตุฏูŠุฑู‡ ุฅู„ู‰ ุชู†ุณูŠู‚ ONNX ุจุจุถุนุฉ ุฃุณุทุฑ ูู‚ุท ู…ู† ุงู„ุดูุฑุฉ. ุชุญู‚ู‚ ู…ู† [ุฏู„ูŠู„ Python](/../usage/python.md) ู„ู…ุนุฑูุฉ ุงู„ู…ุฒูŠุฏ ุญูˆู„ ุงุณุชุฎุฏุงู… YOLOv8 ุฏุงุฎู„ ู…ุดุงุฑูŠุนูƒ ุงู„ุฎุงุตุฉ. + +!!! Example "ู…ุซุงู„" + + ```python + from ultralytics import YOLO + + # ุฃู†ุดุฆ ู†ู…ูˆุฐุฌ YOLO ุฌุฏูŠุฏ ู…ู† ุงู„ุจุฏุงูŠุฉ + model = YOLO('yolov8n.yaml') + + # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ YOLO ู…ุนุชู…ุฏ ู…ุณุจู‚ู‹ุง (ู…ูˆุตูŽู‰ ุจู‡ ู„ู„ุชุฏุฑูŠุจ) + model = YOLO('yolov8n.pt') + + # ู‚ู… ุจุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ ุจุงุณุชุฎุฏุงู… ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช 'coco128.yaml' ู„ู…ุฏุฉ 3 ุญู„ู‚ุงุช + results = model.train(data='coco128.yaml', epochs=3) + + # ู‚ู… ุจุชู‚ูŠูŠู… ุฃุฏุงุก ุงู„ู†ู…ูˆุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุชุญู‚ู‚ + results = model.val() + + # ู‚ู… ุจุฅุฌุฑุงุก ุงู„ูƒุดู ุนู„ู‰ ุตูˆุฑุฉ ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ + results = model('https://ultralytics.com/images/bus.jpg') + + # ู‚ู… ุจุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุฅู„ู‰ ุชู†ุณูŠู‚ ONNX + success = model.export(format='onnx') + ``` + +[ุฏู„ูŠู„ Python](/../usage/python.md){.md-button .md-button--primary} + +## ุฅุนุฏุงุฏุงุช Ultralytics + +ูŠูˆูุฑ ู…ูƒุชุจุฉ Ultralytics ู†ุธุงู…ู‹ุง ู‚ูˆูŠู‹ุง ู„ุฅุฏุงุฑุฉ ุงู„ุฅุนุฏุงุฏุงุช ู„ุชู…ูƒูŠู† ุงู„ุชุญูƒู… ุจู…ุญุงูƒุงุฉ ุชูุตูŠู„ูŠุฉ ู„ุชุฌุงุฑุจูƒ. ู…ู† ุฎู„ุงู„ ุงุณุชุฎุฏุงู… `SettingsManager` ููŠ ุงู„ูˆุญุฏุฉ `ultralytics.utils`ุŒ ูŠู…ูƒู† ู„ู„ู…ุณุชุฎุฏู…ูŠู† ุงู„ูˆุตูˆู„ ุจุณู‡ูˆู„ุฉ ุฅู„ู‰ ุฅุนุฏุงุฏุงุชู‡ู… ูˆุชุนุฏูŠู„ู‡ุง. ูŠุชู… ุชุฎุฒูŠู†ู‡ุง ููŠ ู…ู„ู YAML ูˆูŠู…ูƒู† ุนุฑุถู‡ุง ุฃูˆ ุชุนุฏูŠู„ู‡ุง ุฅู…ุง ู…ุจุงุดุฑุฉ ููŠ ุจูŠุฆุฉ Python ุฃูˆ ู…ู† ุฎู„ุงู„ ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ (CLI). + +### ูุญุต ุงู„ุฅุนุฏุงุฏุงุช + +ู„ู„ุญุตูˆู„ ุนู„ู‰ ูู‡ู… ู„ู„ุชูƒูˆูŠู† ุงู„ุญุงู„ูŠ ู„ุฅุนุฏุงุฏุงุชูƒุŒ ูŠู…ูƒู†ูƒ ุนุฑุถู‡ุง ู…ุจุงุดุฑุฉู‹: + +!!! Example "ุนุฑุถ ุงู„ุฅุนุฏุงุฏุงุช" + + === "Python" + ูŠูู…ูƒู†ูƒ ุงุณุชุฎุฏุงู… Python ู„ุนุฑุถ ุงู„ุฅุนุฏุงุฏุงุช ุงู„ุฎุงุตุฉ ุจูƒ. ุงุจุฏุฃ ุจู€ุงุณุชูŠุฑุงุฏ ุงู„ูƒุงุฆู† `settings` ู…ู† ูˆุญุฏุฉ `ultralytics`. 
ุงุณุชุฎุฏู… ุงู„ุฃูˆุงู…ุฑ ุงู„ุชุงู„ูŠุฉ ู„ุทุจุงุนุฉ ุงู„ุฅุนุฏุงุฏุงุช ูˆุงู„ุนูˆุฏุฉ ู…ู†ู‡ุง: + ```python + from ultralytics import settings + + # ุนุฑุถ ูƒู„ ุงู„ุฅุนุฏุงุฏุงุช + print(settings) + + # ุฅุฑุฌุงุน ุฅุนุฏุงุฏ ู…ุญุฏุฏ + value = settings['runs_dir'] + ``` + + === "CLI" + ุจุฏู„ุงู‹ ู…ู† ุฐู„ูƒุŒ ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑ ุชุณู…ุญ ู„ูƒ ุจุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุฅุนุฏุงุฏุงุช ุงู„ุฎุงุตุฉ ุจูƒ ุจุงุณุชุฎุฏุงู… ุฃู…ุฑ ุจุณูŠุท: + ```bash + yolo settings + ``` + +### ุชุนุฏูŠู„ ุงู„ุฅุนุฏุงุฏุงุช + +ูŠุณู…ุญ ู„ูƒ Ultralytics ุจุชุนุฏูŠู„ ุงู„ุฅุนุฏุงุฏุงุช ุจุณู‡ูˆู„ุฉ. ูŠู…ูƒู† ุชู†ููŠุฐ ุงู„ุชุบูŠูŠุฑุงุช ุจุงู„ุทุฑู‚ ุงู„ุชุงู„ูŠุฉ: + +!!! Example "ุชุญุฏูŠุซ ุงู„ุฅุนุฏุงุฏุงุช" + + === "Python" + ุฏุงุฎู„ ุจูŠุฆุฉ PythonุŒ ุงุทู„ุจ ุงู„ุทุฑูŠู‚ุฉ `update` ุนู„ู‰ ุงู„ูƒุงุฆู† `settings` ู„ุชุบูŠูŠุฑ ุฅุนุฏุงุฏุงุชูƒ: + + ```python + from ultralytics import settings + + # ุชุญุฏูŠุซ ุฅุนุฏุงุฏ ูˆุงุญุฏ + settings.update({'runs_dir': '/path/to/runs'}) + + # ุชุญุฏูŠุซ ุฅุนุฏุงุฏุงุช ู…ุชุนุฏุฏุฉ + settings.update({'runs_dir': '/path/to/runs', 'tensorboard': False}) + + # ุฅุนุงุฏุฉ ุงู„ุฅุนุฏุงุฏุงุช ุฅู„ู‰ ุงู„ู‚ูŠู… ุงู„ุงูุชุฑุงุถูŠุฉ + settings.reset() + ``` + + === "CLI" + ุฅุฐุง ูƒู†ุช ุชูุถู„ ุงุณุชุฎุฏุงู… ูˆุงุฌู‡ุฉ ุณุทุฑ ุงู„ุฃูˆุงู…ุฑุŒ ูŠู…ูƒู†ูƒ ุงุณุชุฎุฏุงู… ุงู„ุฃูˆุงู…ุฑ ุงู„ุชุงู„ูŠุฉ ู„ุชุนุฏูŠู„ ุฅุนุฏุงุฏุงุชูƒ: + + ```bash + # ุชุญุฏูŠุซ ุฅุนุฏุงุฏ ูˆุงุญุฏ + yolo settings runs_dir='/path/to/runs' + + # ุชุญุฏูŠุซ ุฅุนุฏุงุฏุงุช ู…ุชุนุฏุฏุฉ + yolo settings runs_dir='/path/to/runs' tensorboard=False + + # ุฅุนุงุฏุฉ ุงู„ุฅุนุฏุงุฏุงุช ุฅู„ู‰ ุงู„ู‚ูŠู… ุงู„ุงูุชุฑุงุถูŠุฉ + yolo settings reset + ``` + +### ูู‡ู… ุงู„ุฅุนุฏุงุฏุงุช + +ูŠูˆูุฑ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡ ู†ุธุฑุฉ ุนุงู…ุฉ ุนู„ู‰ ุงู„ุฅุนุฏุงุฏุงุช ุงู„ู…ุชุงุญุฉ ู„ู„ุถุจุท ููŠ Ultralytics. ูŠุชู… ุชูˆุถูŠุญ ูƒู„ ุฅุนุฏุงุฏ ุจุงู„ุฅุถุงูุฉ ุฅู„ู‰ ู‚ูŠู…ุฉ ู…ุซุงู„ูŠุฉ ูˆู†ูˆุน ุงู„ุจูŠุงู†ุงุช ูˆูˆุตู ู…ูˆุฌุฒ. 
+ +| ุงู„ุงุณู… | ุงู„ู‚ูŠู…ุฉ ุงู„ู…ุซุงู„ูŠุฉ | ู†ูˆุน ุงู„ุจูŠุงู†ุงุช | ุงู„ูˆุตู | +|--------------------|-----------------------|--------------|-------------------------------------------------------------------------------------------------------------| +| `settings_version` | `'0.0.4'` | `str` | ุฅุตุฏุงุฑ ุฅุนุฏุงุฏุงุช Ultralytics (ู…ุฎุชู„ู ุนู† ุฅุตุฏุงุฑ Ultralytics [pip](https://pypi.org/project/ultralytics/)) | +| `datasets_dir` | `'/path/to/datasets'` | `str` | ุงู„ู…ุณุงุฑ ุงู„ุฐูŠ ูŠุชู… ุชุฎุฒูŠู†ู‡ ููŠู‡ ู…ุฌู…ูˆุนุงุช ุงู„ุจูŠุงู†ุงุช | +| `weights_dir` | `'/path/to/weights'` | `str` | ุงู„ู…ุณุงุฑ ุงู„ุฐูŠ ูŠุชู… ุชุฎุฒูŠู†ู‡ ููŠู‡ ุฃูˆุฒุงู† ุงู„ู†ู…ูˆุฐุฌ | +| `runs_dir` | `'/path/to/runs'` | `str` | ุงู„ู…ุณุงุฑ ุงู„ุฐูŠ ูŠุชู… ุชุฎุฒูŠู†ู‡ ููŠู‡ ุชุดุบูŠู„ ุงู„ุชุฌุงุฑุจ | +| `uuid` | `'a1b2c3d4'` | `str` | ู…ูุนุฑูู‘ู ูุฑูŠุฏ ู„ุฅุนุฏุงุฏุงุช ุงู„ุญุงู„ูŠุฉ | +| `sync` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ู…ุฒุงู…ู†ุฉ ุงู„ุชุญู„ูŠู„ุงุช ูˆุญูˆุงุฏุซ ุงู„ุฃุนุทุงู„ ุฅู„ู‰ HUB | +| `api_key` | `''` | `str` | HUB ุงู„ุฎุงุต ุจู€ Ultralytics [API Key](https://hub.ultralytics.com/settings?tab=api+keys) | +| `clearml` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… ClearML ู„ุชุณุฌูŠู„ ุงู„ุชุฌุงุฑุจ | +| `comet` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… [Comet ML](https://bit.ly/yolov8-readme-comet) ู„ุชุชุจุน ูˆุชุตูˆุฑ ุงู„ุชุฌุงุฑุจ | +| `dvc` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… [DVC ู„ุชุชุจุน ุงู„ุชุฌุงุฑุจ](https://dvc.org/doc/dvclive/ml-frameworks/yolo) ูˆุงู„ุชุญูƒู… ููŠ ุงู„ู†ุณุฎ | +| `hub` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… [Ultralytics HUB](https://hub.ultralytics.com) ู„ู„ุชูƒุงู…ู„ | +| `mlflow` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… MLFlow ู„ุชุชุจุน ุงู„ุชุฌุงุฑุจ | +| `neptune` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… Neptune ู„ุชุชุจุน ุงู„ุชุฌุงุฑุจ | +| `raytune` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… Ray Tune ู„ุถุจุท ุงู„ุญุณุงุณูŠุฉ | +| `tensorboard` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… TensorBoard ู„ู„ุชุตูˆุฑ | +| `wandb` | `True` | `bool` | ู…ุง ุฅุฐุง ูƒุงู† ูŠุชู… ุงุณุชุฎุฏุงู… Weights & Biases ู„ุชุณุฌูŠู„ ุงู„ุจูŠุงู†ุงุช | + +ุฃุซู†ุงุก ุชู†ู‚ู„ูƒ ููŠ ู…ุดุงุฑูŠุนูƒ ุฃูˆ ุชุฌุงุฑุจูƒุŒ ุชุฃูƒุฏ ู…ู† ู…ุฑุงุฌุนุฉ ู‡ุฐู‡ ุงู„ุฅุนุฏุงุฏุงุช ู„ุถู…ุงู† ุชูƒูˆูŠู†ู‡ุง ุจุดูƒู„ ู…ุซุงู„ูŠ ูˆูู‚ู‹ุง ู„ุงุญุชูŠุงุฌุงุชูƒ. diff --git a/ultralytics/docs/ar/quickstart.md:Zone.Identifier b/ultralytics/docs/ar/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ar/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ar/tasks/classify.md b/ultralytics/docs/ar/tasks/classify.md new file mode 100755 index 0000000..b0dadf4 --- /dev/null +++ b/ultralytics/docs/ar/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: ุชุนุฑู‘ู ุนู„ู‰ ู†ู…ุงุฐุฌ YOLOv8 Classify ู„ุชุตู†ูŠู ุงู„ุตูˆุฑ. ุงุญุตู„ ุนู„ู‰ ู…ุนู„ูˆู…ุงุช ู…ูุตู„ุฉ ุญูˆู„ ู‚ุงุฆู…ุฉ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ูˆูƒูŠููŠุฉ ุงู„ุชุฏุฑูŠุจ ูˆุงู„ุชุญู‚ู‚ ูˆุงู„ุชู†ุจุค ูˆุชุตุฏูŠุฑ ุงู„ู†ู…ุงุฐุฌ. 
+keywords: UltralyticsุŒ YOLOv8ุŒ ุชุตู†ูŠู ุงู„ุตูˆุฑุŒ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑุจุฉ ู…ุณุจู‚ู‹ุงุŒ YOLOv8n-clsุŒ ุงู„ุชุฏุฑูŠุจุŒ ุงู„ุชุญู‚ู‚ุŒ ุงู„ุชู†ุจุคุŒ ุชุตุฏูŠุฑ ุงู„ู†ู…ุงุฐุฌ +--- + +# ุชุตู†ูŠู ุงู„ุตูˆุฑ + +ุฃู…ุซู„ุฉ ุนู„ู‰ ุชุตู†ูŠู ุงู„ุตูˆุฑ + +ุชุนุชุจุฑ ุนู…ู„ูŠุฉ ุชุตู†ูŠู ุงู„ุตูˆุฑ ุฃุจุณุท ุงู„ู…ู‡ุงู… ุงู„ุซู„ุงุซุฉ ูˆุชู†ุทูˆูŠ ุนู„ู‰ ุชุตู†ูŠู ุตูˆุฑุฉ ูƒุงู…ู„ุฉ ููŠ ุฅุญุฏู‰ ุงู„ูุฆุงุช ุงู„ู…ุญุฏุฏุฉ ุณุงุจู‚ู‹ุง. + +ู†ุงุชุฌ ู†ู…ูˆุฐุฌ ุชุตู†ูŠู ุงู„ุตูˆุฑ ู‡ูˆ ุชุณู…ูŠุฉ ูุฆุฉ ูˆุงุญุฏุฉ ูˆุฏุฑุฌุฉ ุซู‚ุฉ. ูŠูƒูˆู† ุชุตู†ูŠู ุงู„ุตูˆุฑ ู…ููŠุฏู‹ุง ุนู†ุฏู…ุง ุชุญุชุงุฌ ูู‚ุท ุฅู„ู‰ ู…ุนุฑูุฉ ูุฆุฉ ุงู„ุตูˆุฑุฉ ูˆู„ุง ุชุญุชุงุฌ ุฅู„ู‰ ู…ุนุฑูุฉ ู…ูˆู‚ุน ุงู„ูƒุงุฆู†ุงุช ุงู„ุชุงุจุนุฉ ู„ุชู„ูƒ ุงู„ูุฆุฉ ุฃูˆ ุดูƒู„ู‡ุง ุงู„ุฏู‚ูŠู‚. + +!!! Tip "ู†ุตูŠุญุฉ" + + ุชุณุชุฎุฏู… ู†ู…ุงุฐุฌ YOLOv8 Classify ุงู„ู„ุงุญู‚ุฉ "-cls"ุŒ ู…ุซุงู„ู‹ุง "yolov8n-cls.pt" ูˆุชู… ุชุฏุฑูŠุจู‡ุง ุนู„ู‰ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +## [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ุชุธู‡ุฑ ู‡ู†ุง ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ู„ู€ YOLOv8 ู„ู„ุชุตู†ูŠู. ุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ูƒุดู ูˆุงู„ุดุนุจุฉ ูˆุงู„ู…ูˆุถุน ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ุŒ ุจูŠู†ู…ุง ุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ุชุตู†ูŠู ู…ุณุจู‚ู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +ูŠุชู… ุชู†ุฒูŠู„ [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ุชู„ู‚ุงุฆูŠู‹ุง ู…ู† ุฃุญุฏุซ ุฅุตุฏุงุฑ ู„ู€ Ultralytics [releases](https://github.com/ultralytics/assets/releases) ุนู†ุฏ ุงู„ุงุณุชุฎุฏุงู… ุงู„ุฃูˆู„. + +| ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุญุฌู…
(ุจูƒุณู„) | ุฏู‚ุฉ (ุฃุนู„ู‰ 1)
acc | ุฏู‚ุฉ (ุฃุนู„ู‰ 5)
acc | ุณุฑุนุฉ ุงู„ุชู†ููŠุฐ
ONNX ู„ู„ูˆุญุฏุฉ ุงู„ู…ุฑูƒุฒูŠุฉ
(ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุณุฑุนุฉ ุงู„ุชู†ููŠุฐ
A100 TensorRT
(ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ู…ุนู„ู…ุงุช
(ู…ู„ูŠูˆู†) | FLOPs
(ู…ู„ูŠุงุฑ) ู„ุญุฌู… 640 | +|----------------------------------------------------------------------------------------------|----------------------|--------------------------|--------------------------|-----------------------------------------------------------|----------------------------------------------------|--------------------------|--------------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- ู‚ูŠู…ุฉ **acc** ู‡ูŠ ุฏู‚ุฉ ุงู„ู†ู…ุงุฐุฌ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ุชุญู‚ู‚ [ImageNet](https://www.image-net.org/). +
ู„ุฅุนุงุฏุฉ ุฅู†ุชุงุฌ ุฐู„ูƒุŒ ุงุณุชุฎุฏู… `yolo val classify data=path/to/ImageNet device=0` +- ูŠุชู… ุญุณุงุจ ุณุฑุนุฉ **Speed** ุจู†ุงุกู‹ ุนู„ู‰ ู…ุชูˆุณุท ุตูˆุฑ ุงู„ุชุญู‚ู‚ ู…ู† ImageNet ุจุงุณุชุฎุฏุงู… [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
ู„ุฅุนุงุฏุฉ ุฅู†ุชุงุฌ ุฐู„ูƒุŒ ุงุณุชุฎุฏู… `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## ุงู„ุชุฏุฑูŠุจ + +ู‚ู… ุจุชุฏุฑูŠุจ YOLOv8n-cls ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช MNIST160 ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ ุนู†ุฏ ุญุฌู… ุงู„ุตูˆุฑุฉ 64 ุจูƒุณู„. ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ุจุงู„ูˆุณุงุฆุท ุงู„ู…ุชุงุญุฉุŒ ุงุทู„ุน ุนู„ู‰ ุตูุญุฉ [ุชูƒูˆูŠู†](/../usage/cfg.md). + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-cls.yaml') # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู†ู…ูˆุฐุฌ YAML + model = YOLO('yolov8n-cls.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง (ู…ูˆุตู‰ ุจู‡ ู„ู„ุชุฏุฑูŠุจ) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # ุฅู†ุดุงุก ู…ู† YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† + + # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุจุฏุงูŠุฉ + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # ุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ุจุตูŠุบุฉ pt + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # ุฅู†ุดุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช + +ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุชุตู†ูŠู YOLO ุจุงู„ุชูุตูŠู„ ููŠ [ู…ุฑุดุฏ ุงู„ู…ุฌู…ูˆุนุฉ](../../../datasets/classify/index.md). + +## ุงู„ุชุญู‚ู‚ + +ู‚ู… ุจุชุญุฏูŠุฏ ุฏู‚ุฉ ุงู„ู†ู…ูˆุฐุฌ YOLOv8n-cls ุงู„ู…ุฏุฑู‘ุจ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช MNIST160. ู„ุง ูŠู„ุฒู… ุชู…ุฑูŠุฑ ุฃูŠ ูˆุณูŠุทุฉ ุญูŠุซ ูŠุญุชูุธ `model` ุจุจูŠุงู†ุงุช ุงู„ุชุฏุฑูŠุจ ูˆุงู„ูˆุณุงุฆุท ูƒุณู…ุงุช ุงู„ู†ู…ูˆุฐุฌ. + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-cls.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + + # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ + metrics = model.val() # ู„ุง ุชุญุชุงุฌ ุฅู„ู‰ ูˆุณุงุฆุทุŒ ูŠุชู… ุชุฐูƒุฑ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ูˆุงู„ุฅุนุฏุงุฏุงุช ุงู„ู†ู…ูˆุฐุฌ + metrics.top1 # ุฏู‚ุฉ ุฃุนู„ู‰ 1 + metrics.top5 # ุฏู‚ุฉ ุฃุนู„ู‰ 5 + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ + yolo classify val model=path/to/best.pt # ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต + ``` + +## ุงู„ุชู†ุจุค + +ุงุณุชุฎุฏู… ู†ู…ูˆุฐุฌ YOLOv8n-cls ุงู„ู…ุฏุฑู‘ุจ ู„ุชู†ููŠุฐ ุชู†ุจุคุงุช ุนู„ู‰ ุงู„ุตูˆุฑ. + +!!! 
Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-cls.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + + # ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ + results = model('https://ultralytics.com/images/bus.jpg') # ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต + ``` + +ุฑุงุฌุน ุชูุงุตูŠู„ ูƒุงู…ู„ุฉ ุญูˆู„ ูˆุถุน `predict` ููŠ ุงู„ุตูุญุฉ [Predict](https://docs.ultralytics.com/modes/predict/). + +## ุชุตุฏูŠุฑ + +ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8n-cls ุฅู„ู‰ ุชู†ุณูŠู‚ ู…ุฎุชู„ู ู…ุซู„ ONNXุŒ CoreMLุŒ ูˆู…ุง ุฅู„ู‰ ุฐู„ูƒ. + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-cls.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต + + # ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ + yolo export model=path/to/best.pt format=onnx # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต + ``` + +ุชุชูˆูุฑ ุตูŠุบ ุชุตุฏูŠุฑ YOLOv8-cls ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ูŠู…ูƒู†ูƒ ุชู†ุจุค ุฃูˆ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ ู…ุจุงุดุฑุฉู‹ ุนู„ู‰ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุตุฏุฑุŒ ุฃูŠ "yolo predict model=yolov8n-cls.onnx". ูŠุชู… ุนุฑุถ ุฃู…ุซู„ุฉ ู„ุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฎุงุต ุจูƒ ุจุนุฏ ุงู„ุงู†ุชู‡ุงุก ู…ู† ุงู„ุชุตุฏูŠุฑ. 
+
+| ุงู„ุตูŠุบุฉ | ูˆุณูŠุทุฉ ุงู„ุตูŠุบุฉ | ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุจูŠุงู†ุงุช ุงู„ูˆุตููŠุฉ | ุงู„ูˆุณูŠุทุงุช |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` |
+
+ุฑุงุฌุน ุงู„ุชูุงุตูŠู„ ุงู„ูƒุงู…ู„ุฉ ุญูˆู„ `export` ููŠ ุตูุญุฉ [Export](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/ar/tasks/classify.md:Zone.Identifier b/ultralytics/docs/ar/tasks/classify.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/classify.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/tasks/detect.md b/ultralytics/docs/ar/tasks/detect.md
new file mode 100755
index 0000000..644269a
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/detect.md
@@ -0,0 +1,185 @@
+---
+comments: true
+description: ูˆุซุงุฆู‚ ุฑุณู…ูŠุฉ ู„ู€ YOLOv8 ุจูˆุงุณุทุฉ Ultralytics. ุชุนู„ู… ูƒูŠููŠุฉ ุชุฏุฑูŠุจ ุงู„ู†ู…ุงุฐุฌ ูˆุงู„ุชุญู‚ู‚ ู…ู† ุตุญุชู‡ุง ูˆุงู„ุชู†ุจุค ุจู‡ุง ูˆุชุตุฏูŠุฑู‡ุง ุจุชู†ุณูŠู‚ุงุช ู…ุฎุชู„ูุฉ. ุชุชุถู…ู† ุฅุญุตุงุฆูŠุงุช ุงู„ุฃุฏุงุก ุงู„ุชูุตูŠู„ูŠุฉ.
+keywords: YOLOv8, Ultralytics, ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช, ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง, ุงู„ุชุฏุฑูŠุจ, ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ, ุงู„ุชู†ุจุค, ุชุตุฏูŠุฑ ุงู„ู†ู…ุงุฐุฌ, COCO, ImageNet, PyTorch, ONNX, CoreML
+---
+
+# ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช
+
+ุฃู…ุซู„ุฉ ุนู„ู‰ ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช
+
+ู…ู‡ู…ุฉ ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช ู‡ูŠ ุชุญุฏูŠุฏ ู…ูˆู‚ุน ูˆูุฆุฉ ุงู„ูƒุงุฆู†ุงุช ููŠ ุตูˆุฑุฉ ุฃูˆ ููŠุฏูŠูˆ.
+
+ู…ุฎุฑุฌุงุช ูƒุงุดู ุงู„ูƒุงุฆู†ุงุช ู‡ูŠ ู…ุฌู…ูˆุนุฉ ู…ู† ู…ุฑุจุนุงุช ุชุญูŠุท ุจุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ุตูˆุฑุฉุŒ ู…ุน ุชุตู†ูŠู ุงู„ูุฆุฉ ูˆุฏุฑุฌุฉ ุซู‚ุฉ ู„ูƒู„ ู…ุฑุจุน.
ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช ู‡ูˆ ุงุฎุชูŠุงุฑ ุฌูŠุฏ ุนู†ุฏู…ุง ุชุญุชุงุฌ ุฅู„ู‰ ุชุญุฏูŠุฏ ูƒุงุฆู†ุงุช ู…ู‡ู…ุฉ ููŠ ู…ุดู‡ุฏุŒ ูˆู„ูƒู†ูƒ ู„ุง ุชุญุชุงุฌ ุฅู„ู‰ ู…ุนุฑูุฉ ุจุงู„ุถุจุท ุฃูŠู† ูŠูƒู…ู† ุงู„ูƒุงุฆู† ุฃูˆ ุดูƒู„ู‡ ุงู„ุฏู‚ูŠู‚.
+
+**ุดุงู‡ุฏ:** ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช ุจุงุณุชุฎุฏุงู… ู†ู…ูˆุฐุฌ Ultralytics YOLOv8 ู…ุน ุชุฏุฑูŠุจ ู…ุณุจู‚.
+
+!!! Tip "ุชู„ู…ูŠุญ"
+
+    ู†ู…ุงุฐุฌ YOLOv8 Detect ู‡ูŠ ุงู„ู†ู…ุงุฐุฌ ุงู„ุงูุชุฑุงุถูŠุฉ ู„ู€ YOLOv8ุŒ ุฃูŠ `yolov8n.pt`ุŒ ูˆู‡ูŠ ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุนู„ู‰ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ุชูุนุฑุถ ู‡ู†ุง ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ู„ู€ YOLOv8 Detect. ุชุนุชู…ุฏ ู†ู…ุงุฐุฌ Detect ูˆSegment ูˆPose ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ุŒ ุจูŠู†ู…ุง ุชุนุชู…ุฏ ู†ู…ุงุฐุฌ Classify ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+ูŠุชู… ุชู†ุฒูŠู„ ุงู„ู†ู…ุงุฐุฌ ุชู„ู‚ุงุฆูŠู‹ุง ู…ู† ุฃุญุฏุซ [ุฅุตุฏุงุฑ Ultralytics](https://github.com/ultralytics/assets/releases) ุนู†ุฏ ุงู„ุงุณุชุฎุฏุงู… ู„ุฃูˆู„ ู…ุฑุฉ.
+
+| ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุญุฌู… (ุจูƒุณู„) | mAPval 50-95 | ุงู„ุณุฑุนุฉ CPU ONNX (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ุณุฑุนุฉ A100 TensorRT (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ู…ุนู„ู…ุงุช (ู…ู„ูŠูˆู†) | FLOPs (ู…ู„ูŠุงุฑ) |
+|---|---|---|---|---|---|---|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- ู‚ูŠู… mAPval ุชุฎุต ู†ู…ูˆุฐุฌู‹ุง ูˆุงุญุฏู‹ุง ูˆู…ู‚ูŠุงุณู‹ุง ูˆุงุญุฏู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช [COCO val2017](http://cocodataset.org).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ุง ุจูˆุงุณุทุฉ `yolo val detect data=coco.yaml device=0`
+- **ุงู„ุณุฑุนุฉ** ู…ุญุณูˆุจุฉ ูƒู…ุชูˆุณุท ุนู„ู‰ ุตูˆุฑ COCO val ุจุงุณุชุฎุฏุงู… ู…ุซูŠู„ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ุง ุจูˆุงุณุทุฉ `yolo val detect data=coco128.yaml batch=1 device=0|cpu`
+
+## ุงู„ุชุฏุฑูŠุจ
+
+ู‚ู… ุจุชุฏุฑูŠุจ YOLOv8n ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช COCO128 ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ ุนู†ุฏ ุญุฌู… ุตูˆุฑุฉ 640. ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ุจุงู„ูˆุณุงุฆุท ุงู„ู…ุชุงุญุฉุŒ ุงู†ุธุฑ ุตูุญุฉ [ุงู„ุชูƒูˆูŠู†](/../usage/cfg.md).
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n.yaml')  # ุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML
+        model = YOLO('yolov8n.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง (ู…ูˆุตู‰ ุจู‡ ู„ู„ุชุฏุฑูŠุจ)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # ุจู†ุงุก ู…ู† YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู†
+
+        # ู‚ู… ุจุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+    === "CLI"
+
+        ```bash
+        # ู‚ู… ุจุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML ูˆุงุจุฏุฃ ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุตูุฑ
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # ุงุจุฏุฃ ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ *.pt ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # ุงุจู†ู ู†ู…ูˆุฐุฌู‹ุง ุฌุฏูŠุฏู‹ุง ู…ู† YAMLุŒ ูˆุงู†ู‚ู„ ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุฅู„ูŠู‡ ูˆุงุจุฏุฃ ุงู„ุชุฏุฑูŠุจ
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช
+
+ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุงู„ุชุนุฑู ุนู„ู‰ ุงู„ูƒุงุฆู†ุงุช ุจุงู„ุชูุตูŠู„ ููŠ [ุฏู„ูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช](../../../datasets/detect/index.md). ู„ุชุญูˆูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุญุงู„ูŠุฉ ู…ู† ุชู†ุณูŠู‚ุงุช ุฃุฎุฑู‰ (ู…ุซู„ COCO ุฅู„ุฎ) ุฅู„ู‰ ุชู†ุณูŠู‚ YOLOุŒ ูŠุฑุฌู‰ ุงุณุชุฎุฏุงู… ุฃุฏุงุฉ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ุงู„ู…ู‚ุฏู…ุฉ ู…ู† Ultralytics.
+
+## ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ
+
+ุชุญู‚ู‚ ู…ู† ุฏู‚ุฉ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง YOLOv8n ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช COCO128. ู„ุง ูŠู„ุฒู… ุชู…ุฑูŠุฑ ุฃูŠ ูˆุณูŠุทุฉุŒ ุฅุฐ ูŠุญุชูุธ ุงู„ู†ู…ูˆุฐุฌ ุจุจูŠุงู†ุงุช ุชุฏุฑูŠุจู‡ ูˆุงู„ูˆุณุงุฆุท ูƒุณู…ุงุช ู„ู„ู†ู…ูˆุฐุฌ.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+
+        # ู‚ู… ุจุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ
+        metrics = model.val()  # ู„ุง ุญุงุฌุฉ ู„ุฃูŠ ูˆุณุงุฆุทุŒ ูŠุชุฐูƒุฑ ุงู„ู†ู…ูˆุฐุฌ ุจูŠุงู†ุงุช ุงู„ุชุฏุฑูŠุจ ูˆุงู„ูˆุณุงุฆุท
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # ู‚ุงุฆู…ุฉ ุชุญุชูˆูŠ map50-95 ู„ูƒู„ ูุฆุฉ
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo detect val model=path/to/best.pt  # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต
+        ```
+
+## ุงู„ุชู†ุจุค
+
+ุงุณุชุฎุฏู… ู†ู…ูˆุฐุฌ YOLOv8n ุงู„ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง ู„ุชุดุบูŠู„ ุงู„ุชู†ุจุคุงุช ุนู„ู‰ ุงู„ุตูˆุฑ.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+
+        # ุฃุฌุฑู ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ
+        results = model('https://ultralytics.com/images/bus.jpg')  # ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต
+        ```
+
+ุงู†ุธุฑ ุชูุงุตูŠู„ ูˆุถุน `predict` ุงู„ูƒุงู…ู„ุฉ ููŠ ุตูุญุฉ [Predict](https://docs.ultralytics.com/modes/predict/).
+
+## ุงู„ุชุตุฏูŠุฑ
+
+ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8n ุฅู„ู‰ ุตูŠุบุฉ ู…ุฎุชู„ูุฉ ู…ุซู„ ONNX ูˆCoreML ูˆุบูŠุฑู‡ุง.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต
+
+        # ู‚ู… ุจุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n.pt format=onnx  # ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo export model=path/to/best.pt format=onnx  # ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฏุฑู‘ุจ ุงู„ู…ุฎุตุต
+        ```
+
+ุงู„ุชู†ุณูŠู‚ุงุช ุงู„ู…ุฏุนูˆู…ุฉ ู„ุชุตุฏูŠุฑ YOLOv8 ู…ุฏุฑุฌุฉ ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ูŠู…ูƒู†ูƒ ุงู„ุชู†ุจุค ุฃูˆ ุงู„ุชุญู‚ู‚ ู…ู† ุตุญุฉ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ูุตุฏู‘ุฑุฉ ู…ุจุงุดุฑุฉุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ `yolo predict model=yolov8n.onnx`. ุณูŠุชู… ุนุฑุถ ุฃู…ุซู„ุฉ ุงุณุชุฎุฏุงู… ู„ู†ู…ูˆุฐุฌูƒ ุจุนุฏ ุงูƒุชู…ุงู„ ุงู„ุชุตุฏูŠุฑ.
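+
+ูˆูƒู…ุซุงู„ ุชูˆุถูŠุญูŠ ู…ุจุณู‘ุท (ุจุงูุชุฑุงุถ ุฃู† ุงู„ุชุตุฏูŠุฑ ุฃู†ุชุฌ ุงู„ู…ู„ู `yolov8n.onnx` ููŠ ู…ุฌู„ุฏ ุงู„ุนู…ู„ ุงู„ุญุงู„ูŠ)ุŒ ูŠู…ูƒู† ุชู…ุฑูŠุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ุฅู„ู‰ ุงู„ูˆุงุฌู‡ุฉ ู†ูุณู‡ุง ู…ู† Python ูˆู‚ุฑุงุกุฉ ุงู„ู…ุฑุจุนุงุช ุงู„ู…ุญูŠุทุฉ ู…ู† ุงู„ู†ุชุงุฆุฌ:
+
+```python
+from ultralytics import YOLO
+
+# ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ุจุตูŠุบุฉ ONNX
+onnx_model = YOLO('yolov8n.onnx')
+
+# ุชุดุบูŠู„ ุงู„ุชู†ุจุค ูˆู‚ุฑุงุกุฉ ุงู„ู…ุฎุฑุฌุงุช
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+print(results[0].boxes.xyxy)  # ุฅุญุฏุงุซูŠุงุช ุงู„ู…ุฑุจุนุงุช ุงู„ู…ุญูŠุทุฉ
+```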
+
+| ุงู„ุตูŠุบุฉ | ูˆุณูŠุทุฉ `format` | ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุจูŠุงู†ุงุช ุงู„ูˆุตููŠุฉ | ุงู„ูˆุณูŠุทุงุช |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` |
+
+ุงู†ุธุฑ ุชูุงุตูŠู„ ูƒุงู…ู„ุฉ ู„ู„ู€ `export` ููŠ ุตูุญุฉ [Export](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/ar/tasks/detect.md:Zone.Identifier b/ultralytics/docs/ar/tasks/detect.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/detect.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/tasks/index.md b/ultralytics/docs/ar/tasks/index.md
new file mode 100755
index 0000000..dfb2f43
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/index.md
@@ -0,0 +1,55 @@
+---
+comments: true
+description: ุชุนุฑู‘ู ุนู„ู‰ ุงู„ู…ู‡ุงู… ุงู„ุฃุณุงุณูŠุฉ ู„ุชู‚ู†ูŠุฉ YOLOv8 ู„ู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉ ูˆุงู„ุชูŠ ุชุดู…ู„ ุงู„ูƒุดูุŒ ุงู„ุชุฌุฒุฆุฉุŒ ุงู„ุชุตู†ูŠู ูˆุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ. ุชุนุฑู ุนู„ู‰ ุงุณุชุฎุฏุงู…ุงุชู‡ุง ููŠ ู…ุดุงุฑูŠุน ุงู„ุฐูƒุงุก ุงู„ุงุตุทู†ุงุนูŠ ุงู„ุฎุงุตุฉ ุจูƒ.
+keywords: UltralyticsุŒ YOLOv8ุŒ ุงู„ูƒุดูุŒ ุงู„ุชุฌุฒุฆุฉุŒ ุงู„ุชุตู†ูŠูุŒ ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉุŒ ุฅุทุงุฑ ุงู„ุฐูƒุงุก ุงู„ุงุตุทู†ุงุนูŠุŒ ู…ู‡ุงู… ุงู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉ
+---
+
+# ู…ู‡ุงู… Ultralytics YOLOv8
+
+ู…ู‡ุงู… Ultralytics YOLOv8 ุงู„ู…ุฏุนูˆู…ุฉ
+
+YOLOv8 ู‡ูˆ ุฅุทุงุฑ ุฐูƒุงุก ุงุตุทู†ุงุนูŠ ูŠุฏุนู… ุนุฏุฉ **ู…ู‡ุงู…** ู„ู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉ. ูŠู…ูƒู† ุงุณุชุฎุฏุงู… ุงู„ุฅุทุงุฑ ู„ุฃุฏุงุก [ุงู„ูƒุดู](detect.md)ุŒ [ุงู„ุชุฌุฒุฆุฉ](segment.md)ุŒ [ุงู„ุชุตู†ูŠู](classify.md)ุŒ ูˆ[ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ](pose.md). ูƒู„ ู…ู† ู‡ุฐู‡ ุงู„ู…ู‡ุงู… ู„ู‡ุง ู‡ุฏู ู…ุฎุชู„ู ูˆุงุณุชุฎุฏุงู… ู…ุญุฏุฏ.
+
+!!! Note "ู…ู„ุงุญุธุฉ"
+
+    ๐Ÿšง ูŠุฌุฑูŠ ุจู†ุงุก ูˆุซุงุฆู‚ู†ุง ู…ุชุนุฏุฏุฉ ุงู„ู„ุบุงุช ุญุงู„ูŠู‹ุงุŒ ูˆู†ุนู…ู„ ุฌุงู‡ุฏูŠู† ุนู„ู‰ ุชุญุณูŠู†ู‡ุง. ุดูƒุฑู‹ุง ู„ุตุจุฑูƒ! ๐Ÿ™
+
+**ุดุงู‡ุฏ:** ุงุณุชูƒุดู ู…ู‡ุงู… Ultralytics YOLO: ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ุงู„ุชุฌุฒุฆุฉุŒ ุงู„ุชุชุจุน ูˆุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ.
+
+## [ุงู„ูƒุดู](detect.md)
+
+ุงู„ูƒุดู ู‡ูˆ ุงู„ู…ู‡ู…ุฉ ุงู„ุฃุณุงุณูŠุฉ ุงู„ู…ุฏุนูˆู…ุฉ ุจูˆุงุณุทุฉ YOLOv8. ูŠุชุถู…ู† ุงู„ูƒุดู ุงูƒุชุดุงู ุงู„ูƒุงุฆู†ุงุช ููŠ ุตูˆุฑุฉ ุฃูˆ ุฅุทุงุฑ ููŠุฏูŠูˆ ูˆุฑุณู… ู…ุฑุจุนุงุช ู…ุญูŠุทุฉ ุญูˆู„ู‡ุง. ูŠุชู… ุชุตู†ูŠู ุงู„ูƒุงุฆู†ุงุช ุงู„ู…ูƒุชุดูุฉ ุฅู„ู‰ ูุฆุงุช ู…ุฎุชู„ูุฉ ุงุณุชู†ุงุฏู‹ุง ุฅู„ู‰ ู…ูŠุฒุงุชู‡ุง. ูŠู…ูƒู† ู„ู€ YOLOv8 ุงูƒุชุดุงู ุฃูƒุซุฑ ู…ู† ูƒุงุฆู† ูˆุงุญุฏ ููŠ ุตูˆุฑุฉ ุฃูˆ ุฅุทุงุฑ ููŠุฏูŠูˆ ูˆุงุญุฏ ุจุฏู‚ุฉ ูˆุณุฑุนุฉ ุนุงู„ูŠุฉ.
+
+[ุฃู…ุซู„ุฉ ู„ู„ูƒุดู](detect.md){ .md-button }
+
+## [ุงู„ุชุฌุฒุฆุฉ](segment.md)
+
+ุงู„ุชุฌุฒุฆุฉ ู‡ูŠ ู…ู‡ู…ุฉ ุชุชุถู…ู† ุชู‚ุณูŠู… ุตูˆุฑุฉ ุฅู„ู‰ ู…ู†ุงุทู‚ ู…ุฎุชู„ูุฉ ุงุณุชู†ุงุฏู‹ุง ุฅู„ู‰ ู…ุญุชูˆู‰ ุงู„ุตูˆุฑุฉ. ูŠุชู… ุชุนูŠูŠู† ุนู„ุงู…ุฉ ู„ูƒู„ ู…ู†ุทู‚ุฉ ุงุณุชู†ุงุฏู‹ุง ุฅู„ู‰ ู…ุญุชูˆุงู‡ุง. ุชุนุชุจุฑ ู‡ุฐู‡ ุงู„ู…ู‡ู…ุฉ ู…ููŠุฏุฉ ููŠ ุชุทุจูŠู‚ุงุช ู…ุซู„ ุชุฌุฒุฆุฉ ุงู„ุตูˆุฑ ูˆุงู„ุชุตูˆูŠุฑ ุงู„ุทุจูŠ. ูŠุณุชุฎุฏู… YOLOv8 ู†ุณุฎุฉ ู…ุนุฏู„ุฉ ู…ู† ู‡ู†ุฏุณุฉ U-Net ู„ุฃุฏุงุก ุงู„ุชุฌุฒุฆุฉ.
+
+[ุฃู…ุซู„ุฉ ู„ู„ุชุฌุฒุฆุฉ](segment.md){ .md-button }
+
+## [ุงู„ุชุตู†ูŠู](classify.md)
+
+ุงู„ุชุตู†ูŠู ู‡ูˆ ู…ู‡ู…ุฉ ุชุชุถู…ู† ุชุตู†ูŠู ุตูˆุฑุฉ ุฅู„ู‰ ูุฆุงุช ู…ุฎุชู„ูุฉ. ูŠู…ูƒู† ุงุณุชุฎุฏุงู… YOLOv8 ู„ุชุตู†ูŠู ุงู„ุตูˆุฑ ุงุณุชู†ุงุฏู‹ุง ุฅู„ู‰ ู…ุญุชูˆุงู‡ุง. ูŠุณุชุฎุฏู… ู†ุณุฎุฉ ู…ุนุฏู„ุฉ ู…ู† ู‡ู†ุฏุณุฉ EfficientNet ู„ุฃุฏุงุก ุงู„ุชุตู†ูŠู.
+
+[ุฃู…ุซู„ุฉ ู„ู„ุชุตู†ูŠู](classify.md){ .md-button }
+
+## [ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ](pose.md)
+
+ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ/ุงู„ู†ู‚ุงุท ุงู„ุฑุฆูŠุณูŠุฉ ู‡ูˆ ู…ู‡ู…ุฉ ุชุชุถู…ู† ุงูƒุชุดุงู ู†ู‚ุงุท ู…ุญุฏุฏุฉ ููŠ ุตูˆุฑุฉ ุฃูˆ ุฅุทุงุฑ ููŠุฏูŠูˆ. ูŠูุดุงุฑ ุฅู„ู‰ ู‡ุฐู‡ ุงู„ู†ู‚ุงุท ุจู…ุตุทู„ุญ ุงู„ู†ู‚ุงุท ุงู„ุฑุฆูŠุณูŠุฉ ูˆุชูุณุชุฎุฏู… ู„ุชุชุจุน ุงู„ุญุฑูƒุฉ ุฃูˆ ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ. ูŠู…ูƒู† ู„ู€ YOLOv8 ุงูƒุชุดุงู ุงู„ู†ู‚ุงุท ุงู„ุฑุฆูŠุณูŠุฉ ููŠ ุตูˆุฑุฉ ุฃูˆ ุฅุทุงุฑ ููŠุฏูŠูˆ ุจุฏู‚ุฉ ูˆุณุฑุนุฉ ุนุงู„ูŠุฉ.
+
+[ุฃู…ุซู„ุฉ ู„ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ](pose.md){ .md-button }
+
+## ุงู„ุงุณุชู†ุชุงุฌ
+
+ูŠุฏุนู… YOLOv8 ู…ู‡ุงู… ู…ุชุนุฏุฏุฉุŒ ุจู…ุง ููŠ ุฐู„ูƒ ุงู„ูƒุดูุŒ ุงู„ุชุฌุฒุฆุฉุŒ ุงู„ุชุตู†ูŠูุŒ ูˆูƒุดู ุงู„ู†ู‚ุงุท ุงู„ุฑุฆูŠุณูŠุฉ. ู„ูƒู„ ู…ู† ู‡ุฐู‡ ุงู„ู…ู‡ุงู… ุฃู‡ุฏุงู ูˆุงุณุชุฎุฏุงู…ุงุช ู…ุฎุชู„ูุฉ. ุนู† ุทุฑูŠู‚ ูู‡ู… ุงู„ุงุฎุชู„ุงูุงุช ุจูŠู† ู‡ุฐู‡ ุงู„ู…ู‡ุงู…ุŒ ูŠู…ูƒู†ูƒ ุงุฎุชูŠุงุฑ ุงู„ู…ู‡ู…ุฉ ุงู„ู…ู†ุงุณุจุฉ ู„ุชุทุจูŠู‚ ุงู„ุฑุคูŠุฉ ุงู„ุญุงุณูˆุจูŠุฉ ุงู„ุฎุงุต ุจูƒ.
diff --git a/ultralytics/docs/ar/tasks/index.md:Zone.Identifier b/ultralytics/docs/ar/tasks/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/tasks/pose.md b/ultralytics/docs/ar/tasks/pose.md
new file mode 100755
index 0000000..8ac1771
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/pose.md
@@ -0,0 +1,186 @@
+---
+comments: true
+description: ุชุนุฑู‘ู ุนู„ู‰ ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… Ultralytics YOLOv8 ู„ู…ู‡ุงู… ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ. ุงุนุซุฑ ุนู„ู‰ ู†ู…ุงุฐุฌ ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุงุŒ ูˆุชุนู„ู… ูƒูŠููŠุฉ ุงู„ุชุฏุฑูŠุจ ูˆุงู„ุชุญู‚ู‚ ูˆุงู„ุชู†ุจุค ูˆุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌูƒ ุงู„ุฎุงุต.
+keywords: UltralyticsุŒ YOLOุŒ YOLOv8ุŒ ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉุŒ ูƒุดู ุงู„ู†ู‚ุงุท ุงู„ู…ูุชุงุญูŠุฉุŒ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ู†ู…ุงุฐุฌ ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุงุŒ ุชุนู„ู… ุงู„ุขู„ุฉุŒ ุงู„ุฐูƒุงุก ุงู„ุงุตุทู†ุงุนูŠ
+---
+
+# ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ
+
+ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ ู‡ูˆ ู…ู‡ู…ุฉ ุชู†ุทูˆูŠ ุนู„ู‰ ุชุญุฏูŠุฏ ู…ูˆู‚ุน ู†ู‚ุงุท ู…ุญุฏุฏุฉ ููŠ ุงู„ุตูˆุฑุฉุŒ ูˆุนุงุฏุฉู‹ ู…ุง ูŠุดุงุฑ ุฅู„ูŠู‡ุง ุจู†ู‚ุงุท ุงู„ูˆุถูˆุญ. ูŠู…ูƒู† ุฃู† ุชู…ุซู„ ู†ู‚ุงุท ุงู„ูˆุถูˆุญ ุฃุฌุฒุงุกู‹ ู…ุฎุชู„ูุฉู‹ ู…ู† ุงู„ูƒุงุฆู† ู…ุซู„ ุงู„ู…ูุงุตู„ ุฃูˆ ุงู„ุนู„ุงู…ุงุช ุงู„ู…ู…ูŠุฒุฉ ุฃูˆ ุงู„ู…ูŠุฒุงุช ุงู„ุจุงุฑุฒุฉ ุงู„ุฃุฎุฑู‰. ุนุงุฏุฉู‹ ู…ุง ูŠุชู… ุชู…ุซูŠู„ ู…ูˆุงู‚ุน ู†ู‚ุงุท ุงู„ูˆุถูˆุญ ูƒู…ุฌู…ูˆุนุฉ ู…ู† ุงู„ุฅุญุฏุงุซูŠุงุช ุงู„ุซู†ุงุฆูŠุฉ `[x, y]` ุฃูˆ ุงู„ุซู„ุงุซูŠุฉ `[x, y, visible]`.
+
+ูŠูƒูˆู† ู†ุงุชุฌ ู†ู…ูˆุฐุฌ ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ ู…ุฌู…ูˆุนุฉ ู…ู† ุงู„ู†ู‚ุงุท ุงู„ุชูŠ ุชู…ุซู„ ู†ู‚ุงุท ุงู„ูˆุถูˆุญ ุนู„ู‰ ูƒุงุฆู† ููŠ ุงู„ุตูˆุฑุฉุŒ ุนุงุฏุฉู‹ ู…ุน ุฏุฑุฌุงุช ุงู„ุซู‚ุฉ ู„ูƒู„ ู†ู‚ุทุฉ. ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ ู‡ูˆ ุฎูŠุงุฑ ุฌูŠุฏ ุนู†ุฏู…ุง ุชุญุชุงุฌ ุฅู„ู‰ ุชุญุฏูŠุฏ ุฃุฌุฒุงุก ู…ุญุฏุฏุฉ ู…ู† ูƒุงุฆู† ููŠ ู…ุดู‡ุฏุŒ ูˆู…ูˆู‚ุนู‡ุง ุจุงู„ู†ุณุจุฉ ู„ุจุนุถู‡ุง ุงู„ุจุนุถ.
+
+**ุดุงู‡ุฏ:** ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ ู…ุน Ultralytics YOLOv8.
+
+!!! Tip "ู†ุตูŠุญุฉ"
+
+    ุงู„ู†ู…ุงุฐุฌ ุงู„ุชูŠ ุชุญุชูˆูŠ ุนู„ู‰ ุงู„ู„ุงุญู‚ุฉ `-pose` ู‡ูŠ ู†ู…ุงุฐุฌ YOLOv8 ู„ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ `yolov8n-pose.pt`. ู‡ุฐู‡ ุงู„ู†ู…ุงุฐุฌ ู…ุฏุฑู‘ุจุฉ ุนู„ู‰ [ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ู†ู‚ุงุท ุงู„ูˆุถูˆุญ COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) ูˆู‡ูŠ ู…ู†ุงุณุจุฉ ู„ู…ุฌู…ูˆุนุฉ ู…ุชู†ูˆุนุฉ ู…ู† ู…ู‡ุงู… ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ.
+
+## [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ุชูุนุฑุถ ู‡ู†ุง ู†ู…ุงุฐุฌ YOLOv8 ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุงู„ุชูŠ ุชุณุชุฎุฏู… ู„ุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ. ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ูƒุดู ูˆุงู„ุชุฌุฒุฆุฉ ูˆุงู„ูˆุถุนูŠุฉ ุนู„ู‰ [ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ุŒ ุจูŠู†ู…ุง ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ุชุตู†ูŠู ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ImageNet.
+
+ูŠุชู… ุชู†ุฒูŠู„ ุงู„ู†ู…ุงุฐุฌ ุชู„ู‚ุงุฆูŠู‹ุง ู…ู† [ุขุฎุฑ ุฅุตุฏุงุฑ Ultralytics](https://github.com/ultralytics/assets/releases) ุนู†ุฏ ุงุณุชุฎุฏุงู…ู‡ุง ู„ุฃูˆู„ ู…ุฑุฉ.
+
+| ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุญุฌู… (ุจุงู„ุจูƒุณู„) | mAPpose 50-95 | mAPpose 50 | ุงู„ุณุฑุนุฉ CPU ONNX (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ุณุฑุนุฉ A100 TensorRT (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ู…ุนู„ู…ุงุช (ู…ู„ูŠูˆู†) | FLOPs (ู…ู„ูŠุงุฑ) |
+|---|---|---|---|---|---|---|---|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- ู‚ูŠู… **mAPval** ุชุฎุต ู†ู…ูˆุฐุฌู‹ุง ูˆุงุญุฏู‹ุง ูˆู…ู‚ูŠุงุณู‹ุง ูˆุงุญุฏู‹ุง ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช [COCO Keypoints val2017](http://cocodataset.org).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ ุจูˆุงุณุทุฉ `yolo val pose data=coco-pose.yaml device=0`
+- ูŠุชู… ุญุณุงุจ **ุงู„ุณุฑุนุฉ** ูƒู…ุชูˆุณุท ุนู„ู‰ ุตูˆุฑ COCO val ุจุงุณุชุฎุฏุงู… ู…ุซูŠู„ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ ุจูˆุงุณุทุฉ `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu`
+
+## ุงู„ุชุฏุฑูŠุจ
+
+ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ูˆุฐุฌ YOLOv8-pose ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128-pose.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-pose.yaml')  # ุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML
+        model = YOLO('yolov8n-pose.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง (ู…ูˆุตู‰ ุจู‡ ู„ู„ุชุฏุฑูŠุจ)
+        model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt')  # ุจู†ุงุก ู†ู…ูˆุฐุฌ ู…ู† YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู†
+
+        # ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ
+        results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640)
+        ```
+    === "CLI"
+
+        ```bash
+        # ุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุจุฏุงูŠุฉ
+        yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640
+
+        # ุงู„ุจุฏุก ููŠ ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง *.pt
+        yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640
+
+        # ุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAMLุŒ ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุฅู„ูŠู‡ุŒ ูˆุงู„ุจุฏุก ููŠ ุงู„ุชุฏุฑูŠุจ
+        yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640
+        ```
+
+### ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช
+
+ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุงุช ุจูŠุงู†ุงุช ู†ู‚ุงุท ุงู„ูˆุถูˆุญ YOLO ููŠ [ุฏู„ูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช](../../../datasets/pose/index.md). ู„ุชุญูˆูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุญุงู„ูŠุฉ ุงู„ุชูŠ ู„ุฏูŠูƒ ู…ู† ุชู†ุณูŠู‚ุงุช ุฃุฎุฑู‰ (ู…ุซู„ COCO ุฅู„ุฎ) ุฅู„ู‰ ุชู†ุณูŠู‚ YOLOุŒ ูŠุฑุฌู‰ ุงุณุชุฎุฏุงู… ุฃุฏุงุฉ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ู…ู† Ultralytics.
+
+## ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ
+
+ุชุญู‚ู‚ ู…ู† ุฏู‚ุฉ ู†ู…ูˆุฐุฌ YOLOv8n-pose ุงู„ู…ุฏุฑู‘ุจ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128-pose. ู„ุง ูŠู„ุฒู… ุชู…ุฑูŠุฑ ุฃูŠ ูˆุณูŠุทุฉ ุฅู„ู‰ `model` ุนู†ุฏ ุงู„ุงุณุชุฏุนุงุก.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-pose.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+
+        # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ
+        metrics = model.val()  # ู„ุง ุญุงุฌุฉ ู„ุฃูŠ ูˆุณุงุฆุทุŒ ูŠุชุฐูƒุฑ ุงู„ู†ู…ูˆุฐุฌ ุงู„ุจูŠุงู†ุงุช ูˆุงู„ูˆุณุงุฆุท ูƒุณู…ุงุช ู„ู„ู†ู…ูˆุฐุฌ
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # ู‚ุงุฆู…ุฉ ุชุญุชูˆูŠ ุนู„ู‰ map50-95 ู„ูƒู„ ูุฆุฉ
+        ```
+    === "CLI"
+
+        ```bash
+        yolo pose val model=yolov8n-pose.pt  # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo pose val model=path/to/best.pt  # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต
+        ```
+
+## ุงู„ุชู†ุจุค
+
+ุงุณุชุฎุฏู… ู†ู…ูˆุฐุฌ YOLOv8n-pose ุงู„ู…ุฏุฑู‘ุจ ู„ุชุดุบูŠู„ ุชูˆู‚ุนุงุช ุนู„ู‰ ุงู„ุตูˆุฑ.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-pose.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+
+        # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ
+        results = model('https://ultralytics.com/images/bus.jpg')  # ุงู„ุชู†ุจุค ุจุตูˆุฑุฉ
+        ```
+    === "CLI"
+
+        ```bash
+        yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต
+        ```
+
+ุงู†ุธุฑ ุชูุงุตูŠู„ `predict` ูƒุงู…ู„ุฉ ููŠ [ุตูุญุฉ ุงู„ุชู†ุจุค](https://docs.ultralytics.com/modes/predict/).
+
+## ุงู„ุชุตุฏูŠุฑ
+
+ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8n-pose ุฅู„ู‰ ุตูŠุบุฉ ู…ุฎุชู„ูุฉ ู…ุซู„ ONNX ูˆCoreML ูˆุบูŠุฑู‡ุง.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-pose.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต
+
+        # ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-pose.pt format=onnx  # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        yolo export model=path/to/best.pt format=onnx  # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+        ```
+
+ุชุชูˆูุฑ ุชู†ุณูŠู‚ุงุช ุชุตุฏูŠุฑ YOLOv8-pose ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ูŠู…ูƒู†ูƒ ุงู„ุชู†ุจุค ุฃูˆ ุงู„ุชุญู‚ู‚ ู…ุจุงุดุฑุฉู‹ ุนู„ู‰ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ูุตุฏู‘ุฑุฉุŒ ุนู„ู‰ ุณุจูŠู„ ุงู„ู…ุซุงู„ `yolo predict model=yolov8n-pose.onnx`. ุชูˆุฌุฏ ุฃู…ุซู„ุฉ ุงุณุชุฎุฏุงู… ู…ุชุงุญุฉ ู„ู†ู…ูˆุฐุฌูƒ ุจุนุฏ ุงูƒุชู…ุงู„ ุนู…ู„ูŠุฉ ุงู„ุชุตุฏูŠุฑ.
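+
+ูˆูƒู…ุซุงู„ ุชูˆุถูŠุญูŠ ู…ุจุณู‘ุท (ุจุงูุชุฑุงุถ ุฃู† ุงู„ุชุตุฏูŠุฑ ุฃู†ุชุฌ ุงู„ู…ู„ู `yolov8n-pose.onnx` ููŠ ู…ุฌู„ุฏ ุงู„ุนู…ู„ ุงู„ุญุงู„ูŠ)ุŒ ูŠู…ูƒู† ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ูˆู‚ุฑุงุกุฉ ู†ู‚ุงุท ุงู„ูˆุถูˆุญ ู…ู† ุงู„ู†ุชุงุฆุฌ ููŠ Python:
+
+```python
+from ultralytics import YOLO
+
+# ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ุจุตูŠุบุฉ ONNX
+onnx_model = YOLO('yolov8n-pose.onnx')
+
+# ุชุดุบูŠู„ ุงู„ุชู†ุจุค ูˆู‚ุฑุงุกุฉ ู†ู‚ุงุท ุงู„ูˆุถูˆุญ
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+print(results[0].keypoints.xy)  # ุฅุญุฏุงุซูŠุงุช ู†ู‚ุงุท ุงู„ูˆุถูˆุญ ู„ูƒู„ ูƒุงุฆู†
+```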
+
+| ุงู„ุตูŠุบุฉ | ูˆุณูŠุทุฉ `format` | ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุจูŠุงู†ุงุช ุงู„ูˆุตููŠุฉ | ุงู„ูˆุณุงุฆุท |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` |
+
+ุงู†ุธุฑ ุชูุงุตูŠู„ `export` ูƒุงู…ู„ุฉ ููŠ [ุตูุญุฉ ุงู„ุชุตุฏูŠุฑ](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/ar/tasks/pose.md:Zone.Identifier b/ultralytics/docs/ar/tasks/pose.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/pose.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ar/tasks/segment.md b/ultralytics/docs/ar/tasks/segment.md
new file mode 100755
index 0000000..85f7d39
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/segment.md
@@ -0,0 +1,189 @@
+---
+comments: true
+description: ุชุนู„ู… ูƒูŠููŠุฉ ุงุณุชุฎุฏุงู… ู†ู…ุงุฐุฌ ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ ู…ุน Ultralytics YOLO. ุชุนู„ูŠู…ุงุช ุญูˆู„ ุงู„ุชุฏุฑูŠุจ ูˆุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ ูˆุชูˆู‚ุน ุงู„ุตูˆุฑุฉ ูˆุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ.
+keywords: yolov8ุŒ ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉุŒ UltralyticsุŒ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCOุŒ ุชุฌุฒุฆุฉ ุงู„ุตูˆุฑุฉุŒ ูƒุดู ุงู„ูƒุงุฆู†ุงุชุŒ ุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌุŒ ุงู„ุชุญู‚ู‚ ู…ู† ุตุญุฉ ุงู„ู†ู…ูˆุฐุฌุŒ ุชูˆู‚ุน ุงู„ุตูˆุฑุฉุŒ ุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ
+---
+
+# ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ
+
+ุฃู…ุซู„ุฉ ุนู„ู‰ ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ
+
+ูŠุฐู‡ุจ ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ ุฎุทูˆุฉ ุฃุจุนุฏ ู…ู† ูƒุดู ุงู„ูƒุงุฆู†ุงุช ูˆูŠู†ุทูˆูŠ ุนู„ู‰ ุชุญุฏูŠุฏ ุงู„ูƒุงุฆู†ุงุช ุงู„ูุฑุฏูŠุฉ ููŠ ุตูˆุฑุฉ ูˆุชุฌุฒูŠุฆู‡ุง ุนู† ุจู‚ูŠุฉ ุงู„ุตูˆุฑุฉ.
+
+ู†ุงุชุฌ ู†ู…ูˆุฐุฌ ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ ู‡ูˆ ู…ุฌู…ูˆุนุฉ ู…ู† ุงู„ุฃู‚ู†ุนุฉ ุฃูˆ ุงู„ุญุฏูˆุฏ ุงู„ุชูŠ ุชุญุฏุฏ ูƒู„ ูƒุงุฆู† ููŠ ุงู„ุตูˆุฑุฉุŒ ุฌู†ุจู‹ุง ุฅู„ู‰ ุฌู†ุจ ู…ุน ุชุตู†ูŠู ุงู„ุตู†ู ูˆุฏุฑุฌุงุช ุงู„ุซู‚ุฉ ู„ูƒู„ ูƒุงุฆู†. ูŠูƒูˆู† ูุตู„ ุงู„ุฃุดูƒุงู„ ุงู„ูุฑุฏูŠุฉ ู…ููŠุฏู‹ุง ุนู†ุฏู…ุง ุชุญุชุงุฌ ุฅู„ู‰ ู…ุนุฑูุฉ ู„ูŠุณ ูู‚ุท ุฃูŠู† ุชูˆุฌุฏ ุงู„ูƒุงุฆู†ุงุช ููŠ ุงู„ุตูˆุฑุฉุŒ ูˆู„ูƒู† ุฃูŠุถู‹ุง ู…ุง ู‡ูˆ ุดูƒู„ู‡ุง ุงู„ุฏู‚ูŠู‚.
+
+**ุดุงู‡ุฏ:** ุชุดุบูŠู„ ูุตู„ ุงู„ุฃุดูƒุงู„ ู…ุน ู†ู…ูˆุฐุฌ Ultralytics YOLOv8 ู…ุฏุฑู‘ุจ ู…ุณุจู‚ู‹ุง ุจุงุณุชุฎุฏุงู… Python.
+
+!!! Tip "ู†ุตูŠุญุฉ"
+
+    ุชุณุชุฎุฏู… ู†ู…ุงุฐุฌ YOLOv8 Seg ุงู„ู„ุงุญู‚ุฉ `-seg`ุŒ ุฃูŠ `yolov8n-seg.pt`ุŒ ูˆุชูƒูˆู† ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ุนู„ู‰ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ุชูุนุฑุถ ู‡ู†ุง ุงู„ู†ู…ุงุฐุฌ ุงู„ุฌุงู‡ุฒุฉ ุงู„ู…ุฏุฑู‘ุจุฉ ู…ุณุจู‚ู‹ุง ู„ู€ YOLOv8 Segment. ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ูƒุดู ูˆุงู„ุชุฌุฒุฆุฉ ูˆุชู‚ุฏูŠุฑ ุงู„ูˆุถุนูŠุฉ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ุŒ ุจูŠู†ู…ุง ูŠุชู… ุชุฏุฑูŠุจ ู†ู…ุงุฐุฌ ุงู„ุชุตู†ูŠู ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+ูŠุชู… ุชู†ุฒูŠู„ [ุงู„ู†ู…ุงุฐุฌ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ุชู„ู‚ุงุฆูŠู‹ุง ู…ู† [ุงู„ุฅุตุฏุงุฑ](https://github.com/ultralytics/assets/releases) ุงู„ุฃุฎูŠุฑ ู„ู€ Ultralytics ุนู†ุฏ ุฃูˆู„ ุงุณุชุฎุฏุงู….
+
+| ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุญุฌู… (ุจูƒุณู„) | mAPbox 50-95 | mAPmask 50-95 | ุงู„ุณุฑุนุฉ CPU ONNX (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ุณุฑุนุฉ A100 TensorRT (ู…ู„ู„ูŠ ุซุงู†ูŠุฉ) | ุงู„ู…ุนู„ู…ุงุช (ู…ู„ูŠูˆู†) | FLOPs (ู…ู„ูŠุงุฑ) |
+|---|---|---|---|---|---|---|---|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- ุชูุณุชุฎุฏู… ู‚ูŠู… **mAPval** ู„ู†ู…ูˆุฐุฌ ูˆุงุญุฏ ูˆุญุฌู… ูˆุงุญุฏ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช [COCO val2017](http://cocodataset.org).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ุง ุจุงุณุชุฎุฏุงู… `yolo val segment data=coco.yaml device=0`
+- ุชูุญุณุจ **ุงู„ุณุฑุนุฉ** ูƒู…ุชูˆุณุท ุนู„ู‰ ุตูˆุฑ COCO val ุจุงุณุชุฎุฏุงู… ู…ุซูŠู„ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
ูŠู…ูƒู† ุฅุนุงุฏุฉ ุฅู†ุชุงุฌู‡ุง ุจุงุณุชุฎุฏุงู… `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## ุงู„ุชุฏุฑูŠุจ + +ู‚ู… ุจุชุฏุฑูŠุจ YOLOv8n-seg ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128-seg ู„ู…ุฏุฉ 100 ุฏูˆุฑุฉ ุนู†ุฏ ุญุฌู… ุตูˆุฑุฉ 640. ู„ู„ุญุตูˆู„ ุนู„ู‰ ู‚ุงุฆู…ุฉ ูƒุงู…ู„ุฉ ุจุงู„ูˆุณุงุฆุท ุงู„ู…ุชุงุญุฉ ุŒ ุฑุงุฌุน ุตูุญุฉ [ุงู„ุชูƒูˆูŠู†](/../usage/cfg.md). + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-seg.yaml') # ู‚ู… ุจุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML + model = YOLO('yolov8n-seg.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง (ู…ูˆุตู‰ ุจู‡ ู„ู„ุชุฏุฑูŠุจ) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # ู‚ู… ุจุจู†ุงุฆู‡ ู…ู† YAML ูˆู†ู‚ู„ ุงู„ูˆุฒู† + + # ู‚ู… ุจุชุฏุฑูŠุจ ุงู„ู†ู…ูˆุฐุฌ + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ู‚ู… ุจุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† ู…ู„ู YAML ูˆุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ุงู„ุจุฏุงูŠุฉ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # ู‚ู… ุจุจุฏุก ุงู„ุชุฏุฑูŠุจ ู…ู† ู†ู…ูˆุฐุฌ *.pt ู…ุฏุฑุจ ู…ุณุจู‚ู‹ุง + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # ู‚ู… ุจุจู†ุงุก ู†ู…ูˆุฐุฌ ุฌุฏูŠุฏ ู…ู† YAML ูˆู†ู‚ู„ ุงู„ุฃูˆุฒุงู† ุงู„ู…ุฏุฑุจุฉ ู…ุณุจูŽู‚ู‹ุง ุฅู„ูŠู‡ ูˆุงุจุฏุฃ ุงู„ุชุฏุฑูŠุจ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช + +ูŠู…ูƒู† ุงู„ุนุซูˆุฑ ุนู„ู‰ ุชู†ุณูŠู‚ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช ุชุฌุฒูŠุก YOLO ุจุงู„ุชูุตูŠู„ ููŠ [ุฏู„ูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช](../../../datasets/segment/index.md). ู„ุชุญูˆูŠู„ ู…ุฌู…ูˆุนุฉ ุงู„ุจูŠุงู†ุงุช ุงู„ุญุงู„ูŠุฉ ุงู„ุชูŠ ุชุชุจุน ุชู†ุณูŠู‚ุงุช ุฃุฎุฑู‰ (ู…ุซู„ COCO ุฅู„ุฎ) ุฅู„ู‰ ุชู†ุณูŠู‚ YOLO ุŒ ูŠูุฑุฌู‰ ุงุณุชุฎุฏุงู… ุฃุฏุงุฉ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ู…ู† Ultralytics. + +## ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ + +ู‚ู… ุจุงู„ุชุญู‚ู‚ ู…ู† ุฏู‚ุฉ ู†ู…ูˆุฐุฌ YOLOv8n-seg ุงู„ู…ุฏุฑุจ ุนู„ู‰ ู…ุฌู…ูˆุนุฉ ุจูŠุงู†ุงุช COCO128-seg. ู„ุง ุญุงุฌุฉ ู„ุชู…ุฑูŠุฑ ุฃูŠ ูˆุณูŠุทุฉ ูƒู…ุง ูŠุญุชูุธ ุงู„ู†ู…ูˆุฐุฌ ุจุจูŠุงู†ุงุช "ุชุฏุฑูŠุจู‡" ูˆุงู„ูˆุณูŠุทุงุช ูƒุณู…ุงุช ุงู„ู†ู…ูˆุฐุฌ. + +!!! Example "ู…ุซุงู„" + + === "Python" + + ```python + from ultralytics import YOLO + + # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ + model = YOLO('yolov8n-seg.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ + model = YOLO('path/to/best.pt') # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต + + # ู‚ู… ุจุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ + metrics = model.val() # ู„ุง ุญุงุฌุฉ ุฅู„ู‰ ุฃูŠ ูˆุณูŠุทุฉ ุŒ ูŠุชุฐูƒุฑ ุงู„ู†ู…ูˆุฐุฌ ุจูŠุงู†ุงุช ุงู„ุชุฏุฑูŠุจ ูˆุงู„ูˆุณูŠุทุงุช ูƒุณู…ุงุช ุงู„ู†ู…ูˆุฐุฌ + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # ู‚ุงุฆู…ุฉ ุชุญุชูˆูŠ ุนู„ู‰ map50-95(B) ู„ูƒู„ ูุฆุฉ + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # ู‚ุงุฆู…ุฉ ุชุญุชูˆูŠ ุนู„ู‰ map50-95(M) ู„ูƒู„ ูุฆุฉ + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ + yolo segment val model=path/to/best.pt # ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต + ``` + +## ุงู„ุชู†ุจุค + +ุงุณุชุฎุฏู… ู†ู…ูˆุฐุฌ YOLOv8n-seg ุงู„ู…ุฏุฑุจ ู„ู„ู‚ูŠุงู… ุจุงู„ุชู†ุจุคุงุช ุนู„ู‰ ุงู„ุตูˆุฑ. + +!!! 
Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-seg.pt')  # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฎุตุต
+
+        # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ
+        results = model('https://ultralytics.com/images/bus.jpg')  # ุงู„ุชู†ุจุค ุนู„ู‰ ุตูˆุฑุฉ
+        ```
+    === "CLI"
+
+        ```bash
+        yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ุฑุณู…ูŠ
+        yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'  # ุงู„ุชู†ุจุค ุจุงุณุชุฎุฏุงู… ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ุฎุตุต
+        ```
+
+ุงู†ุธุฑ ุงู„ุชูุงุตูŠู„ ุงู„ูƒุงู…ู„ุฉ ู„ูˆุถุน `predict` ููŠ [ุตูุญุฉ ุงู„ุชู†ุจุค](https://docs.ultralytics.com/modes/predict/).
+
+## ุงู„ุชุตุฏูŠุฑ
+
+ู‚ู… ุจุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ YOLOv8n-seg ุฅู„ู‰ ุตูŠุบุฉ ู…ุฎุชู„ูุฉ ู…ุซู„ ONNX ูˆCoreML ูˆู…ุง ุฅู„ู‰ ุฐู„ูƒ.
+
+!!! Example "ู…ุซุงู„"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ู‚ู… ุจุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ
+        model = YOLO('yolov8n-seg.pt')  # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        model = YOLO('path/to/best.pt')  # ู‚ู… ุจุชุญู…ูŠู„ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต
+
+        # ู‚ู… ุจุชุตุฏูŠุฑ ุงู„ู†ู…ูˆุฐุฌ
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-seg.pt format=onnx  # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ุฑุณู…ูŠ
+        yolo export model=path/to/best.pt format=onnx  # ุชุตุฏูŠุฑ ู†ู…ูˆุฐุฌ ู…ุฏุฑู‘ุจ ู…ุฎุตุต
+        ```
+
+ุชุชูˆูุฑ ุตูŠุบ ุชุตุฏูŠุฑ YOLOv8-seg ููŠ ุงู„ุฌุฏูˆู„ ุฃุฏู†ุงู‡. ูŠู…ูƒู†ูƒ ุงู„ุชู†ุจุค ุฃูˆ ุงู„ุชุญู‚ู‚ ู…ู† ุงู„ุตุญุฉ ู…ุจุงุดุฑุฉู‹ ุนู„ู‰ ุงู„ู†ู…ุงุฐุฌ ุงู„ู…ูุตุฏู‘ุฑุฉุŒ ุฃูŠ `yolo predict model=yolov8n-seg.onnx`. ูŠุชู… ุนุฑุถ ุฃู…ุซู„ุฉ ุนู† ุงู„ุงุณุชุฎุฏุงู… ู„ู†ู…ูˆุฐุฌูƒ ุจุนุฏ ุงูƒุชู…ุงู„ ุงู„ุชุตุฏูŠุฑ.
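+
+ูˆูƒู…ุซุงู„ ุชูˆุถูŠุญูŠ ู…ุจุณู‘ุท (ุจุงูุชุฑุงุถ ุฃู† ุงู„ุชุตุฏูŠุฑ ุฃู†ุชุฌ ุงู„ู…ู„ู `yolov8n-seg.onnx` ููŠ ู…ุฌู„ุฏ ุงู„ุนู…ู„ ุงู„ุญุงู„ูŠ)ุŒ ูŠู…ูƒู† ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ูˆู‚ุฑุงุกุฉ ุฃู‚ู†ุนุฉ ุงู„ุชุฌุฒุฆุฉ ู…ู† ุงู„ู†ุชุงุฆุฌ ููŠ Python:
+
+```python
+from ultralytics import YOLO
+
+# ุชุญู…ูŠู„ ุงู„ู†ู…ูˆุฐุฌ ุงู„ู…ูุตุฏู‘ุฑ ุจุตูŠุบุฉ ONNX
+onnx_model = YOLO('yolov8n-seg.onnx')
+
+# ุชุดุบูŠู„ ุงู„ุชู†ุจุค ูˆู‚ุฑุงุกุฉ ุฃู‚ู†ุนุฉ ุงู„ุชุฌุฒุฆุฉ
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+masks = results[0].masks  # ุฃู‚ู†ุนุฉ ุงู„ุชุฌุฒุฆุฉ ุฅู† ูˆูุฌุฏุช ูƒุงุฆู†ุงุช
+```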
+
+| ุงู„ุตูŠุบุฉ | ูˆุณูŠุทุฉ `format` | ุงู„ู†ู…ูˆุฐุฌ | ุงู„ุจูŠุงู†ุงุช ุงู„ูˆุตููŠุฉ | ุงู„ูˆุณุงุฆุท |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` |
+
+ุงู†ุธุฑ ุชูุงุตูŠู„ `export` ุงู„ูƒุงู…ู„ุฉ ููŠ [ุตูุญุฉ ุงู„ุชุตุฏูŠุฑ](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/ar/tasks/segment.md:Zone.Identifier b/ultralytics/docs/ar/tasks/segment.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ar/tasks/segment.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/build_docs.py b/ultralytics/docs/build_docs.py
new file mode 100755
index 0000000..914f2fe
--- /dev/null
+++ b/ultralytics/docs/build_docs.py
@@ -0,0 +1,116 @@
+# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license
+"""
+This Python script is designed to automate the building and post-processing of MkDocs documentation, particularly for
+projects with multilingual content. It streamlines the workflow for generating localized versions of the documentation
+and updating HTML links to ensure they are correctly formatted.
+
+Key Features:
+- Automated building of MkDocs documentation: The script compiles both the main documentation and
+  any localized versions specified in separate MkDocs configuration files.
+- Post-processing of generated HTML files: After the documentation is built, the script updates all
+  HTML files to remove the '.md' extension from internal links. This ensures that links in the built
+  HTML documentation correctly point to other HTML pages rather than Markdown files, which is crucial
+  for proper navigation within the web-based documentation.
+
+Usage:
+- Run the script from the root directory of your MkDocs project.
+- Ensure that MkDocs is installed and that all MkDocs configuration files (main and localized versions)
+  are present in the project directory.
+- The script first builds the documentation using MkDocs, then scans the generated HTML files in the 'site'
+  directory to update the internal links.
+- It's ideal for projects where the documentation is written in Markdown and needs to be served as a static website.
+
+Note:
+- This script is built to be run in an environment where Python and MkDocs are installed and properly configured.
+"""
+
+import re
+import shutil
+import subprocess
+from pathlib import Path
+
+DOCS = Path(__file__).parent.resolve()
+SITE = DOCS.parent / 'site'
+
+
+def build_docs():
+    """Build docs using mkdocs."""
+    if SITE.exists():
+        print(f'Removing existing {SITE}')
+        shutil.rmtree(SITE)
+
+    # Build the main documentation
+    print(f'Building docs from {DOCS}')
+    subprocess.run(f'mkdocs build -f {DOCS}/mkdocs.yml', check=True, shell=True)
+
+    # Build other localized documentations
+    for file in DOCS.glob('mkdocs_*.yml'):
+        print(f'Building MkDocs site with configuration file: {file}')
+        subprocess.run(f'mkdocs build -f {file}', check=True, shell=True)
+    print(f'Site built at {SITE}')
+
+
+def update_html_links():
+    """Update href links in HTML files to remove '.md' and '/index.md', excluding links starting with 'https://'."""
+    html_files = Path(SITE).rglob('*.html')
+    total_updated_links = 0
+
+    for html_file in html_files:
+        with open(html_file, 'r+', encoding='utf-8') as file:
+            content = file.read()
+            # Find all links to be updated, excluding those starting with 'https://'
+            links_to_update = re.findall(r'href="(?!https://)([^"]+?)(/index)?\.md"', content)
+
+            # Update the content and count the number of links updated
+            updated_content, number_of_links_updated = re.subn(r'href="(?!https://)([^"]+?)(/index)?\.md"',
+                                                               r'href="\1"', content)
+            total_updated_links += number_of_links_updated
+
+            # Special handling for '/index' links
+            updated_content, number_of_index_links_updated = re.subn(r'href="([^"]+)/index"', r'href="\1/"',
+                                                                     updated_content)
+            total_updated_links += number_of_index_links_updated
+
+            # Write the updated content back to the file
+            file.seek(0)
+            file.write(updated_content)
+            file.truncate()
+
+            # Print updated links for this file
+            for link in links_to_update:
+                print(f'Updated link in {html_file}: {link[0]}')
+
+    print(f'Total number of links updated: {total_updated_links}')
+
+
+def update_page_title(file_path: Path, new_title: str):
+    """Update the <title> of an HTML file."""
+
+    # Read the content of the file
+    with open(file_path, encoding='utf-8') as file:
+        content = file.read()
+
+    # Replace the existing title with the new title
+    updated_content = re.sub(r'<title>.*?</title>', f'<title>{new_title}</title>', content)
+
+    # Write the updated content back to the file
+    with open(file_path, 'w', encoding='utf-8') as file:
+        file.write(updated_content)
+
+
+def main():
+    # Build the docs
+    build_docs()
+
+    # Update .md in href links
+    update_html_links()
+
+    # Show command to serve built website
+    print('Serve site at http://localhost:8000 with "python -m http.server --directory site"')
+
+    # Update titles
+    update_page_title(SITE / '404.html', new_title='Ultralytics Docs - Not Found')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ultralytics/docs/build_docs.py:Zone.Identifier b/ultralytics/docs/build_docs.py:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/build_docs.py:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/build_reference.py b/ultralytics/docs/build_reference.py
new file mode 100755
index 0000000..cb15d34
--- /dev/null
+++ b/ultralytics/docs/build_reference.py
@@ -0,0 +1,128 @@
+# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license
+"""
+Helper file to build Ultralytics Docs reference section. Recursively walks through ultralytics dir and builds an MkDocs
+reference section of *.md files composed of classes and functions, and also creates a nav menu for use in mkdocs.yaml.
+
+Note: Must be run from repository root directory. Do not run from docs directory.
+"""
+
+import re
+from collections import defaultdict
+from pathlib import Path
+
+from ultralytics.utils import ROOT
+
+NEW_YAML_DIR = ROOT.parent
+CODE_DIR = ROOT
+REFERENCE_DIR = ROOT.parent / 'docs/en/reference'
+
+
+def extract_classes_and_functions(filepath: Path) -> tuple:
+    """Extracts class and function names from a given Python file."""
+    content = filepath.read_text()
+    class_pattern = r'(?:^|\n)class\s(\w+)(?:\(|:)'
+    func_pattern = r'(?:^|\n)def\s(\w+)\('
+
+    classes = re.findall(class_pattern, content)
+    functions = re.findall(func_pattern, content)
+
+    return classes, functions
+
+
+def create_markdown(py_filepath: Path, module_path: str, classes: list, functions: list):
+    """Creates a Markdown file containing the API reference for the given Python module."""
+    md_filepath = py_filepath.with_suffix('.md')
+
+    # Read existing content and keep header content between first two ---
+    header_content = ''
+    if md_filepath.exists():
+        existing_content = md_filepath.read_text()
+        header_parts = existing_content.split('---')
+        for part in header_parts:
+            if 'description:' in part or 'comments:' in part:
+                header_content += f'---{part}---\n\n'
+
+    module_name = module_path.replace('.__init__', '')
+    module_path = module_path.replace('.', '/')
+    url = f'https://github.com/ultralytics/ultralytics/blob/main/{module_path}.py'
+    edit = f'https://github.com/ultralytics/ultralytics/edit/main/{module_path}.py'
+    title_content = (
+        f'# Reference for `{module_path}.py`\n\n'
+        f'!!! Note\n\n'
+        f'    This file is available at [{url}]({url}). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request]({edit}) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™!\n\n'
+    )
+    md_content = ['<br><br>\n'] + [f'## ::: {module_name}.{class_name}\n\n<br><br>\n' for class_name in classes]
+    md_content.extend(f'## ::: {module_name}.{func_name}\n\n<br><br>\n' for func_name in functions)
+    md_content = header_content + title_content + '\n'.join(md_content)
+    if not md_content.endswith('\n'):
+        md_content += '\n'
+
+    md_filepath.parent.mkdir(parents=True, exist_ok=True)
+    md_filepath.write_text(md_content)
+
+    return md_filepath.relative_to(NEW_YAML_DIR)
+
+
+def nested_dict() -> defaultdict:
+    """Creates and returns a nested defaultdict."""
+    return defaultdict(nested_dict)
+
+
+def sort_nested_dict(d: dict) -> dict:
+    """Sorts a nested dictionary recursively."""
+    return {key: sort_nested_dict(value) if isinstance(value, dict) else value for key, value in sorted(d.items())}
+
+
+def create_nav_menu_yaml(nav_items: list):
+    """Creates a YAML file for the navigation menu based on the provided list of items."""
+    nav_tree = nested_dict()
+
+    for item_str in nav_items:
+        item = Path(item_str)
+        parts = item.parts
+        current_level = nav_tree['reference']
+        for part in parts[2:-1]:  # skip the first two parts (docs and reference) and the last part (filename)
+            current_level = current_level[part]
+
+        md_file_name = parts[-1].replace('.md', '')
+        current_level[md_file_name] = item
+
+    nav_tree_sorted = sort_nested_dict(nav_tree)
+
+    def _dict_to_yaml(d, level=0):
+        """Converts a nested dictionary to a YAML-formatted string with indentation."""
+        yaml_str = ''
+        indent = '  ' * level
+        for k, v in d.items():
+            if isinstance(v, dict):
+                yaml_str += f'{indent}- {k}:\n{_dict_to_yaml(v, level + 1)}'
+            else:
+                yaml_str += f"{indent}- {k}: {str(v).replace('docs/en/', '')}\n"
+        return yaml_str
+
+    # Print updated YAML reference section
+    print('Scan complete, new mkdocs.yaml reference section is:\n\n', _dict_to_yaml(nav_tree_sorted))
+
+    # Save new YAML reference section
+    # (NEW_YAML_DIR / 'nav_menu_updated.yml').write_text(_dict_to_yaml(nav_tree_sorted))
+
+
+def main():
+    """Main function to extract class and function names, create Markdown files, and generate a YAML navigation menu."""
+    nav_items = []
+
+    for py_filepath in CODE_DIR.rglob('*.py'):
+        classes, functions = extract_classes_and_functions(py_filepath)
+
+        if classes or functions:
+            py_filepath_rel = py_filepath.relative_to(CODE_DIR)
+            md_filepath = REFERENCE_DIR / py_filepath_rel
+            module_path = f"ultralytics.{py_filepath_rel.with_suffix('').as_posix().replace('/', '.')}"
+            md_rel_filepath = create_markdown(md_filepath, module_path, classes, functions)
+            nav_items.append(str(md_rel_filepath))
+
+    create_nav_menu_yaml(nav_items)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ultralytics/docs/build_reference.py:Zone.Identifier b/ultralytics/docs/build_reference.py:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/build_reference.py:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/index.md b/ultralytics/docs/de/index.md
new file mode 100755
index 0000000..a008082
--- /dev/null
+++ b/ultralytics/docs/de/index.md
@@ -0,0 +1,83 @@
+---
+comments: true
+description: Entdecken Sie einen vollstรคndigen Leitfaden zu Ultralytics YOLOv8, einem schnellen und prรคzisen Modell zur Objekterkennung und Bildsegmentierung. Installations-, Vorhersage-, Trainingstutorials und mehr.
+keywords: Ultralytics, YOLOv8, Objekterkennung, Bildsegmentierung, maschinelles Lernen, Deep Learning, Computer Vision, YOLOv8 Installation, YOLOv8 Vorhersage, YOLOv8 Training, YOLO-Geschichte, YOLO-Lizenzen
+---
+
+Ultralytics YOLO Banner
+
+Wir stellen [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) vor, die neueste Version des renommierten Echtzeit-Modells zur Objekterkennung und Bildsegmentierung. YOLOv8 basiert auf den neuesten Erkenntnissen im Bereich Deep Learning und Computer Vision und bietet eine unvergleichliche Leistung hinsichtlich Geschwindigkeit und Genauigkeit. Sein optimiertes Design macht es fรผr verschiedene Anwendungen geeignet und leicht an verschiedene Hardwareplattformen anpassbar, von Edge-Gerรคten bis hin zu Cloud-APIs.
+
+Erkunden Sie die YOLOv8-Dokumentation, eine umfassende Ressource, die Ihnen helfen soll, seine Funktionen und Fรคhigkeiten zu verstehen und zu nutzen. Ob Sie ein erfahrener Machine-Learning-Praktiker sind oder neu in diesem Bereich, dieses Hub zielt darauf ab, das Potenzial von YOLOv8 in Ihren Projekten zu maximieren.
+
+!!! Note "Hinweis"
+
+    ๐Ÿšง Unsere mehrsprachige Dokumentation wird derzeit entwickelt und wir arbeiten intensiv an ihrer Verbesserung. Wir danken fรผr Ihre Geduld! ๐Ÿ™
+
+## Wo Sie beginnen sollten
+
+- **Installieren** Sie `ultralytics` mit pip und starten Sie in wenigen Minuten [:material-clock-fast: Loslegen](quickstart.md){ .md-button }
+- **Vorhersagen** Sie neue Bilder und Videos mit YOLOv8 [:octicons-image-16: Auf Bilder vorhersagen](modes/predict.md){ .md-button }
+- **Trainieren** Sie ein neues YOLOv8-Modell mit Ihrem eigenen benutzerdefinierten Datensatz [:fontawesome-solid-brain: Ein Modell trainieren](modes/train.md){ .md-button }
+- **Erforschen** Sie YOLOv8-Aufgaben wie Segmentieren, Klassifizieren, Posenschรคtzung und Verfolgen [:material-magnify-expand: Aufgaben erkunden](tasks/index.md){ .md-button }
+
+**Ansehen:** Wie Sie ein YOLOv8-Modell auf Ihrem eigenen Datensatz in Google Colab trainieren.
+ +## YOLO: Eine kurze Geschichte + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), ein beliebtes Modell zur Objekterkennung und Bildsegmentierung, wurde von Joseph Redmon und Ali Farhadi an der Universitรคt von Washington entwickelt. Seit seiner Einfรผhrung im Jahr 2015 erfreut es sich aufgrund seiner hohen Geschwindigkeit und Genauigkeit groรŸer Beliebtheit. + +- [YOLOv2](https://arxiv.org/abs/1612.08242), verรถffentlicht im Jahr 2016, verbesserte das Originalmodell durch die Einfรผhrung von Batch-Normalisierung, Ankerkรคsten und Dimensionsclustern. +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), eingefรผhrt im Jahr 2018, erhรถhte die Leistung des Modells weiter mit einem effizienteren Backbone-Netzwerk, mehreren Ankern und rรคumlichem Pyramid-Pooling. +- [YOLOv4](https://arxiv.org/abs/2004.10934) wurde 2020 verรถffentlicht und brachte Neuerungen wie Mosaic-Datenerweiterung, einen neuen ankerfreien Erkennungskopf und eine neue Verlustfunktion. +- [YOLOv5](https://github.com/ultralytics/yolov5) verbesserte die Leistung des Modells weiter und fรผhrte neue Funktionen ein, wie Hyperparameter-Optimierung, integriertes Experiment-Tracking und automatischen Export in beliebte Exportformate. +- [YOLOv6](https://github.com/meituan/YOLOv6) wurde 2022 von [Meituan](https://about.meituan.com/) als Open Source zur Verfรผgung gestellt und wird in vielen autonomen Lieferrobotern des Unternehmens eingesetzt. +- [YOLOv7](https://github.com/WongKinYiu/yolov7) fรผhrte zusรคtzliche Aufgaben ein, wie Posenschรคtzung auf dem COCO-Keypoints-Datensatz. +- [YOLOv8](https://github.com/ultralytics/ultralytics) ist die neueste Version von YOLO von Ultralytics. Als Spitzenmodell der neuesten Generation baut YOLOv8 auf dem Erfolg vorheriger Versionen auf und fรผhrt neue Funktionen und Verbesserungen fรผr erhรถhte Leistung, Flexibilitรคt und Effizienz ein. YOLOv8 unterstรผtzt eine vollstรคndige Palette an Vision-KI-Aufgaben, einschlieรŸlich [Erkennung](tasks/detect.md), [Segmentierung](tasks/segment.md), [Posenschรคtzung](tasks/pose.md), [Verfolgung](modes/track.md) und [Klassifizierung](tasks/classify.md). Diese Vielseitigkeit ermรถglicht es Benutzern, die Fรคhigkeiten von YOLOv8 in verschiedenen Anwendungen und Domรคnen zu nutzen. + +## YOLO-Lizenzen: Wie wird Ultralytics YOLO lizenziert? + +Ultralytics bietet zwei Lizenzoptionen, um unterschiedliche Einsatzszenarien zu berรผcksichtigen: + +- **AGPL-3.0-Lizenz**: Diese [OSI-geprรผfte](https://opensource.org/licenses/) Open-Source-Lizenz ist ideal fรผr Studenten und Enthusiasten und fรถrdert offene Zusammenarbeit und Wissensaustausch. Weitere Details finden Sie in der [LIZENZ](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)-Datei. +- **Enterprise-Lizenz**: Fรผr die kommerzielle Nutzung konzipiert, ermรถglicht diese Lizenz die problemlose Integration von Ultralytics-Software und KI-Modellen in kommerzielle Produkte und Dienstleistungen und umgeht die Open-Source-Anforderungen der AGPL-3.0. Wenn Ihr Szenario die Einbettung unserer Lรถsungen in ein kommerzielles Angebot beinhaltet, kontaktieren Sie uns รผber [Ultralytics-Lizenzierung](https://ultralytics.com/license). + +Unsere Lizenzstrategie ist darauf ausgerichtet sicherzustellen, dass jegliche Verbesserungen an unseren Open-Source-Projekten der Gemeinschaft zurรผckgegeben werden. Wir halten die Prinzipien von Open Source in Ehren โค๏ธ und es ist unser Anliegen, dass unsere Beitrรคge auf Weisen genutzt und erweitert werden kรถnnen, die fรผr alle vorteilhaft sind. 
diff --git a/ultralytics/docs/de/index.md:Zone.Identifier b/ultralytics/docs/de/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/fast-sam.md b/ultralytics/docs/de/models/fast-sam.md
new file mode 100755
index 0000000..e726c5c
--- /dev/null
+++ b/ultralytics/docs/de/models/fast-sam.md
@@ -0,0 +1,193 @@
+---
+comments: true
+description: Explore FastSAM, a CNN-based solution for real-time segmentation of objects in images. Improved user interaction, computational efficiency, and adaptability across vision tasks.
+keywords: FastSAM, machine learning, CNN-based solution, object segmentation, real-time solution, Ultralytics, vision tasks, image processing, industrial applications, user interaction
+---
+
+# Fast Segment Anything Model (FastSAM)
+
+The Fast Segment Anything Model (FastSAM) is a novel, real-time CNN-based solution for the Segment Anything task. This task is designed to segment any object within an image based on various possible user interaction prompts. FastSAM significantly reduces computational demands while maintaining competitive performance, making it practical for a variety of vision tasks.
+
+![Fast Segment Anything Model (FastSAM) architecture overview](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg)
+
+## Overview
+
+FastSAM is designed to address the limitations of the [Segment Anything Model (SAM)](sam.md), a heavy Transformer model with substantial computational resource requirements. FastSAM decouples the Segment Anything task into two sequential stages: all-instance segmentation and prompt-guided selection. The first stage uses [YOLOv8-seg](../tasks/segment.md) to produce the segmentation masks of all instances in the image. In the second stage, it outputs the region of interest corresponding to the prompt.
+
+## Key Features
+
+1. **Real-time Solution:** By leveraging the computational efficiency of CNNs, FastSAM provides a real-time solution for the Segment Anything task, making it valuable for industrial applications that require fast results.
+
+2. **Efficiency and Performance:** FastSAM offers a significant reduction in computational and resource demands without compromising performance quality. It achieves performance comparable to SAM while using drastically reduced computational resources, enabling real-time application.
+
+3. **Prompt-guided Segmentation:** FastSAM can segment any object within an image guided by various possible user interaction prompts, providing flexibility and adaptability in different scenarios.
+
+4. **Based on YOLOv8-seg:** FastSAM is based on [YOLOv8-seg](../tasks/segment.md), an object detector equipped with an instance segmentation branch. This allows it to effectively produce the segmentation masks of all instances in an image.
+
+5. **Competitive Results on Benchmarks:** On the object proposal task on MS COCO, FastSAM achieves high scores at a significantly faster speed than [SAM](sam.md) on a single NVIDIA RTX 3090, demonstrating its efficiency and capability.
+
+6. **Practical Applications:** The proposed approach provides a new, practical solution for a large number of vision tasks at very high speed, tens or hundreds of times faster than existing methods.
+
+7. **Model Compression Feasibility:** FastSAM demonstrates that the computational effort can be significantly reduced by introducing an artificial prior into the structure, opening new possibilities for large model architectures for general vision tasks.
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| FastSAM-s  | `FastSAM-s.pt`      | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+| FastSAM-x  | `FastSAM-x.pt`      | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+
+## Usage Examples
+
+The FastSAM models are easy to integrate into your Python applications. Ultralytics provides a user-friendly Python API and CLI commands to streamline development.
+
+### Predict Usage
+
+To perform object detection on an image, use the `predict` method as shown below:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM
+        from ultralytics.models.fastsam import FastSAMPrompt
+
+        # Define an inference source
+        source = 'path/to/bus.jpg'
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Run inference on an image
+        everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
+
+        # Prepare a Prompt Process object
+        prompt_process = FastSAMPrompt(source, everything_results, device='cpu')
+
+        # Everything prompt
+        ann = prompt_process.everything_prompt()
+
+        # Bbox default shape [0,0,0,0] -> [x1,y1,x2,y2]
+        ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])
+
+        # Text prompt
+        ann = prompt_process.text_prompt(text='a photo of a dog')
+
+        # Point prompt
+        # points default [[0,0]] [[x1,y1],[x2,y2]]
+        # point_label default [0] [1,0] 0:background, 1:foreground
+        ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
+        prompt_process.plot(annotations=ann, output='./')
+        ```
+
+    === "CLI"
+        ```bash
+        # Load a FastSAM model and segment everything with it
+        yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640
+        ```
+
+This snippet demonstrates the simplicity of loading a pre-trained model and running a prediction on an image.
+
+### Val Usage
+
+Validation of the model on a dataset can be done as follows:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Validate the model
+        results = model.val(data='coco8-seg.yaml')
+        ```
+
+    === "CLI"
+        ```bash
+        # Load a FastSAM model and validate it on the COCO8 example dataset at image size 640
+        yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640
+        ```
+
+Please note that FastSAM only supports detection and segmentation of a single object class. This means it will recognize and segment all objects as the same class. Therefore, when preparing your dataset, you need to convert all object category IDs to 0.
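+As a concrete illustration of that last step, here is a minimal sketch that rewrites the class IDs in YOLO-format `*.txt` label files to 0. The `labels` directory name and file layout are assumptions about your dataset; adapt them to your setup.
+
+```python
+from pathlib import Path
+
+# Hypothetical location of YOLO-format labels: one object per line,
+# where the first token is the class ID followed by the coordinates
+label_dir = Path('path/to/dataset/labels')
+
+for label_file in label_dir.glob('*.txt'):
+    lines = label_file.read_text().splitlines()
+    # Replace each line's leading class ID with 0, keeping the coordinates unchanged
+    remapped = ['0 ' + line.split(maxsplit=1)[1] for line in lines if line.strip()]
+    label_file.write_text('\n'.join(remapped) + '\n')
+```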
Example "Beispiel" + + === "Python" + ```python + from ultralytics import FastSAM + + # Erstellen Sie ein FastSAM-Modell + model = FastSAM('FastSAM-s.pt') # oder FastSAM-x.pt + + # Validieren Sie das Modell + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # Laden Sie ein FastSAM-Modell und validieren Sie es auf dem COCO8-Beispieldatensatz mit BildgrรถรŸe 640 + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +Bitte beachten Sie, dass FastSAM nur die Erkennung und Segmentierung einer einzigen Objektklasse unterstรผtzt. Das bedeutet, dass es alle Objekte als dieselbe Klasse erkennt und segmentiert. Daher mรผssen Sie beim Vorbereiten des Datensatzes alle Objektkategorie-IDs in 0 umwandeln. + +## Offizielle Verwendung von FastSAM + +FastSAM ist auch direkt aus dem [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) Repository erhรคltlich. Hier ist ein kurzer รœberblick รผber die typischen Schritte, die Sie unternehmen kรถnnten, um FastSAM zu verwenden: + +### Installation + +1. Klonen Sie das FastSAM-Repository: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Erstellen und aktivieren Sie eine Conda-Umgebung mit Python 3.9: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. Navigieren Sie zum geklonten Repository und installieren Sie die erforderlichen Pakete: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. Installieren Sie das CLIP-Modell: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### Beispielverwendung + +1. Laden Sie eine [Modell-Sicherung](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing) herunter. + +2. Verwenden Sie FastSAM fรผr Inferenz. Beispielbefehle: + + - Segmentieren Sie alles in einem Bild: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - Segmentieren Sie bestimmte Objekte anhand eines Textprompts: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "der gelbe Hund" + ``` + + - Segmentieren Sie Objekte innerhalb eines Begrenzungsrahmens (geben Sie die Boxkoordinaten im xywh-Format an): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - Segmentieren Sie Objekte in der Nรคhe bestimmter Punkte: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +Sie kรถnnen FastSAM auch รผber eine [Colab-Demo](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) oder die [HuggingFace-Web-Demo](https://huggingface.co/spaces/An-619/FastSAM) testen, um eine visuelle Erfahrung zu machen. + +## Zitate und Danksagungen + +Wir mรถchten den Autoren von FastSAM fรผr ihre bedeutenden Beitrรคge auf dem Gebiet der Echtzeit-Instanzsegmentierung danken: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +Die ursprรผngliche FastSAM-Arbeit ist auf [arXiv](https://arxiv.org/abs/2306.12156) zu finden. 
+The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/de/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/de/models/fast-sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/fast-sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/index.md b/ultralytics/docs/de/models/index.md
new file mode 100755
index 0000000..4482ccc
--- /dev/null
+++ b/ultralytics/docs/de/models/index.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Explore the diverse range of YOLO family, SAM, MobileSAM, FastSAM, YOLO-NAS, and RT-DETR models supported by Ultralytics. Get started with examples for both CLI and Python usage.
+keywords: Ultralytics, documentation, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, models, architectures, Python, CLI
+---
+
+# Models Supported by Ultralytics
+
+Welcome to Ultralytics' model documentation! We offer support for a wide range of models, each tailored to specific tasks like [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you're interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../../help/contributing.md).
+
+!!! Note "Note"
+
+    🚧 Our documentation in different languages is currently under construction, and we're working hard to improve it. Thank you for your patience! 🙏
+
+## Featured Models
+
+Here are some of the key models supported:
+
+1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family, originally by Joseph Redmon, known for its efficient real-time object detection capabilities.
+2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020.
+3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions.
+4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022 and in use in many of the company's autonomous delivery robots.
+5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4.
+6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoint estimation, and classification.
+7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM).
+8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University.
+9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by the Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences.
+10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
+11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models.
+
+ Watch: Run Ultralytics YOLO models in just a few lines of code.
+
+## Getting Started: Usage Examples
+
+This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+Note that the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()`, and `RTDETR()` classes to create a model instance in Python (a sketch covering the other classes follows this example):
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
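+Since the tabbed example above only instantiates `YOLO()`, here is a minimal sketch of the same pattern applied to the other model classes. The checkpoint filenames are the ones used on the corresponding model pages of these docs; treat the exact selection as illustrative.
+
+```python
+from ultralytics import NAS, RTDETR, SAM, YOLO
+
+# Each class accepts a pretrained *.pt checkpoint
+detector = YOLO('yolov8n.pt')        # YOLOv8 detection model
+transformer = RTDETR('rtdetr-l.pt')  # Baidu's RT-DETR large model
+segmenter = SAM('sam_b.pt')          # Meta's SAM base model
+nas_model = NAS('yolo_nas_s.pt')     # Deci AI's YOLO-NAS small model
+
+# They share the same high-level API, e.g. for model information
+for model in (detector, transformer, segmenter, nas_model):
+    model.info()
+```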
+
+## Contributing New Models
+
+Interested in contributing your model to Ultralytics? Great! We're always open to expanding our model portfolio.
+
+1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics).
+
+2. **Clone Your Fork**: Clone your fork to your local machine and create a new branch to work on.
+
+3. **Implement Your Model**: Add your model following the coding standards and guidelines provided in our [Contributing Guide](../../help/contributing.md).
+
+4. **Test Thoroughly**: Make sure to test your model rigorously, both in isolation and as part of the pipeline.
+
+5. **Create a Pull Request**: Once you're satisfied with your model, create a pull request to the main repository for review.
+
+6. **Code Review & Merging**: After review, if your model meets our criteria, it will be merged into the main repository.
+
+For detailed steps, consult our [Contributing Guide](../../help/contributing.md).
diff --git a/ultralytics/docs/de/models/index.md:Zone.Identifier b/ultralytics/docs/de/models/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/mobile-sam.md b/ultralytics/docs/de/models/mobile-sam.md
new file mode 100755
index 0000000..7f30175
--- /dev/null
+++ b/ultralytics/docs/de/models/mobile-sam.md
@@ -0,0 +1,116 @@
+---
+comments: true
+description: Learn more about MobileSAM, its implementation, comparison with the original SAM, and how to download and test it in the Ultralytics framework. Improve your mobile applications today.
+keywords: MobileSAM, Ultralytics, SAM, mobile applications, Arxiv, GPU, API, image encoder, mask decoder, model download, testing method
+---
+
+![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true)
+
+# Mobile Segment Anything (MobileSAM)
+
+The MobileSAM paper is now available on [arXiv](https://arxiv.org/pdf/2306.14289.pdf).
+
+A demonstration of MobileSAM running on a CPU can be accessed at this [demo link](https://huggingface.co/spaces/dhkim2810/MobileSAM). On a Mac i5 CPU, inference takes approximately 3 seconds. In the Hugging Face demo, the interface together with lower-performance CPUs leads to a slower response, but the model continues to work effectively.
+
+MobileSAM is implemented in various projects, including [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling), and [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D).
+
+MobileSAM is trained on a single GPU with a 100k dataset (1% of the original images) in less than a day. The code for this training will be made available in the future.
+
+## Available Models, Supported Tasks, and Operating Modes
+
+The following table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). Supported modes are indicated by ✅ emojis and unsupported modes by ❌ emojis.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| MobileSAM  | `mobile_sam.pt`     | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+
+## Adapting from SAM to MobileSAM
+
+Since MobileSAM retains the same pipeline as the original SAM, we have incorporated the original's pre-processing, post-processing, and all other interfaces. Consequently, those currently using the original SAM can switch to MobileSAM with minimal effort.
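+To make that "minimal effort" concrete, here is a small sketch in which switching from SAM to MobileSAM is a one-line change of checkpoint, while the prompt-based call stays identical (both usages appear in the examples further down this page):
+
+```python
+from ultralytics import SAM
+
+# Original SAM base checkpoint; swap in 'mobile_sam.pt' to use MobileSAM instead
+model = SAM('sam_b.pt')
+# model = SAM('mobile_sam.pt')
+
+# The same point-prompt prediction works with either checkpoint
+model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+```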
+MobileSAM performs comparably to the original SAM and retains the same pipeline except for a change in the image encoder. Specifically, we replace the original heavyweight ViT-H encoder (632M) with a smaller Tiny-ViT encoder (5M). On a single GPU, MobileSAM takes about 12 ms per image: 8 ms for the image encoder and 4 ms for the mask decoder.
+
+The following table compares the ViT-based image encoders:
+
+| Image Encoder | Original SAM | MobileSAM |
+|---------------|--------------|-----------|
+| Parameters    | 611M         | 5M        |
+| Speed         | 452ms        | 8ms       |
+
+Both the original SAM and MobileSAM use the same prompt-guided mask decoder:
+
+| Mask Decoder | Original SAM | MobileSAM |
+|--------------|--------------|-----------|
+| Parameters   | 3.876M       | 3.876M    |
+| Speed        | 4ms          | 4ms       |
+
+Here is a comparison of the whole pipeline:
+
+| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM |
+|--------------------------|--------------|-----------|
+| Parameters               | 615M         | 9.66M     |
+| Speed                    | 456ms        | 12ms      |
+
+The performance of MobileSAM and the original SAM is demonstrated using both a point and a box as prompts.
+
+![Image with point as prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+![Image with box as prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+With its superior performance, MobileSAM is approximately 5 times smaller and 7 times faster than the current FastSAM. More details are available on the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM).
+
+## Testing MobileSAM in Ultralytics
+
+Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both point and box prompts.
+
+### Model Download
+
+You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt).
+
+### Point Prompt
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a point prompt
+        model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+### Box Prompt
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a box prompt
+        model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+        ```
+
+We have implemented `MobileSAM` and `SAM` using the same API. For more usage information, please see the [SAM page](sam.md).
+
+## Citations and Acknowledgements
+
+If you find MobileSAM useful in your research or development work, please consider citing our paper:
Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } diff --git a/ultralytics/docs/de/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/de/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/models/rtdetr.md b/ultralytics/docs/de/models/rtdetr.md new file mode 100755 index 0000000..dbc0b41 --- /dev/null +++ b/ultralytics/docs/de/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: Entdecken Sie die Funktionen und Vorteile von RT-DETR, dem effizienten und anpassungsfรคhigen Echtzeitobjektdetektor von Baidu, der von Vision Transformers unterstรผtzt wird, einschlieรŸlich vortrainierter Modelle. +keywords: RT-DETR, Baidu, Vision Transformers, Objekterkennung, Echtzeitleistung, CUDA, TensorRT, IoU-bewusste Query-Auswahl, Ultralytics, Python API, PaddlePaddle +--- + +# Baidus RT-DETR: Ein Echtzeit-Objektdetektor auf Basis von Vision Transformers + +## รœberblick + +Der Real-Time Detection Transformer (RT-DETR), entwickelt von Baidu, ist ein moderner End-to-End-Objektdetektor, der Echtzeitleistung mit hoher Genauigkeit bietet. Er nutzt die Leistung von Vision Transformers (ViT), um Multiskalen-Funktionen effizient zu verarbeiten, indem intra-skaliere Interaktion und eine skalenรผbergreifende Fusion entkoppelt werden. RT-DETR ist hoch anpassungsfรคhig und unterstรผtzt flexible Anpassung der Inferenzgeschwindigkeit durch Verwendung verschiedener Decoder-Schichten ohne erneutes Training. Das Modell รผbertrifft viele andere Echtzeit-Objektdetektoren auf beschleunigten Backends wie CUDA mit TensorRT. + +![Beispielbild des Modells](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png) +**รœbersicht von Baidus RT-DETR.** Die Modellarchitekturdiagramm des RT-DETR zeigt die letzten drei Stufen des Backbone {S3, S4, S5} als Eingabe fรผr den Encoder. Der effiziente Hybrid-Encoder verwandelt Multiskalen-Funktionen durch intraskalare Feature-Interaktion (AIFI) und das skalenรผbergreifende Feature-Fusion-Modul (CCFM) in eine Sequenz von Bildmerkmalen. Die IoU-bewusste Query-Auswahl wird verwendet, um eine feste Anzahl von Bildmerkmalen als anfรคngliche Objekt-Queries fรผr den Decoder auszuwรคhlen. Der Decoder optimiert iterativ Objekt-Queries, um Boxen und Vertrauenswerte zu generieren ([Quelle](https://arxiv.org/pdf/2304.08069.pdf)). + +### Hauptmerkmale + +- **Effizienter Hybrid-Encoder:** Baidus RT-DETR verwendet einen effizienten Hybrid-Encoder, der Multiskalen-Funktionen verarbeitet, indem intra-skaliere Interaktion und eine skalenรผbergreifende Fusion entkoppelt werden. Dieses einzigartige Design auf Basis von Vision Transformers reduziert die Rechenkosten und ermรถglicht die Echtzeit-Objekterkennung. +- **IoU-bewusste Query-Auswahl:** Baidus RT-DETR verbessert die Initialisierung von Objekt-Queries, indem IoU-bewusste Query-Auswahl verwendet wird. Dadurch kann das Modell sich auf die relevantesten Objekte in der Szene konzentrieren und die Erkennungsgenauigkeit verbessern. 
+- **Adaptable Inference Speed:** Baidu's RT-DETR supports flexible adjustment of inference speed by using different decoder layers without retraining. This adaptability facilitates practical deployment in a variety of real-time object detection scenarios.
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models at different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on T4 GPU
+
+## Usage Examples
+
+The following example provides simple training and inference examples for RT-DETR. For full documentation on these and other [modes](../modes/index.md), see the docs pages for [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md).
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Load a COCO-pretrained RT-DETR-l model
+        model = RTDETR('rtdetr-l.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the RT-DETR-l model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a COCO-pretrained RT-DETR-l model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained RT-DETR-l model and run inference on the 'bus.jpg' image
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+This table presents the model types, the specific pre-trained weights, the tasks supported by each model, and the modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) that are supported, indicated by ✅ emojis.
+
+| Model Type          | Pre-trained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|---------------------|---------------------|----------------------------------------|-----------|------------|----------|--------|
+| RT-DETR Large       | `rtdetr-l.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| RT-DETR Extra-Large | `rtdetr-x.pt`       | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+## Citations and Acknowledgements
+
+If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069):
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{lv2023detrs,
+              title={DETRs Beat YOLOs on Real-time Object Detection},
+              author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
+              year={2023},
+              eprint={2304.08069},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to acknowledge Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community.
+Their contribution to the field with the development of RT-DETR, the Vision Transformer-based real-time object detector, is greatly appreciated.
+
+*Keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, real-time object detection, Vision Transformer-based object detection, pre-trained PaddlePaddle RT-DETR models, using Baidu's RT-DETR, Ultralytics Python API*
diff --git a/ultralytics/docs/de/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/de/models/rtdetr.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/rtdetr.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/sam.md b/ultralytics/docs/de/models/sam.md
new file mode 100755
index 0000000..9ca50b9
--- /dev/null
+++ b/ultralytics/docs/de/models/sam.md
@@ -0,0 +1,226 @@
+---
+comments: true
+description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time image segmentation. Learn about its promptable segmentation, zero-shot performance, and how to use it.
+keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
+---
+
+# Segment Anything Model (SAM)
+
+Welcome to the frontier of image segmentation with the Segment Anything Model (SAM). This revolutionary model has set new standards in the field with promptable image segmentation and real-time performance.
+
+## Introduction to SAM: The Segment Anything Model
+
+The Segment Anything Model (SAM) is a cutting-edge image segmentation model that enables promptable segmentation, providing unparalleled versatility in image analysis. SAM forms the heart of the Segment Anything initiative, a groundbreaking project that introduces a novel model, task, and dataset for image segmentation.
+
+Thanks to its advanced design, SAM can adapt to new image distributions and tasks without prior knowledge, a capability known as zero-shot transfer. SAM was trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks on 11 million carefully curated images. SAM has displayed impressive zero-shot performance, surpassing previous fully supervised results in many cases.
+
+![Dataset sample image](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg)
+Example images with overlaid masks from our newly introduced dataset, SA-1B. SA-1B contains 11 million diverse, high-resolution, licensed, and privacy-protecting images and 1.1 billion high-quality segmentation masks. These were annotated fully automatically by SAM, and as verified by human ratings and numerous experiments, they are of high quality and diversity. Images are grouped by the number of masks per image (on average there are about 100 masks per image).
+
+## Key Features of the Segment Anything Model (SAM)
+
+- **Promptable Segmentation Task:** SAM was designed with a promptable segmentation task in mind, allowing it to generate valid segmentation masks from any given prompt, such as spatial or textual clues identifying an object.
+- **Advanced Architecture:** The Segment Anything Model employs a powerful image encoder, a prompt encoder, and a lightweight mask decoder. This unique architecture enables flexible prompting, real-time mask computation, and ambiguity awareness in segmentation tasks.
+- **The SA-1B Dataset:** Introduced by the Segment Anything project, the SA-1B dataset features over 1 billion masks on 11 million images. As the largest segmentation dataset to date, it provides SAM with a diverse and large-scale data source for training.
+- **Zero-Shot Performance:** SAM displays outstanding zero-shot performance across various segmentation tasks, making it a ready-to-use tool for diverse applications with minimal need for prompt engineering.
+
+For an in-depth look at the Segment Anything Model and the SA-1B dataset, please visit the [Segment Anything website](https://segment-anything.com) and read the research paper [Segment Anything](https://arxiv.org/abs/2304.02643).
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| SAM base   | `sam_b.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+| SAM large  | `sam_l.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅        | ❌         | ❌       | ✅     |
+
+## How to Use SAM: Versatility and Power in Image Segmentation
+
+The Segment Anything Model can be employed for a multitude of tasks that go beyond its training data, including edge detection, object proposal generation, instance segmentation, and preliminary text-to-mask prediction. With prompt engineering, SAM can swiftly adapt to new tasks and data distributions, establishing itself as a versatile and powerful tool for all your image segmentation needs.
+
+### SAM Prediction Example
+
+!!! Example "Segment with prompts"
+
+    Segment an image with given prompts.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load a model
+        model = SAM('sam_b.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Run inference with a bounding box prompt
+        model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+
+        # Run inference with a point prompt
+        model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+!!! Example "Segment everything"
+
+    Segment the whole image.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load a model
+        model = SAM('sam_b.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Run inference
+        model('path/to/image.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Run inference with a SAM model
+        yolo predict model=sam_b.pt source=path/to/image.jpg
+        ```
+
+- The logic here is to segment the whole image if no prompts (bounding box/point/mask) are passed.
+
+!!! Example "SAMPredictor example"
+
+    This lets you set the image once and run prompt inference multiple times without running the image encoder repeatedly.
+
+    === "Prompt inference"
+
+        ```python
+        import cv2
+
+        from ultralytics.models.sam import Predictor as SAMPredictor
+
+        # Create SAMPredictor
+        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
+        predictor = SAMPredictor(overrides=overrides)
+
+        # Set image
+        predictor.set_image("ultralytics/assets/zidane.jpg")  # set with image file
+        predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg"))  # set with np.ndarray
+        results = predictor(bboxes=[439, 437, 524, 709])
+        results = predictor(points=[900, 370], labels=[1])
+
+        # Reset image
+        predictor.reset_image()
+        ```
+
+    Segment everything with additional arguments.
+
+    === "Segment everything"
+
+        ```python
+        from ultralytics.models.sam import Predictor as SAMPredictor
+
+        # Create SAMPredictor
+        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
+        predictor = SAMPredictor(overrides=overrides)
+
+        # Segment with additional arguments
+        results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64)
+        ```
+
+- See the [`Predictor/generate` reference](../../../reference/models/sam/predict.md) for more additional arguments for `Segment everything`.
+
+## SAM Comparison vs. YOLOv8
+
+Here we compare Meta's smallest SAM model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):
+
+| Model                                          | Size                       | Parameters                | Speed (CPU)                |
+|------------------------------------------------|----------------------------|---------------------------|----------------------------|
+| Meta's SAM-b                                   | 358 MB                     | 94.7 M                    | 51096 ms/im                |
+| [MobileSAM](mobile-sam.md)                     | 40.7 MB                    | 10.1 M                    | 46122 ms/im                |
+| [FastSAM-s](fast-sam.md) with YOLOv8 backbone  | 23.7 MB                    | 11.8 M                    | 115 ms/im                  |
+| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x smaller) | **59 ms/im** (866x faster) |
+
+This comparison shows the differences in model sizes and speeds. While SAM offers unique capabilities for automatic segmentation, it is not a direct competitor to YOLOv8 segmentation models, which are smaller, faster, and more efficient.
+
+The tests were run on a 2023 Apple M2 MacBook with 16 GB of RAM. To reproduce this test:
Example "Beispiel" + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # SAM-b profilieren + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # MobileSAM profilieren + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # FastSAM-s profilieren + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # YOLOv8n-seg profilieren + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## Auto-Annotierung: Der schnelle Weg zu Segmentierungsdatensรคtzen + +Die Auto-Annotierung ist eine wichtige Funktion von SAM, mit der Benutzer mithilfe eines vortrainierten Detektionsmodells einen [Segmentierungsdatensatz](https://docs.ultralytics.com/datasets/segment) generieren kรถnnen. Diese Funktion ermรถglicht eine schnelle und genaue Annotation einer groรŸen Anzahl von Bildern, ohne dass zeitaufwรคndiges manuelles Labeling erforderlich ist. + +### Generieren Sie Ihren Segmentierungsdatensatz mit einem Detektionsmodell + +Um Ihren Datensatz mit dem Ultralytics-Framework automatisch zu annotieren, verwenden Sie die `auto_annotate` Funktion wie folgt: + +!!! Example "Beispiel" + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="Pfad/zum/Bilderordner", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| Argument | Typ | Beschreibung | Standard | +|------------|---------------------|---------------------------------------------------------------------------------------------------------------------------|--------------| +| data | str | Pfad zu einem Ordner, der die zu annotierenden Bilder enthรคlt. | | +| det_model | str, optional | Vortrainiertes YOLO-Detektionsmodell. StandardmรครŸig 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | str, optional | Vortrainiertes SAM-Segmentierungsmodell. StandardmรครŸig 'sam_b.pt'. | 'sam_b.pt' | +| device | str, optional | Gerรคt, auf dem die Modelle ausgefรผhrt werden. StandardmรครŸig ein leerer String (CPU oder GPU, falls verfรผgbar). | | +| output_dir | str, None, optional | Verzeichnis zum Speichern der annotierten Ergebnisse. StandardmรครŸig ein 'labels'-Ordner im selben Verzeichnis wie 'data'. | None | + +Die `auto_annotate` Funktion nimmt den Pfad zu Ihren Bildern entgegen, mit optionalen Argumenten fรผr das vortrainierte Detektions- und SAM-Segmentierungsmodell, das Gerรคt, auf dem die Modelle ausgefรผhrt werden sollen, und das Ausgabeverzeichnis, in dem die annotierten Ergebnisse gespeichert werden sollen. + +Die Auto-Annotierung mit vortrainierten Modellen kann die Zeit und den Aufwand fรผr die Erstellung hochwertiger Segmentierungsdatensรคtze erheblich reduzieren. Diese Funktion ist besonders vorteilhaft fรผr Forscher und Entwickler, die mit groรŸen Bildersammlungen arbeiten. Sie ermรถglicht es ihnen, sich auf die Modellentwicklung und -bewertung zu konzentrieren, anstatt auf die manuelle Annotation. + +## Zitate und Danksagungen + +Wenn Sie SAM in Ihrer Forschungs- oder Entwicklungsarbeit nรผtzlich finden, erwรคgen Sie bitte, unser Paper zu zitieren: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. 
+              year={2023},
+              eprint={2304.02643},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community.
+
+*Keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot performance, SA-1B dataset, advanced architecture, auto-annotation, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, artificial intelligence, machine learning, data annotation, segmentation masks, detection model, YOLO detection model, bibtex, Meta AI.*
diff --git a/ultralytics/docs/de/models/sam.md:Zone.Identifier b/ultralytics/docs/de/models/sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/yolo-nas.md b/ultralytics/docs/de/models/yolo-nas.md
new file mode 100755
index 0000000..c1d8ed7
--- /dev/null
+++ b/ultralytics/docs/de/models/yolo-nas.md
@@ -0,0 +1,121 @@
+---
+comments: true
+description: Learn more about YOLO-NAS, an outstanding object detection model. Learn about its features, pre-trained models, usage with the Ultralytics Python API, and more.
+keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
+---
+
+# YOLO-NAS
+
+## Overview
+
+Developed by Deci AI, YOLO-NAS is a groundbreaking object detection model. It is the product of advanced Neural Architecture Search technology, meticulously designed to address the limitations of previous YOLO models. With significant improvements in quantization support and accuracy-latency trade-offs, YOLO-NAS represents a major leap forward in object detection.
+
+![Model example image](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png)
+**Overview of YOLO-NAS.** YOLO-NAS employs quantization blocks and selective quantization for optimal performance. The model experiences minimal precision loss when converted to its INT8 quantized version, a significant improvement over other models. These advances culminate in a superior architecture with unprecedented object detection capabilities and outstanding performance.
+
+### Key Features
+
+- **Quantization-Friendly Basic Block:** YOLO-NAS introduces a new basic block that is friendly to quantization, addressing one of the significant limitations of previous YOLO models.
+- **Sophisticated Training and Quantization:** YOLO-NAS leverages advanced training schemes and post-training quantization to enhance performance.
+- **AutoNAC Optimization and Pre-training:** YOLO-NAS utilizes AutoNAC optimization and is pre-trained on prominent datasets such as COCO, Objects365, and Roboflow 100. This pre-training makes it extremely suitable for object detection in production environments.
+
+## Pre-trained Models
+
+Experience the power of next-generation object detection with the pre-trained YOLO-NAS models provided by Ultralytics.
+These models are designed to deliver excellent performance in terms of both speed and accuracy. Choose from a variety of options tailored to your specific needs:
+
+| Model            | mAP   | Latency (ms) |
+|------------------|-------|--------------|
+| YOLO-NAS S       | 47.5  | 3.21         |
+| YOLO-NAS M       | 51.55 | 5.85         |
+| YOLO-NAS L       | 52.22 | 7.87         |
+| YOLO-NAS S INT-8 | 47.03 | 2.36         |
+| YOLO-NAS M INT-8 | 51.0  | 3.78         |
+| YOLO-NAS L INT-8 | 52.1  | 4.78         |
+
+Each model variant is designed to offer a balance between Mean Average Precision (mAP) and latency, helping you optimize your object detection tasks for both performance and speed.
+
+## Usage Examples
+
+Ultralytics has made YOLO-NAS models easy to integrate into your Python applications via our `ultralytics` Python package. The package provides a user-friendly Python API to streamline the process.
+
+The following examples show how to use YOLO-NAS models with the `ultralytics` package for inference and validation:
+
+### Inference and Validation Examples
+
+In this example we validate YOLO-NAS-s on the COCO8 dataset.
+
+!!! Example "Example"
+
+    This example provides simple inference and validation code for YOLO-NAS. For handling inference results see the [Predict](../modes/predict.md) mode. For using YOLO-NAS with additional modes see [Val](../modes/val.md) and [Export](../modes/export.md). The YOLO-NAS model in the `ultralytics` package does not support training.
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` model files can be passed to the `NAS()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import NAS
+
+        # Load a COCO-pretrained YOLO-NAS-s model
+        model = NAS('yolo_nas_s.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Validate the model on the COCO8 example dataset
+        results = model.val(data='coco8.yaml')
+
+        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
+        yolo val model=yolo_nas_s.pt data=coco8.yaml
+
+        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
+        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+We offer three variants of the YOLO-NAS models: Small (s), Medium (m), and Large (l). Each variant is designed to cater to different computational and performance needs:
+
+- **YOLO-NAS-s**: Optimized for environments where computational resources are limited and efficiency is key.
+- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
+- **YOLO-NAS-l**: Tailored for scenarios requiring the highest accuracy, where computational resources are less of a constraint.
+
+Below is a detailed overview of each model, including links to their pre-trained weights, the tasks they support, and their compatibility with different operating modes.
+
+| Model Type | Pre-trained Weights                                                                            | Tasks Supported                        | Inference | Validation | Training | Export |
+|------------|------------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
+| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
+| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt)   | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ❌       | ✅     |
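+Since the table marks Export as supported while training is not, a short sketch of exporting a YOLO-NAS checkpoint may be useful; `format='onnx'` is one illustrative target format, see the [Export](../modes/export.md) page for the full list:
+
+```python
+from ultralytics import NAS
+
+# Load a COCO-pretrained YOLO-NAS-s model (inference, validation and export
+# are supported per the table above; training is not)
+model = NAS('yolo_nas_s.pt')
+
+# Export the model, e.g. to ONNX
+model.export(format='onnx')
+```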
+
+## Citations and Acknowledgements
+
+If you employ YOLO-NAS in your research or development work, please cite SuperGradients:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{supergradients,
+              doi = {10.5281/ZENODO.7789328},
+              url = {https://zenodo.org/record/7789328},
+              author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}},
+              title = {Super-Gradients},
+              publisher = {GitHub},
+              journal = {GitHub repository},
+              year = {2021},
+        }
+        ```
+
+We would like to thank Deci AI's [SuperGradients](https://github.com/Deci-AI/super-gradients/) team for their efforts in creating and maintaining this valuable resource for the computer vision community. We believe YOLO-NAS, with its innovative architecture and superior object detection capabilities, will become a critical tool for developers and researchers alike.
+
+*Keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, SuperGradients, pre-trained models, quantization-friendly basic block, advanced training schemes, post-training quantization, AutoNAC optimization, COCO, Objects365, Roboflow 100*
diff --git a/ultralytics/docs/de/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/de/models/yolo-nas.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/yolo-nas.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/yolov3.md b/ultralytics/docs/de/models/yolov3.md
new file mode 100755
index 0000000..de1cf46
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov3.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Get an overview of YOLOv3, YOLOv3-Ultralytics, and YOLOv3u. Learn about their key features, usage, and supported tasks for object detection.
+keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, object detection, inference, training, Ultralytics
+---
+
+# YOLOv3, YOLOv3-Ultralytics, and YOLOv3u
+
+## Overview
+
+This document presents an overview of three closely related object detection models, namely [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3), and [YOLOv3u](https://github.com/ultralytics/ultralytics).
+
+1. **YOLOv3:** This is the third version of the You Only Look Once (YOLO) object detection algorithm. Originally developed by Joseph Redmon, YOLOv3 improved on its predecessors by introducing features such as multi-scale predictions and three different sizes of detection kernels.
+
+2. **YOLOv3-Ultralytics:** This is Ultralytics' implementation of the YOLOv3 model. It reproduces the original YOLOv3 architecture and offers additional functionality, such as support for more pre-trained models and easier customization options.
+
+3. **YOLOv3u:** This is an updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split head used in the YOLOv8 models. YOLOv3u uses the same backbone and neck architecture as YOLOv3, but with the updated detection head from YOLOv8.
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three different scales for detection, using three different sizes of detection kernels: 13x13, 26x26, and 52x52. This significantly improved detection accuracy for objects of different sizes. Additionally, YOLOv3 added features such as multi-label predictions for each bounding box and a better feature extractor network.
+
+- **YOLOv3-Ultralytics:** Ultralytics' implementation of YOLOv3 provides the same performance as the original model but adds support for more pre-trained models, additional training methods, and easier customization options. This makes it more versatile and user-friendly for practical applications.
+
+- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head from YOLOv8. By eliminating the need for pre-defined anchor boxes and objectness scores, this detection head design can improve the model's ability to detect objects of varying sizes and shapes. This makes YOLOv3u more robust and accurate for object detection tasks.
+
+## Supported Tasks and Modes
+
+The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics, and YOLOv3u, is designed specifically for object detection tasks. These models are known for their effectiveness in various real-world scenarios, combining accuracy and speed. Each variant offers unique features and optimizations that make them suitable for a wide range of applications.
+
+All three models support a comprehensive set of modes, ensuring versatility in the various stages of model deployment and development. These modes include [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), providing users with a complete toolkit for effective object detection.
+
+| Model Type         | Tasks Supported                        | Inference | Validation | Training | Export |
+|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv3             | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3u            | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides an at-a-glance view of the capabilities of each YOLOv3 variant, highlighting their versatility and suitability for various tasks and operating modes in object detection workflows.
+
+## Usage Examples
+
+This example provides simple training and inference examples for YOLOv3. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv3n model
+        model = YOLO('yolov3n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv3n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv3n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv3n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov3n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use YOLOv3 in your research, please cite the original YOLO papers and the Ultralytics YOLOv3 repository:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{redmon2018yolov3,
+          title={YOLOv3: An Incremental Improvement},
+          author={Redmon, Joseph and Farhadi, Ali},
+          journal={arXiv preprint arXiv:1804.02767},
+          year={2018}
+        }
+        ```
+
+Thank you to Joseph Redmon and Ali Farhadi for developing the original YOLOv3.
diff --git a/ultralytics/docs/de/models/yolov3.md:Zone.Identifier b/ultralytics/docs/de/models/yolov3.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov3.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/yolov4.md b/ultralytics/docs/de/models/yolov4.md
new file mode 100755
index 0000000..3d3fd2d
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov4.md
@@ -0,0 +1,71 @@
+---
+comments: true
+description: Explore our detailed guide to YOLOv4, a state-of-the-art real-time object detector. Learn about its architectural highlights, innovative features, and application examples.
+keywords: ultralytics, YOLOv4, object detection, neural network, real-time detection, object detector, machine learning
+---
+
+# YOLOv4: Fast and Precise Object Detection
+
+Welcome to the Ultralytics documentation page for YOLOv4, a state-of-the-art, real-time object detector released in 2020 by Alexey Bochkovskiy at [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 is designed to provide the optimal balance between speed and accuracy, making it an excellent choice for many applications.
+
+![YOLOv4 architecture diagram](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png)
+**YOLOv4 architecture diagram**. Showcasing the intricate network design of YOLOv4, including the backbone, neck and head components and their interconnected layers for optimal real-time object detection.
+
+## Introduction
+
+YOLOv4 stands for You Only Look Once version 4. It is a real-time object detection model developed to address the limitations of previous YOLO versions such as [YOLOv3](yolov3.md) and other object detection models. Unlike other convolutional neural network (CNN) based object detectors, YOLOv4 is applicable not only to recommendation systems but also to standalone process management and reduction of human input. Its operation on conventional graphics processing units (GPUs) allows for mass usage at an affordable price, and it is designed to work in real time on a conventional GPU while requiring only one such GPU for training.
+
+## Architecture
+
+YOLOv4 makes use of several innovative features that work together to optimize its performance. These include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish activation, Mosaic data augmentation, DropBlock regularization and CIoU loss. These features are combined to achieve state-of-the-art results.
+
+A typical object detector is composed of several parts, including the input, the backbone, the neck and the head. The backbone of YOLOv4 is pre-trained on ImageNet and is used to predict the classes and bounding boxes of objects. The backbone can come from several models such as VGG, ResNet, ResNeXt or DenseNet. The neck part of the detector is used to collect feature maps from different stages and usually includes several bottom-up paths and several top-down paths. The head part is used to perform the final object detections and classifications.
+
+## Bag of Freebies
+
+YOLOv4 also uses methods known as "bag of freebies", techniques that improve the accuracy of the model during training without increasing the cost of inference. Data augmentation is a common bag-of-freebies technique used in object detection to increase the variability of the input images and improve the robustness of the model. Examples of data augmentation include photometric distortions (adjusting the brightness, contrast, hue, saturation and noise of an image) and geometric distortions (adding random scaling, cropping, flipping and rotation). These techniques help the model generalize better to different types of images.
+
+## Features and Performance
+
+YOLOv4 is designed for optimal speed and accuracy in object detection. The architecture of YOLOv4 includes CSPDarknet53 as the backbone, PANet as the neck and YOLOv3 as the detection head. This design allows YOLOv4 to perform object detection at an impressive speed, making it suitable for real-time applications. YOLOv4 also excels in accuracy, achieving state-of-the-art results in object detection benchmarks.
+
+## Usage Examples
+
+As of the time of writing, Ultralytics does not currently support YOLOv4 models. Therefore, users interested in using YOLOv4 will need to refer directly to the YOLOv4 GitHub repository for installation and usage instructions.
+
+Here is a brief overview of the typical steps you might take to use YOLOv4:
+
+1. Visit the YOLOv4 GitHub repository: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet).
+
+2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing the necessary dependencies and setting up any required environment variables.
+
+3. Once installation is complete, you can train and use the model as per the usage instructions provided in the repository. This usually involves preparing your dataset, configuring the model parameters, training the model and then using the trained model to perform object detection (a rough sketch of this workflow follows this list).
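+
+As a hedged illustration only (the build flags, config names and weight files below are assumptions based on common darknet usage; the repository README remains authoritative), the workflow often looks like this:
+
+```bash
+# Clone and build darknet (CPU build shown; set GPU=1 in the Makefile for CUDA support)
+git clone https://github.com/AlexeyAB/darknet
+cd darknet
+make
+
+# Run detection with pre-trained YOLOv4 weights on a sample image
+./darknet detector test cfg/coco.data cfg/yolov4.cfg yolov4.weights data/dog.jpg
+```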
+
+Please note that the specific steps may vary depending on your use case and the current state of the YOLOv4 repository. It is therefore strongly recommended to refer directly to the instructions provided in the YOLOv4 GitHub repository.
+
+We regret any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once support for YOLOv4 is implemented.
+
+## Conclusion
+
+YOLOv4 is a powerful and efficient object detection model that strikes a balance between speed and accuracy. Its use of unique features and bag-of-freebies techniques during training allows it to perform excellently in real-time object detection tasks. YOLOv4 can be trained and used by anyone with a conventional GPU, making it accessible and practical for a wide range of applications.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the YOLOv4 authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{bochkovskiy2020yolov4,
+              title={YOLOv4: Optimal Speed and Accuracy of Object Detection},
+              author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao},
+              year={2020},
+              eprint={2004.10934},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv4 paper can be found on [arXiv](https://arxiv.org/abs/2004.10934). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/AlexeyAB/darknet).
+We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/de/models/yolov4.md:Zone.Identifier b/ultralytics/docs/de/models/yolov4.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov4.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/yolov5.md b/ultralytics/docs/de/models/yolov5.md
new file mode 100755
index 0000000..b194f12
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov5.md
@@ -0,0 +1,113 @@
+---
+comments: true
+description: Discover YOLOv5u, an enhanced version of the YOLOv5 model with an optimized accuracy-speed tradeoff and numerous pre-trained models for various object detection tasks.
+keywords: YOLOv5u, object detection, pre-trained models, Ultralytics, inference, validation, YOLOv5, YOLOv8, anchor-free, objectness-free, real-time applications, machine learning
+---
+
+# YOLOv5
+
+## Overview
+
+YOLOv5u represents an advancement in object detection methodologies. Originating from the foundational architecture of the YOLOv5 model developed by Ultralytics, YOLOv5u integrates the anchor-free, objectness-free split head, a feature previously introduced in the YOLOv8 models. This adaptation refines the model's architecture, leading to an improved accuracy-speed tradeoff in object detection. Given the empirical results and its derived features, YOLOv5u provides an efficient alternative for those seeking robust solutions in both research and practical applications.
+
+![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png)
+
+## Key Features
+
+- **Anchor-free Split Ultralytics Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. YOLOv5u modernizes this approach. By adopting an anchor-free split Ultralytics head, it ensures a more flexible and adaptive detection mechanism, consequently enhancing performance in diverse scenarios.
+
+- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions, but YOLOv5u challenges this tradeoff. It offers a calibrated balance, ensuring real-time detections without compromising accuracy. This feature is particularly valuable for applications that demand swift responses, such as autonomous vehicles, robotics and real-time video analytics.
+
+- **Variety of Pre-trained Models:** YOLOv5u offers a wide range of pre-trained models, because different tasks require different toolkits. Whether you're focusing on inference, validation or training, there is a tailor-made model awaiting you. This variety ensures you're not just using a one-size-fits-all solution, but a model specifically fine-tuned for your unique challenge.
+
+## Supported Tasks and Modes
+
+The YOLOv5u models, with various pre-trained weights, excel in [object detection](../tasks/detect.md) tasks. They support a comprehensive range of modes, making them suitable for diverse applications from development to deployment.
+
+| Model Type | Pre-trained Weights | Task | Inference | Validation | Training | Export |
+|-----------|-----------------------------------------------------------------------------------------------------------------------------|---------------------------------------|----------|------------|----------|--------|
+| YOLOv5u | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… |
+
+This table provides a detailed overview of the YOLOv5u model variants, highlighting their applicability in object detection and support for various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv5u models in a wide range of object detection scenarios.
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | YAML | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 |
+        | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 |
+        | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 |
+        | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 |
+        | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 |
+        | | | | | | | | |
+        | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 |
+        | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 |
+        | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 |
+        | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 |
+        | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
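+
+The P6 variants in the lower half of the table are trained at 1280 pixels. As a minimal sketch (the weight name follows the table above; the image path is a placeholder), they can be run at their native resolution like so:
+
+```python
+from ultralytics import YOLO
+
+# P6 models add an extra output level and are intended for larger inputs
+model = YOLO('yolov5s6u.pt')
+results = model.predict('path/to/bus.jpg', imgsz=1280)
+```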
Example "Beispiel" + + === "Python" + + PyTorch-vortrainierte `*.pt`-Modelle sowie Konfigurationsdateien `*.yaml` kรถnnen an die `YOLO()`-Klasse รผbergeben werden, um eine Modellinstanz in Python zu erstellen: + + ```python + from ultralytics import YOLO + + # Laden Sie ein vortrainiertes YOLOv5n-Modell fรผr COCO-Daten + modell = YOLO('yolov5n.pt') + + # Informationen zum Modell anzeigen (optional) + model.info() + + # Trainieren Sie das Modell anhand des COCO8-Beispieldatensatzes fรผr 100 Epochen + ergebnisse = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Fรผhren Sie die Inferenz mit dem YOLOv5n-Modell auf dem Bild 'bus.jpg' durch + ergebnisse = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI-Befehle sind verfรผgbar, um die Modelle direkt auszufรผhren: + + ```bash + # Laden Sie ein vortrainiertes YOLOv5n-Modell und trainieren Sie es anhand des COCO8-Beispieldatensatzes fรผr 100 Epochen + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Laden Sie ein vortrainiertes YOLOv5n-Modell und fรผhren Sie die Inferenz auf dem Bild 'bus.jpg' durch + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## Zitate und Danksagungen + +Wenn Sie YOLOv5 oder YOLOv5u in Ihrer Forschung verwenden, zitieren Sie bitte das Ultralytics YOLOv5-Repository wie folgt: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Bitte beachten Sie, dass die YOLOv5-Modelle unter den Lizenzen [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) und [Enterprise](https://ultralytics.com/license) bereitgestellt werden. diff --git a/ultralytics/docs/de/models/yolov5.md:Zone.Identifier b/ultralytics/docs/de/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/models/yolov6.md b/ultralytics/docs/de/models/yolov6.md new file mode 100755 index 0000000..0246170 --- /dev/null +++ b/ultralytics/docs/de/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: Erforschen Sie Meituan YOLOv6, ein modernes Objekterkennungsmodell, das eine ausgewogene Kombination aus Geschwindigkeit und Genauigkeit bietet. Tauchen Sie ein in Funktionen, vorab trainierte Modelle und die Verwendung von Python. +keywords: Meituan YOLOv6, Objekterkennung, Ultralytics, YOLOv6 Dokumentation, Bi-direktionale Konkatenation, Anchor-Aided Training, vorab trainierte Modelle, Echtzeitanwendungen +--- + +# Meituan YOLOv6 + +## รœberblick + +[Meituan](https://about.meituan.com/) YOLOv6 ist ein moderner Objekterkenner, der eine bemerkenswerte Balance zwischen Geschwindigkeit und Genauigkeit bietet und somit eine beliebte Wahl fรผr Echtzeitanwendungen darstellt. Dieses Modell bietet mehrere bemerkenswerte Verbesserungen in seiner Architektur und seinem Trainingsschema, einschlieรŸlich der Implementierung eines Bi-direktionalen Konkatenationsmoduls (BiC), einer anchor-aided training (AAT)-Strategie und einem verbesserten Backpropagation- und Neck-Design fรผr Spitzenleistungen auf dem COCO-Datensatz. 
+
+![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png)
+![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png)
+**Overview of YOLOv6.** Model architecture diagram showing the redesigned network components and training strategies that have led to significant performance improvements. (a) The neck of YOLOv6 (N and S shown). Note that for M/L, RepBlocks is replaced with CSPStackRep. (b) The structure of a BiC module. (c) A SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)).
+
+### Key Features
+
+- **Bi-directional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the neck of the detector, enhancing localization signals and delivering performance gains with negligible speed degradation.
+- **Anchor-Aided Training (AAT) Strategy:** This model proposes AAT to enjoy the benefits of both anchor-based and anchor-free paradigms without compromising inference efficiency.
+- **Enhanced Backbone and Neck Design:** By deepening YOLOv6 to include another stage in the backbone and neck, this model achieves state-of-the-art performance on the COCO dataset at high-resolution input.
+- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of smaller YOLOv6 models, strengthening the auxiliary regression branch during training and removing it at inference to avoid a marked speed decline.
+
+## Performance Metrics
+
+YOLOv6 provides various pre-trained models at different scales:
+
+- YOLOv6-N: 37.5% AP on COCO val2017 at 1187 FPS with an NVIDIA Tesla T4 GPU.
+- YOLOv6-S: 45.0% AP at 484 FPS.
+- YOLOv6-M: 50.0% AP at 226 FPS.
+- YOLOv6-L: 52.8% AP at 116 FPS.
+- YOLOv6-L6: State-of-the-art accuracy in real time.
+
+YOLOv6 also provides quantized models for different precisions, as well as models optimized for mobile platforms.
+
+## Usage Examples
+
+This example provides simple YOLOv6 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
Example "Beispiel" + + === "Python" + + In Python kann PyTorch-vorab trainierte `*.pt`-Modelle sowie Konfigurations-`*.yaml`-Dateien an die `YOLO()`-Klasse รผbergeben werden, um eine Modellinstanz zu erstellen: + + ```python + from ultralytics import YOLO + + # Erstellen Sie ein YOLOv6n-Modell von Grund auf + model = YOLO('yolov6n.yaml') + + # Zeigen Sie Informationen zum Modell an (optional) + model.info() + + # Trainieren Sie das Modell am Beispiel des COCO8-Datensatzes fรผr 100 Epochen + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Fรผhren Sie Inferenz mit dem YOLOv6n-Modell auf dem Bild 'bus.jpg' durch + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI-Befehle stehen zur Verfรผgung, um die Modelle direkt auszufรผhren: + + ```bash + # Erstellen Sie ein YOLOv6n-Modell von Grund auf und trainieren Sie es am Beispiel des COCO8-Datensatzes fรผr 100 Epochen + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Erstellen Sie ein YOLOv6n-Modell von Grund auf und fรผhren Sie Inferenz auf dem Bild 'bus.jpg' durch + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## Unterstรผtzte Aufgaben und Modi + +Die YOLOv6-Serie bietet eine Reihe von Modellen, die jeweils fรผr die Hochleistungs-[Objekterkennung](../tasks/detect.md) optimiert sind. Diese Modelle erfรผllen unterschiedliche Rechenanforderungen und Genauigkeitsanforderungen und sind daher vielseitig fรผr eine Vielzahl von Anwendungen einsetzbar. + +| Modelltyp | Vorab trainierte Gewichte | Unterstรผtzte Aufgaben | Inferenz | Validierung | Training | Exportieren | +|-----------|---------------------------|---------------------------------------|----------|-------------|----------|-------------| +| YOLOv6-N | `yolov6-n.pt` | [Objekterkennung](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Objekterkennung](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Objekterkennung](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Objekterkennung](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Objekterkennung](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Diese Tabelle bietet einen detaillierten รœberblick รผber die YOLOv6-Modellvarianten und hebt ihre Fรคhigkeiten bei der Objekterkennung sowie ihre Kompatibilitรคt mit verschiedenen Betriebsmodi wie [Inferenz](../modes/predict.md), [Validierung](../modes/val.md), [Training](../modes/train.md) und [Exportieren](../modes/export.md) hervor. Diese umfassende Unterstรผtzung ermรถglicht es den Benutzern, die Fรคhigkeiten von YOLOv6-Modellen in einer Vielzahl von Objekterkennungsszenarien vollstรคndig zu nutzen. + +## Zitate und Anerkennungen + +Wir mรถchten den Autoren fรผr ihre bedeutenden Beitrรคge auf dem Gebiet der Echtzeit-Objekterkennung danken: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +Das ursprรผngliche YOLOv6-Papier finden Sie auf [arXiv](https://arxiv.org/abs/2301.05586). Die Autoren haben ihre Arbeit รถffentlich zugรคnglich gemacht, und der Code kann auf [GitHub](https://github.com/meituan/YOLOv6) abgerufen werden. 
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{li2023yolov6,
+              title={YOLOv6 v3.0: A Full-Scale Reloading},
+              author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu},
+              year={2023},
+              eprint={2301.05586},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv6 paper can be found on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/de/models/yolov6.md:Zone.Identifier b/ultralytics/docs/de/models/yolov6.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov6.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/models/yolov7.md b/ultralytics/docs/de/models/yolov7.md
new file mode 100755
index 0000000..4edb969
--- /dev/null
+++ b/ultralytics/docs/de/models/yolov7.md
@@ -0,0 +1,66 @@
+---
+comments: true
+description: Explore YOLOv7, a real-time object detector. Understand its superior speed, impressive accuracy and its unique focus on trainable bag-of-freebies optimization.
+keywords: YOLOv7, real-time object detector, state-of-the-art, Ultralytics, MS COCO dataset, model re-parameterization, dynamic label assignment, extended scaling, compound scaling
+---
+
+# YOLOv7: Trainable Bag-of-Freebies
+
+YOLOv7 is a state-of-the-art real-time object detector that surpasses all known object detectors in both speed and accuracy in the range from 5 FPS to 160 FPS. With 56.8% AP, it is the most accurate real-time object detector among all known models with 30 FPS or higher on GPU V100. Moreover, YOLOv7 outperforms other object detectors such as YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5 and many others in speed and accuracy. The model is trained on the MS COCO dataset from scratch, without using any other datasets or pre-trained weights. Source code for YOLOv7 is available on GitHub.
+
+![Comparison of YOLOv7 with SOTA object detectors](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92)
+**Comparison of state-of-the-art object detectors.** From the results in Table 2 we know that the proposed method has the best speed-accuracy tradeoff comprehensively. Comparing YOLOv7-tiny-SiLU with YOLOv5-N (r6.1), our method is 127 FPS faster and 10.7% more accurate on AP. In addition, YOLOv7 achieves 51.4% AP at a frame rate of 161 FPS, while PPYOLOE-L with the same AP has only a 78 FPS frame rate. In terms of parameter usage, YOLOv7 uses 41% less than PPYOLOE-L. Comparing YOLOv7-X with 114 FPS inference speed to YOLOv5-L (r6.1) with 99 FPS inference speed, YOLOv7-X improves AP by 3.9%. If YOLOv7-X is compared with YOLOv5-X (r6.1) of similar scale, the inference speed of YOLOv7-X is 31 FPS faster. In addition, in terms of the number of parameters and computation, YOLOv7-X reduces parameters by 22% and computation by 8% compared to YOLOv5-X (r6.1), while improving AP by 2.2% ([source](https://arxiv.org/pdf/2207.02696.pdf)).
+
+## Overview
+
+Real-time object detection is an important component of many computer vision systems, including multi-object tracking, autonomous driving, robotics and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving the inference speed of various CPUs, GPUs and neural processing units (NPUs).
+YOLOv7 supports both mobile GPU and GPU devices, from the edge to the cloud.
+
+Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on the optimization of the training process. This includes modules and optimization methods designed to improve the accuracy of object detection without increasing the inference cost, a concept known as the "trainable bag-of-freebies".
+
+## Key Features
+
+YOLOv7 introduces several key features:
+
+1. **Model Re-parameterization**: YOLOv7 proposes a planned re-parameterized model, a strategy applicable to layers in different networks that builds on the concept of the gradient propagation path.
+
+2. **Dynamic Label Assignment**: Training a model with multiple output layers presents a new issue: "How to assign dynamic targets for the outputs of the different branches?" To solve this problem, YOLOv7 introduces a new label assignment method called coarse-to-fine lead guided label assignment.
+
+3. **Extended and Compound Scaling**: YOLOv7 proposes "extended" and "compound scaling" methods for the real-time object detector that can effectively utilize parameters and computation.
+
+4. **Efficiency**: The method proposed by YOLOv7 can effectively reduce about 40% of the parameters and 50% of the computation of the state-of-the-art real-time object detector, and achieves faster inference speed and higher detection accuracy.
+
+## Usage Examples
+
+As of the time of writing, Ultralytics does not currently support YOLOv7 models. Therefore, users interested in using YOLOv7 will need to refer directly to the YOLOv7 GitHub repository for installation and usage instructions.
+
+Here is a brief overview of the typical steps you might take to use YOLOv7:
+
+1. Visit the YOLOv7 GitHub repository: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7).
+
+2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing the necessary dependencies and setting up any required environment variables.
+
+3. Once installation is complete, you can train and use the model as per the usage instructions provided in the repository. This usually involves preparing your dataset, configuring the model parameters, training the model and then using the trained model to perform object detection (a rough sketch of this workflow follows this list).
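+
+As a hedged illustration only (the script names and flags below are assumptions based on the repository's documented examples; the README remains authoritative), the workflow often looks like this:
+
+```bash
+# Clone the repository and install its dependencies
+git clone https://github.com/WongKinYiu/yolov7
+cd yolov7
+pip install -r requirements.txt
+
+# Run inference with pre-trained weights on the bundled sample images
+python detect.py --weights yolov7.pt --conf 0.25 --img-size 640 --source inference/images
+```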
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +Die ursprรผngliche YOLOv7-Studie kann auf [arXiv](https://arxiv.org/pdf/2207.02696.pdf) gefunden werden. Die Autoren haben ihre Arbeit รถffentlich zugรคnglich gemacht, und der Code kann auf [GitHub](https://github.com/WongKinYiu/yolov7) abgerufen werden. Wir schรคtzen ihre Bemรผhungen, das Feld voranzubringen und ihre Arbeit der breiteren Gemeinschaft zugรคnglich zu machen. diff --git a/ultralytics/docs/de/models/yolov7.md:Zone.Identifier b/ultralytics/docs/de/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/models/yolov8.md b/ultralytics/docs/de/models/yolov8.md new file mode 100755 index 0000000..e24ca96 --- /dev/null +++ b/ultralytics/docs/de/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: Erfahren Sie mehr รผber die aufregenden Funktionen von YOLOv8, der neuesten Version unseres Echtzeit-Objekterkenners! Erfahren Sie, wie fortschrittliche Architekturen, vortrainierte Modelle und die optimale Balance zwischen Genauigkeit und Geschwindigkeit YOLOv8 zur perfekten Wahl fรผr Ihre Objekterkennungsaufgaben machen. +keywords: YOLOv8, Ultralytics, Echtzeit-Objekterkennung, vortrainierte Modelle, Dokumentation, Objekterkennung, YOLO-Serie, fortschrittliche Architekturen, Genauigkeit, Geschwindigkeit +--- + +# YOLOv8 + +## รœbersicht + +YOLOv8 ist die neueste Version der YOLO-Serie von Echtzeit-Objekterkennern und bietet modernste Leistung in Bezug auf Genauigkeit und Geschwindigkeit. Basierend auf den Fortschritten frรผherer YOLO-Versionen bringt YOLOv8 neue Funktionen und Optimierungen mit sich, die ihn zu einer idealen Wahl fรผr verschiedene Objekterkennungsaufgaben in einer Vielzahl von Anwendungen machen. + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## Schlรผsselfunktionen + +- **Fortschrittliche Backbone- und Neck-Architekturen:** YOLOv8 verwendet modernste Backbone- und Neck-Architekturen, die zu einer verbesserten Merkmalsextraktion und Objekterkennungsleistung fรผhren. +- **Ankerfreier Split Ultralytics Head:** YOLOv8 verwendet einen ankerfreien Split Ultralytics Head, der zu einer besseren Genauigkeit und einem effizienteren Erkennungsprozess im Vergleich zu ankerbasierten Ansรคtzen fรผhrt. +- **Optimale Genauigkeits-Geschwindigkeits-Balance:** Mit dem Fokus auf die Aufrechterhaltung einer optimalen Balance zwischen Genauigkeit und Geschwindigkeit eignet sich YOLOv8 fรผr Echtzeit-Objekterkennungsaufgaben in verschiedenen Anwendungsbereichen. +- **Vielfalt an vortrainierten Modellen:** YOLOv8 bietet eine Vielzahl von vortrainierten Modellen, um verschiedenen Aufgaben und Leistungsanforderungen gerecht zu werden. Dies erleichtert die Suche nach dem richtigen Modell fรผr Ihren spezifischen Anwendungsfall. + +## Unterstรผtzte Aufgaben und Modi + +Die YOLOv8-Serie bietet eine Vielzahl von Modellen, von denen jedes auf bestimmte Aufgaben in der Computer Vision spezialisiert ist. 
+These models are designed to cater to various requirements, from object detection to more complex tasks like instance segmentation, pose/keypoint detection and classification.
+
+Each variant of the YOLOv8 series is optimized for its respective task, ensuring high performance and accuracy. Additionally, these models are compatible with various operational modes, including [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md), facilitating their use in different stages of deployment and development.
+
+| Model | Filenames | Task | Inference | Validation | Training | Export |
+|-------------|------------------------------------------------------------------------------------------------------------------|-----------------------------------------------|-----------|------------|----------|--------|
+| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… |
+| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [Instance Segmentation](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… |
+| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [Pose/Keypoints](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… |
+| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [Classification](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… |
+
+This table provides an overview of the YOLOv8 model variants and their areas of application, as well as their compatibility with various operational modes such as Inference, Validation, Training and Export. It showcases the versatility and robustness of the YOLOv8 series, making them suitable for a variety of applications in computer vision.
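+
+As a minimal sketch (the image path is a placeholder; the listed weights are fetched automatically by the `YOLO()` constructor), every variant is loaded the same way, with the filename suffix selecting the task head:
+
+```python
+from ultralytics import YOLO
+
+det_model = YOLO('yolov8n.pt')        # object detection
+seg_model = YOLO('yolov8n-seg.pt')    # instance segmentation
+pose_model = YOLO('yolov8n-pose.pt')  # pose/keypoints
+cls_model = YOLO('yolov8n-cls.pt')    # classification
+
+# The inference API is identical across tasks
+results = seg_model('path/to/bus.jpg')
+```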
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection (COCO)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+    === "Detection (Open Images V7)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |
+
+    === "Segmentation (COCO)"
+
+        See the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAP<sup>box</sup><br>50-95 | mAP<sup>mask</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|----------------------------|-----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+        | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+        | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+        | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+        | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+    === "Classification (ImageNet)"
+
+        See the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pre-trained classes.
+
+        | Model | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 |
+        |-------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------|
+        | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+        | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+        | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+        | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+        | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+    === "Pose (COCO)"
+
+        See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, 'person'.
+
+        | Model | size<br>(pixels) | mAP<sup>pose</sup><br>50-95 | mAP<sup>pose</sup><br>50 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|-----------------------------|--------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+        | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+        | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+        | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+        | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+        | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+## Usage Examples
+
+This example provides simple YOLOv8 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+Note that the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segmentation](../tasks/segment.md), [Classification](../tasks/classify.md) and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pre-trained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
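+
+Beyond detection, each task variant exposes task-specific results. As a minimal sketch (the image path is a placeholder), the pose models return keypoints alongside boxes:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-pose.pt')
+results = model('path/to/bus.jpg')
+
+# Per-person keypoints; .xy holds (x, y) pixel coordinates for each keypoint
+print(results[0].keypoints.xy)
+```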
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +Bitte beachten Sie, dass dieDOI aussteht und der Zitation hinzugefรผgt wird, sobald sie verfรผgbar ist. YOLOv8-Modelle werden unter den Lizenzen [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) und [Enterprise](https://ultralytics.com/license) bereitgestellt. diff --git a/ultralytics/docs/de/models/yolov8.md:Zone.Identifier b/ultralytics/docs/de/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/modes/benchmark.md b/ultralytics/docs/de/modes/benchmark.md new file mode 100755 index 0000000..93908a1 --- /dev/null +++ b/ultralytics/docs/de/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Lernen Sie, wie Sie die Geschwindigkeit und Genauigkeit von YOLOv8 รผber verschiedene Exportformate hinweg profilieren kรถnnen; erhalten Sie Einblicke in mAP50-95, Genauigkeit_top5 Kennzahlen und mehr. +keywords: Ultralytics, YOLOv8, Benchmarking, Geschwindigkeitsprofilierung, Genauigkeitsprofilierung, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLO-Exportformate +--- + +# Modell-Benchmarking mit Ultralytics YOLO + +Ultralytics YOLO-ร–kosystem und Integrationen + +## Einfรผhrung + +Nachdem Ihr Modell trainiert und validiert wurde, ist der nรคchste logische Schritt, seine Leistung in verschiedenen realen Szenarien zu bewerten. Der Benchmark-Modus in Ultralytics YOLOv8 dient diesem Zweck, indem er einen robusten Rahmen fรผr die Beurteilung von Geschwindigkeit und Genauigkeit Ihres Modells รผber eine Reihe von Exportformaten hinweg bietet. + +## Warum ist Benchmarking entscheidend? + +- **Informierte Entscheidungen:** Erhalten Sie Einblicke in die Kompromisse zwischen Geschwindigkeit und Genauigkeit. +- **Ressourcenzuweisung:** Verstehen Sie, wie sich verschiedene Exportformate auf unterschiedlicher Hardware verhalten. +- **Optimierung:** Erfahren Sie, welches Exportformat die beste Leistung fรผr Ihren spezifischen Anwendungsfall bietet. +- **Kosteneffizienz:** Nutzen Sie Hardware-Ressourcen basierend auf den Benchmark-Ergebnissen effizienter. + +### Schlรผsselmetriken im Benchmark-Modus + +- **mAP50-95:** Fรผr Objekterkennung, Segmentierung und Posenschรคtzung. +- **accuracy_top5:** Fรผr die Bildklassifizierung. +- **Inferenzzeit:** Zeit, die fรผr jedes Bild in Millisekunden benรถtigt wird. + +### Unterstรผtzte Exportformate + +- **ONNX:** Fรผr optimale CPU-Leistung +- **TensorRT:** Fรผr maximale GPU-Effizienz +- **OpenVINO:** Fรผr die Optimierung von Intel-Hardware +- **CoreML, TensorFlow SavedModel, und mehr:** Fรผr vielfรคltige Deployment-Anforderungen. + +!!! Tip "Tipp" + + * Exportieren Sie in ONNX oder OpenVINO fรผr bis zu 3x CPU-Beschleunigung. + * Exportieren Sie in TensorRT fรผr bis zu 5x GPU-Beschleunigung. + +## Anwendungsbeispiele + +Fรผhren Sie YOLOv8n-Benchmarks auf allen unterstรผtzten Exportformaten einschlieรŸlich ONNX, TensorRT usw. durch. Siehe den Abschnitt Argumente unten fรผr eine vollstรคndige Liste der Exportargumente. + +!!! 
Example "Beispiel" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark auf GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Argumente + +Argumente wie `model`, `data`, `imgsz`, `half`, `device` und `verbose` bieten Benutzern die Flexibilitรคt, die Benchmarks auf ihre spezifischen Bedรผrfnisse abzustimmen und die Leistung verschiedener Exportformate mรผhelos zu vergleichen. + +| Schlรผssel | Wert | Beschreibung | +|-----------|---------|--------------------------------------------------------------------------------------| +| `model` | `None` | Pfad zur Modelldatei, z. B. yolov8n.pt, yolov8n.yaml | +| `data` | `None` | Pfad zur YAML, die das Benchmarking-Dataset referenziert (unter `val`-Kennzeichnung) | +| `imgsz` | `640` | BildgrรถรŸe als Skalar oder Liste (h, w), z. B. (640, 480) | +| `half` | `False` | FP16-Quantisierung | +| `int8` | `False` | INT8-Quantisierung | +| `device` | `None` | Gerรคt zum Ausfรผhren, z. B. CUDA device=0 oder device=0,1,2,3 oder device=cpu | +| `verbose` | `False` | bei Fehlern nicht fortsetzen (bool), oder Wertebereichsschwelle (float) | + +## Exportformate + +Benchmarks werden automatisch auf allen mรถglichen Exportformaten unten ausgefรผhrt. + +| Format | `format`-Argument | Modell | Metadaten | Argumente | +|--------------------------------------------------------------------|-------------------|---------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Vollstรคndige Details zum `export` finden Sie auf der [Export](https://docs.ultralytics.com/modes/export/)-Seite. 
diff --git a/ultralytics/docs/de/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/de/modes/benchmark.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/modes/benchmark.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/modes/export.md b/ultralytics/docs/de/modes/export.md
new file mode 100755
index 0000000..c62a1d6
--- /dev/null
+++ b/ultralytics/docs/de/modes/export.md
@@ -0,0 +1,108 @@
+---
+comments: true
+description: Step-by-step guide on exporting your YOLOv8 models to various formats like ONNX, TensorRT, CoreML and more for deployment.
+keywords: YOLO, YOLOv8, Ultralytics, model export, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, export model
+---
+
+# Model Export with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+The ultimate goal of training a model is to deploy it for real-world applications. Export mode in Ultralytics YOLOv8 offers a versatile range of options for exporting your trained model to different formats, making it deployable across various platforms and devices. This comprehensive guide aims to walk you through the nuances of model exporting, showcasing how to achieve maximum compatibility and performance.
+
+**Watch:** How to export a custom trained Ultralytics YOLOv8 model and run live inference on webcam.
+
+## Why Choose YOLOv8's Export Mode?
+
+- **Versatility:** Export to multiple formats including ONNX, TensorRT, CoreML and more.
+- **Performance:** Gain up to 5x GPU speedup with TensorRT and 3x CPU speedup with ONNX or OpenVINO.
+- **Compatibility:** Make your model universally deployable across numerous hardware and software environments.
+- **Ease of Use:** Simple CLI and Python API for quick and straightforward model exporting.
+
+### Key Features of Export Mode
+
+Here are some of the standout functionalities:
+
+- **One-Click Export:** Simple commands for exporting to different formats.
+- **Batch Export:** Export models capable of batch inference.
+- **Optimized Inference:** Exported models are optimized for quicker inference times.
+- **Tutorial Videos:** In-depth guides and tutorials for a smooth exporting experience.
+
+!!! Tip "Tip"
+
+    * Export to ONNX or OpenVINO for up to 3x CPU speedup.
+    * Export to TensorRT for up to 5x GPU speedup.
+
+## Usage Examples
+
+Export a YOLOv8n model to a different format like ONNX or TensorRT. See the Arguments section below for a full list of export arguments.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom trained model
+
+        # Export the model
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n.pt format=onnx  # export official model
+        yolo export model=path/to/best.pt format=onnx  # export custom trained model
+        ```
+
+## Arguments
+
+Export settings for YOLO models refer to the various configurations and options used to save or export the model for use in other environments or platforms. These settings can affect the model's performance, size and compatibility with different systems. Common YOLO export settings include the format of the exported model file (e.g. ONNX, TensorFlow SavedModel), the device on which the model will be run (e.g. CPU, GPU) and the presence of additional features such as masks or multiple labels per box. Other factors that may affect the export process include the specific task the model is being used for and the requirements or constraints of the target environment or platform. It is important to carefully consider and configure these settings to ensure that the exported model is optimized for the intended use case and can be used effectively in the target environment.
+
+| Key | Value | Description |
+|-------------|-----------------|------------------------------------------------------|
+| `format` | `'torchscript'` | format to export to |
+| `imgsz` | `640` | image size as scalar or (h, w) list, e.g. (640, 480) |
+| `keras` | `False` | use Keras for TensorFlow SavedModel export |
+| `optimize` | `False` | TorchScript: optimize for mobile |
+| `half` | `False` | FP16 quantization |
+| `int8` | `False` | INT8 quantization |
+| `dynamic` | `False` | ONNX/TensorRT: dynamic axes |
+| `simplify` | `False` | ONNX/TensorRT: simplify model |
+| `opset` | `None` | ONNX: opset version (optional, defaults to latest) |
+| `workspace` | `4` | TensorRT: workspace size (GB) |
+| `nms` | `False` | CoreML: add NMS |
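+
+As a minimal sketch of combining several of these arguments (the TensorRT build assumes a CUDA device is available):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+
+# ONNX with a simplified graph, dynamic input shapes and a pinned opset
+model.export(format='onnx', dynamic=True, simplify=True, opset=12)
+
+# FP16 TensorRT engine with an enlarged builder workspace
+model.export(format='engine', half=True, workspace=8)
+```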
+
+## Export Formats
+
+Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`.
+
+| Format                                                             | `format` Argument | Model                     | Metadata | Arguments                                           |
+|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n.pt`              | ✅        | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n.torchscript`     | ✅        | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n.onnx`            | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`        | `yolov8n_openvino_model/` | ✅        | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n.engine`          | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n.mlpackage`       | ✅        | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n_saved_model/`    | ✅        | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n.pb`              | ❌        | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n.tflite`          | ✅        | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n_edgetpu.tflite`  | ✅        | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n_web_model/`      | ✅        | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n_paddle_model/`   | ✅        | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n_ncnn_model/`     | ✅        | `imgsz`, `half`                                     |
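+
+Exported models can usually be loaded straight back into the `YOLO` class for inference. A minimal sketch, assuming the ONNX export from the usage example above completed successfully in the working directory:
+
+```python
+from ultralytics import YOLO
+
+# Load the exported ONNX model and run inference on a sample image;
+# the task is inferred from the metadata embedded at export time
+onnx_model = YOLO('yolov8n.onnx')
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+```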
diff --git a/ultralytics/docs/de/modes/index.md b/ultralytics/docs/de/modes/index.md
new file mode 100755
index 0000000..c73b488
--- /dev/null
+++ b/ultralytics/docs/de/modes/index.md
@@ -0,0 +1,74 @@
+---
+comments: true
+description: From training to tracking, get the most out of YOLOv8 from Ultralytics. Gain insights and examples for each supported mode, including validation, export and benchmarking.
+keywords: Ultralytics, YOLOv8, machine learning, object detection, training, validation, prediction, export, tracking, benchmarking
+---
+
+# Ultralytics YOLOv8 Modes
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Ultralytics YOLOv8 is not just another object detection model; it is a versatile framework covering the entire lifecycle of machine learning models, from data ingestion and model training through validation and deployment to real-world tracking. Each mode serves a specific purpose and is engineered to offer you the flexibility and efficiency required for different tasks and use cases.
+
+Watch: Ultralytics Modes Tutorial: Train, Validate, Predict, Export & Benchmark.
+
+### Modes at a Glance
+
+Understanding the different **modes** that Ultralytics YOLOv8 supports is critical to getting the most out of your models:
+
+- **Train** mode: Fine-tune your model on custom or preloaded datasets.
+- **Val** mode: A post-training checkpoint to validate model performance.
+- **Predict** mode: Unleash the predictive power of your model on real-world data.
+- **Export** mode: Make your model deployment-ready in various formats.
+- **Track** mode: Extend your object detection model into real-time tracking applications.
+- **Benchmark** mode: Analyze your model's speed and accuracy in diverse deployment environments.
+
+This comprehensive guide aims to give you an overview of and practical insights into each mode, helping you harness the full potential of YOLOv8, starting with the minimal sketch below.
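+
+The modes map directly onto the Python API. The following is a minimal end-to-end sketch; the dataset name and image URL are illustrative and assume the `ultralytics` package is installed:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')  # load a pretrained model
+model.train(data='coco128.yaml', epochs=3)     # Train mode
+model.val()                                    # Val mode
+model('https://ultralytics.com/images/bus.jpg')  # Predict mode
+model.export(format='onnx')                    # Export mode
+```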
+
+## [Train](train.md)
+
+Train mode is used for training a YOLOv8 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process involves optimizing the model's parameters so that it can accurately predict the classes and locations of objects in an image.
+
+[Train Examples](train.md){ .md-button }
+
+## [Val](val.md)
+
+Val mode is used for validating a YOLOv8 model after it has been trained. In this mode, the model is evaluated on a validation set to measure its accuracy and generalization performance. This mode can be used to tune the model's hyperparameters for better performance.
+
+[Val Examples](val.md){ .md-button }
+
+## [Predict](predict.md)
+
+Predict mode is used for making predictions on new images or videos with a trained YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user can provide images or videos for inference. The model predicts the classes and locations of objects in the input images or videos.
+
+[Predict Examples](predict.md){ .md-button }
+
+## [Export](export.md)
+
+Export mode is used for exporting a YOLOv8 model to a format that can be used for deployment. In this mode, the model is converted to a format that can be used by other software applications or hardware devices. This mode is useful when deploying the model to production environments.
+
+[Export Examples](export.md){ .md-button }
+
+## [Track](track.md)
+
+Track mode is used for real-time tracking of objects with a YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user can provide a live video stream to perform real-time object tracking. This mode is useful for applications such as surveillance systems or self-driving cars.
+
+[Track Examples](track.md){ .md-button }
+
+## [Benchmark](benchmark.md)
+
+Benchmark mode is used to profile the speed and accuracy of various export formats for YOLOv8. The benchmarks provide information on the size of the exported format, its `mAP50-95` metrics (for object detection, segmentation and pose) or `accuracy_top5` metrics (for classification), and the inference time in milliseconds per image for various export formats like ONNX, OpenVINO, TensorRT and others. This information can help users choose the optimal export format for their specific use case based on their requirements for speed and accuracy.
+
+[Benchmark Examples](benchmark.md){ .md-button }
diff --git a/ultralytics/docs/de/modes/predict.md b/ultralytics/docs/de/modes/predict.md
new file mode 100755
index 0000000..feb64f0
--- /dev/null
+++ b/ultralytics/docs/de/modes/predict.md
@@ -0,0 +1,226 @@
+---
+comments: true
+description: Explore how the YOLOv8 predict mode can be used for various tasks. Learn about different inference sources like images, videos and data formats.
+keywords: Ultralytics, YOLOv8, predict mode, inference sources, prediction tasks, streaming mode, image processing, video processing, machine learning, AI
+---
+
+# Model Prediction with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+In the world of machine learning and computer vision, the process of making sense of visual data is called 'inference' or 'prediction'. Ultralytics YOLOv8 offers a powerful feature known as **predict mode**, tailored for high-performance, real-time inference on a wide range of data sources.
+
+Watch: How to extract the outputs from the Ultralytics YOLOv8 model for custom projects.
+
+## Real-world Applications
+
+|                                                              Manufacturing                                                              |                                                              Sports                                                               |                                                                Safety                                                                 |
+|:---------------------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|
+| ![Vehicle Spare Parts Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Football Player Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![People Fall Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) |
+|                                                    Vehicle Spare Parts Detection                                                    |                                                    Football Player Detection                                                     |                                                       People Fall Detection                                                       |
+
+## Why Use Ultralytics YOLO for Inference?
+
+Here's why you should consider YOLOv8's predict mode for your various inference needs:
+
+- **Versatility:** Capable of making inferences on images, videos and even live streams.
+- **Performance:** Engineered for real-time, high-speed processing without sacrificing accuracy.
+- **Ease of Use:** Intuitive Python and CLI interfaces for rapid deployment and testing.
+- **Highly Customizable:** Various settings and parameters to tune the model's inference behavior according to your specific requirements.
+
+### Key Features of Predict Mode
+
+YOLOv8's predict mode is designed to be robust and versatile, featuring:
+
+- **Multiple Data Source Compatibility:** Whether your data is individual images, a collection of images, video files or real-time video streams, predict mode has you covered.
+- **Streaming Mode:** Use the streaming feature to generate a memory-efficient generator of `Results` objects. Enable this by setting `stream=True` in the predictor's call method.
+- **Batch Processing:** The ability to process multiple images or video frames in a single batch, further speeding up inference time.
+- **Integration Friendly:** Easily integrate with existing data pipelines and other software components thanks to a flexible API.
+
+Ultralytics YOLO models return either a Python list of `Results` objects, or a memory-efficient Python generator of `Results` objects when `stream=True` is passed to the model during inference:
+
+!!! Example "Predict"
+
+    === "Return a list with `stream=False`"
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # pretrained YOLOv8n model
+
+        # Run batched inference on a list of images
+        results = model(['im1.jpg', 'im2.jpg'])  # returns a list of Results objects
+
+        # Process results list
+        for result in results:
+            boxes = result.boxes  # Boxes object for bbox outputs
+            masks = result.masks  # Masks object for segmentation masks outputs
+            keypoints = result.keypoints  # Keypoints object for pose outputs
+            probs = result.probs  # Probs object for classification outputs
+        ```
+
+    === "Return a generator with `stream=True`"
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # pretrained YOLOv8n model
+
+        # Run batched inference on a list of images
+        results = model(['im1.jpg', 'im2.jpg'], stream=True)  # returns a generator of Results objects
+
+        # Process results generator
+        for result in results:
+            boxes = result.boxes  # Boxes object for bbox outputs
+            masks = result.masks  # Masks object for segmentation masks outputs
+            keypoints = result.keypoints  # Keypoints object for pose outputs
+            probs = result.probs  # Probs object for classification outputs
+        ```
+
+## Inference Sources
+
+YOLOv8 can process different types of input sources for inference, as shown in the table below. The sources include static images, video streams and various data formats. The table also indicates whether each source can be used in streaming mode with the argument `stream=True` ✅. Streaming mode is beneficial for processing videos or live streams, as it creates a generator of results instead of loading all frames into memory.
+
+!!! Tip "Tip"
+
+    Use `stream=True` for processing long videos or large datasets to efficiently manage memory. When `stream=False`, the results for all frames or data points are stored in memory, which can quickly add up and cause out-of-memory errors for large inputs. In contrast, `stream=True` utilizes a generator, which only keeps the results of the current frame or data point in memory, significantly reducing memory consumption and preventing out-of-memory issues.
+
+| Source       | Argument                                   | Type              | Notes                                                                                        |
+|--------------|--------------------------------------------|-------------------|----------------------------------------------------------------------------------------------|
+| image        | `'image.jpg'`                              | `str` or `Path`   | Single image file.                                                                             |
+| URL          | `'https://ultralytics.com/images/bus.jpg'` | `str`             | URL to an image.                                                                               |
+| screenshot   | `'screen'`                                 | `str`             | Capture a screenshot.                                                                          |
+| PIL          | `Image.open('im.jpg')`                     | `PIL.Image`       | HWC format with RGB channels.                                                                  |
+| OpenCV       | `cv2.imread('im.jpg')`                     | `np.ndarray`      | HWC format with BGR channels `uint8 (0-255)`.                                                  |
+| numpy        | `np.zeros((640,1280,3))`                   | `np.ndarray`      | HWC format with BGR channels `uint8 (0-255)`.                                                  |
+| torch        | `torch.zeros(16,3,320,640)`                | `torch.Tensor`    | BCHW format with RGB channels `float32 (0.0-1.0)`.                                             |
+| CSV          | `'sources.csv'`                            | `str` or `Path`   | CSV file containing paths to images, videos or directories.                                    |
+| video ✅      | `'video.mp4'`                              | `str` or `Path`   | Video file in formats like MP4, AVI, etc.                                                      |
+| directory ✅  | `'path/'`                                  | `str` or `Path`   | Path to a directory containing images or videos.                                               |
+| glob ✅       | `'path/*.jpg'`                             | `str`             | Glob pattern to match multiple files. Use the `*` character as a wildcard.                     |
+| YouTube ✅    | `'https://youtu.be/LNwODJXcvt4'`           | `str`             | URL to a YouTube video.                                                                        |
+| stream ✅     | `'rtsp://example.com/media.mp4'`           | `str`             | URL for streaming protocols such as RTSP, RTMP, TCP or an IP address.                          |
+| multi-stream ✅ | `'list.streams'`                         | `str` or `Path`   | `*.streams` text file with one stream URL per line, i.e. 8 streams will run at batch-size 8.   |
+
+Below are code examples for using each source type:
+
+!!! Example "Prediction sources"
+
+    === "image"
+        Run inference on an image file.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define path to the image file
+        quell = 'path/to/image.jpg'
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "screenshot"
+        Run inference on the current screen content as a screenshot.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define the current screenshot as source ('screen' is the expected argument value)
+        quell = 'screen'
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "URL"
+        Run inference on an image or video hosted remotely via URL.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define remote image or video URL
+        quell = 'https://ultralytics.com/images/bus.jpg'
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "PIL"
+        Run inference on an image opened with the Python Imaging Library (PIL).
+        ```python
+        from PIL import Image
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Open an image using PIL
+        quell = Image.open('path/to/image.jpg')
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "OpenCV"
+        Run inference on an image read with OpenCV.
+        ```python
+        import cv2
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Read an image using OpenCV
+        quell = cv2.imread('path/to/image.jpg')
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "numpy"
+        Run inference on an image represented as a numpy array.
+        ```python
+        import numpy as np
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random numpy array of HWC shape (640, 640, 3) with values in range [0, 255] and type uint8
+        quell = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8')
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
+
+    === "torch"
+        Run inference on an image represented as a PyTorch tensor.
+        ```python
+        import torch
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random torch tensor of BCHW shape (1, 3, 640, 640) with values in range [0, 1] and type float32
+        quell = torch.rand(1, 3, 640, 640, dtype=torch.float32)
+
+        # Run inference on the source
+        ergebnisse = model(quell)  # list of Results objects
+        ```
diff --git a/ultralytics/docs/de/modes/track.md b/ultralytics/docs/de/modes/track.md
new file mode 100755
index 0000000..f29dae9
--- /dev/null
+++ b/ultralytics/docs/de/modes/track.md
@@ -0,0 +1,200 @@
+---
+comments: true
+description: Learn how to use Ultralytics YOLO for object tracking in video streams. Guides to use different trackers and customize tracker configurations.
+keywords: Ultralytics, YOLO, object tracking, video streams, BoT-SORT, ByteTrack, Python guide, CLI guide
+---
+
+# Multi-Object Tracking with Ultralytics YOLO
+
+Multi-object tracking examples
+
+Object tracking in the realm of video analytics is a critical task that not only identifies the location and class of objects within the frame, but also maintains a unique ID for each detected object as the video progresses. The applications are limitless, ranging from surveillance and security to real-time sports analytics.
+
+## Why Choose Ultralytics YOLO for Object Tracking?
+
+The output from Ultralytics trackers is consistent with standard object detection, but adds object IDs. This makes it easy to track objects in video streams and perform subsequent analytics. Here's why you should consider using Ultralytics YOLO for your object tracking needs:
+
+- **Efficiency:** Process video streams in real-time without compromising accuracy.
+- **Flexibility:** Supports multiple tracking algorithms and configurations.
+- **Ease of Use:** Simple Python API and CLI options for quick integration and deployment.
+- **Customizability:** Easy to use with custom trained YOLO models, allowing integration into domain-specific applications.
+
+Watch: Object Detection and Tracking with Ultralytics YOLOv8.
+
+## Real-world Applications
+
+|                                                       Transportation                                                        |                                                         Retail                                                          |                                                      Aquaculture                                                       |
+|:------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------:|
+| ![Vehicle Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![People Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![Fish Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) |
+|                                                      Vehicle Tracking                                                      |                                                      People Tracking                                                      |                                                      Fish Tracking                                                     |
+
+## Features at a Glance
+
+Ultralytics YOLO extends its object detection features to provide robust and versatile object tracking:
+
+- **Real-Time Tracking:** Seamlessly track objects in high-frame-rate videos.
+- **Multiple Tracker Support:** Choose from a variety of established tracking algorithms.
+- **Customizable Tracker Configurations:** Tailor the tracking algorithm to meet specific requirements by adjusting various parameters.
+
+## Available Trackers
+
+Ultralytics YOLO supports the following tracking algorithms. They can be enabled by passing the relevant YAML configuration file, such as `tracker=tracker_type.yaml`:
+
+* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Use `botsort.yaml` to enable this tracker.
+* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Use `bytetrack.yaml` to enable this tracker.
+
+The default tracker is BoT-SORT.
+
+## Tracking
+
+To run the tracker on video streams, use a trained Detect, Segment or Pose model such as YOLOv8n, YOLOv8n-seg and YOLOv8n-pose.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load an official or custom model
+        model = YOLO('yolov8n.pt')  # load an official Detect model
+        model = YOLO('yolov8n-seg.pt')  # load an official Segment model
+        model = YOLO('yolov8n-pose.pt')  # load an official Pose model
+        model = YOLO('path/to/best.pt')  # load a custom trained model
+
+        # Perform tracking with the model
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # tracking with default tracker
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # tracking with ByteTrack tracker
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Perform tracking with various models using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # official Detect model
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # official Segment model
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # official Pose model
+        yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4"  # custom trained model
+
+        # Track using ByteTrack tracker
+        yolo track model=path/to/best.pt tracker="bytetrack.yaml"
+        ```
+
+As can be seen in the above usage, tracking is available for all Detect, Segment and Pose models run on videos or streaming sources.
+
+## Configuration
+
+### Tracking Arguments
+
+Tracking configuration shares properties with predict mode, such as `conf`, `iou` and `show`. For further configurations, refer to the [Predict](https://docs.ultralytics.com/modes/predict/) model page.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Configure the tracking parameters and run the tracker
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Configure tracking parameters and run the tracker using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### Tracker Selection
+
+Ultralytics also allows you to use a modified tracker configuration file. To do this, simply make a copy of a tracker config file (for example, `custom_tracker.yaml`) from [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) and modify any configuration (except the `tracker_type`) as per your needs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load the model and run the tracker with a custom configuration file
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load the model and run the tracker with a custom configuration file using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml'
+        ```
+
+For a comprehensive list of tracking arguments, refer to the [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) page.
+
+## Python Examples
+
+### Persisting Tracks Loop
+
+Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking on video frames. This script assumes you have already installed the necessary packages (`opencv-python` and `ultralytics`). The `persist=True` argument tells the tracker that the current image or frame is the next in a sequence and to expect tracks from the previous image in the current image.
+
+!!! Example "Streaming for-loop with tracking"
+
+    ```python
+    import cv2
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+Please note the change from `model(frame)` to `model.track(frame)`, which enables object tracking instead of simple detection. This modified script will run the tracker on each frame of the video, visualize the results and display them in a window. The loop can be exited by pressing 'q'.
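+
+If you need the track identities themselves rather than the annotated frame, they are exposed on the result boxes. A minimal sketch, assuming a placeholder `path/to/video.mp4` as above; note that `boxes.id` can be `None` before the first track is confirmed:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model.track('path/to/video.mp4', persist=True, stream=True)
+
+for result in results:
+    if result.boxes.id is not None:
+        track_ids = result.boxes.id.int().tolist()  # one ID per detected box
+        print(track_ids)
+```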
+
+## Contributing New Trackers
+
+Are you proficient in multi-object tracking and have successfully implemented or adapted a tracking algorithm with Ultralytics YOLO? We invite you to contribute to our Trackers section in [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! Your real-world applications and solutions could be invaluable for users working on tracking tasks.
+
+By contributing to this section, you help expand the scope of tracking solutions available within the Ultralytics YOLO framework, adding another layer of functionality for the community.
+
+To initiate your contribution, please refer to our [Contributing Guide](https://docs.ultralytics.com/help/contributing) for comprehensive instructions on submitting a Pull Request (PR) 🛠️. We are excited to see what you bring to the table!
+
+Together, let's enhance the tracking capabilities of the Ultralytics YOLO ecosystem 🙏!
diff --git a/ultralytics/docs/de/modes/train.md b/ultralytics/docs/de/modes/train.md
new file mode 100755
index 0000000..b7ee65e
--- /dev/null
+++ b/ultralytics/docs/de/modes/train.md
@@ -0,0 +1,206 @@
+---
+comments: true
+description: Step-by-step guide to training YOLOv8 models with Ultralytics YOLO, including examples of single-GPU and multi-GPU training
+keywords: Ultralytics, YOLOv8, YOLO, object detection, train mode, custom dataset, GPU training, multi-GPU, hyperparameters, CLI examples, Python examples
+---
+
+# Model Training with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Training a deep learning model involves feeding it data and adjusting its parameters so that it can make accurate predictions. Train mode in Ultralytics YOLOv8 is engineered for effective and efficient training of object detection models, fully utilizing modern hardware capabilities. This guide aims to cover all the details you need to get started with training your own models using YOLOv8's robust set of features.
+
+Watch: How to train a YOLOv8 model on your custom dataset in Google Colab.
+
+## Why Choose Ultralytics YOLO for Training?
+
+Here are some compelling reasons to opt for YOLOv8's Train mode:
+
+- **Efficiency:** Make the most out of your hardware, whether you're on a single-GPU setup or scaling across multiple GPUs.
+- **Versatility:** Train on custom datasets in addition to readily available ones like COCO, VOC and ImageNet.
+- **User-Friendly:** Simple yet powerful CLI and Python interfaces for a straightforward training experience.
+- **Hyperparameter Flexibility:** A broad range of customizable hyperparameters to fine-tune model performance, as sketched after the usage examples below.
+
+### Key Features of Train Mode
+
+The following are some notable features of YOLOv8's Train mode:
+
+- **Automatic Dataset Download:** Standard datasets like COCO, VOC and ImageNet are downloaded automatically on first use.
+- **Multi-GPU Support:** Scale your training efforts seamlessly across multiple GPUs to expedite the process.
+- **Hyperparameter Configuration:** The option to modify hyperparameters through YAML configuration files or CLI arguments.
+- **Visualization and Monitoring:** Real-time tracking of training metrics and visualization of the learning process for better insights.
+
+!!! Tip "Tip"
+
+    * YOLOv8 datasets like COCO, VOC, ImageNet and many others are downloaded automatically on first use, i.e. `yolo train data=coco.yaml`
+
+## Usage Examples
+
+Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. The training device can be specified using the `device` argument. If no argument is passed, GPU `device=0` will be used if available, otherwise `device=cpu` will be used. See the Arguments section below for a full list of training arguments.
+
+!!! Example "Single-GPU and CPU Training Example"
+
+    Device is determined automatically. If a GPU is available then it will be used, otherwise training will start on CPU.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Start training from a pretrained *.pt model
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
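+
+As a minimal sketch of the hyperparameter flexibility mentioned above, individual training settings can be overridden directly in the call; the values below are illustrative only, not recommendations:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+
+# Override selected training hyperparameters; all other settings keep their defaults
+results = model.train(data='coco128.yaml', epochs=50, imgsz=640, batch=32, lr0=0.005, optimizer='AdamW')
+```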
+
+### Multi-GPU Training
+
+Multi-GPU training allows for more efficient utilization of available hardware resources by distributing the training load across multiple GPUs. This feature is available through both the Python API and the command line interface. To enable multi-GPU training, specify the GPU device IDs you wish to use.
+
+!!! Example "Multi-GPU Training Example"
+
+    To train with 2 GPUs, use the following commands for CUDA devices 0 and 1. Expand to additional GPUs as required.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with 2 GPUs
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1])
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using GPUs 0 and 1
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1
+        ```
+
+### Apple M1 and M2 MPS Training
+
+With the support for Apple M1 and M2 chips integrated in the Ultralytics YOLO models, it's now possible to train your models on devices utilizing the powerful Metal Performance Shaders (MPS) framework. MPS offers a high-performance way of executing computation and image processing tasks on Apple's custom silicon.
+
+To enable training on Apple M1 and M2 chips, you should specify 'mps' as your device when initiating the training process. Below is an example of how you could do this in Python and via the command line:
+
+!!! Example "MPS Training Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with MPS
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using MPS
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps
+        ```
+
+Leveraging the computational power of the M1/M2 chips enables more efficient processing of the training tasks. For more detailed guidance and advanced configuration options, please refer to the [PyTorch MPS documentation](https://pytorch.org/docs/stable/notes/mps.html).
+
+## Logging
+
+When training a YOLOv8 model, you may find it valuable to keep track of the model's performance over time. This is where logging comes into play. Ultralytics YOLO supports three types of loggers: Comet, ClearML and TensorBoard.
+
+To use a logger, install and initialize it as shown in the code snippets below.
+
+### Comet
+
+[Comet](https://www.comet.ml/site/) is a platform that allows data scientists and developers to track, compare, explain and optimize experiments and models. It provides functionalities such as real-time metrics, code diffs and hyperparameter tracking.
+
+To use Comet:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        # pip install comet_ml
+        import comet_ml
+
+        comet_ml.init()
+        ```
+
+Remember to sign in to your Comet account on their website and get your API key. You will need to add this to your environment variables or your script to log your experiments.
+
+### ClearML
+
+[ClearML](https://www.clear.ml/) is an open-source platform that automates tracking of experiments and helps with efficient sharing of resources. It is designed to help teams manage, execute and reproduce their ML work more efficiently.
+
+To use ClearML:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        # pip install clearml
+        import clearml
+
+        clearml.browser_login()
+        ```
+
+After running this script, you will need to sign in to your ClearML account in the browser and authenticate your session.
+
+### TensorBoard
+
+[TensorBoard](https://www.tensorflow.org/tensorboard) is a visualization toolkit for TensorFlow. It allows you to visualize your TensorFlow graph, plot quantitative metrics about the execution of your graph and show additional data like images that pass through it.
+
+To use TensorBoard in [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb):
+
+!!! Example "Example"
+
+    === "CLI"
+        ```bash
+        %load_ext tensorboard
+        tensorboard --logdir ultralytics/runs  # replace with your 'runs' directory
+        ```
+
+To run TensorBoard locally, run the command below and view the results at http://localhost:6006/.
+
+!!! Example "Example"
+
+    === "CLI"
+        ```bash
+        tensorboard --logdir ultralytics/runs  # replace with your 'runs' directory
+        ```
+
+This will load TensorBoard and direct it to the directory where your training logs are saved.
+
+After setting up your logger, you can then proceed with your model training. All training metrics will be automatically logged on your chosen platform, and you can access these logs to monitor your model's performance over time, compare different models and identify areas for improvement.
diff --git a/ultralytics/docs/de/modes/val.md b/ultralytics/docs/de/modes/val.md
new file mode 100755
index 0000000..3f0c188
--- /dev/null
+++ b/ultralytics/docs/de/modes/val.md
@@ -0,0 +1,86 @@
+---
+comments: true
+description: Guide to validating YOLOv8 models. Learn how to evaluate the performance of your YOLO models using validation settings and metrics, with Python and CLI examples.
+keywords: Ultralytics, YOLO Docs, YOLOv8, validation, model evaluation, hyperparameters, accuracy, metrics, Python, CLI
+---
+
+# Model Validation with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Validation is a critical step in the machine learning pipeline, allowing you to assess the quality of your trained models. Val mode in Ultralytics YOLOv8 provides a robust suite of tools and metrics for evaluating the performance of your object detection models. This guide serves as a complete resource for understanding how to effectively use Val mode to ensure that your models are both accurate and reliable.
+
+## Why Validate with Ultralytics YOLO?
+
+Here are the benefits of using YOLOv8's Val mode:
+
+- **Precision:** Get accurate metrics like mAP50, mAP75 and mAP50-95 to comprehensively evaluate your model.
+- **Convenience:** Utilize built-in features that remember training settings, simplifying the validation process.
+- **Flexibility:** Validate your model with the same or different datasets and image sizes.
+- **Hyperparameter Tuning:** Use validation metrics to fine-tune your model for better performance.
+
+### Key Features of Val Mode
+
+These are the notable functionalities offered by YOLOv8's Val mode:
+
+- **Automated Settings:** Models remember their training configurations for straightforward validation.
+- **Multi-Metric Support:** Evaluate your model based on a range of accuracy metrics.
+- **CLI and Python API:** Choose between the command line interface or Python API based on your preference for validation.
+- **Data Compatibility:** Works seamlessly with datasets used during the training phase as well as custom datasets.
+
+!!! Tip "Tip"
+
+    * YOLOv8 models automatically remember their training settings, so you can validate a model at the same image size and on the original dataset easily with just `yolo val model=yolov8n.pt` or `model('yolov8n.pt').val()`
+
+## Usage Examples
+
+Validate trained YOLOv8n model accuracy on the COCO128 dataset. No argument needs to be passed, as the `model` retains its training `data` and arguments as model attributes. See the Arguments section below for a full list of validation arguments.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Validate the model
+        metrics = model.val()  # no arguments needed, dataset and settings remembered
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # a list containing map50-95 of each category
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # val official model
+        yolo detect val model=path/to/best.pt  # val custom model
+        ```
+
+## Arguments
+
+Validation settings for YOLO models refer to the various hyperparameters and configurations used to evaluate the model's performance on a validation dataset. These settings can affect the model's performance, speed and accuracy. Some common YOLO validation settings include the batch size, the frequency with which validation is performed during training and the metrics used to evaluate the model's performance. Other factors that may affect the validation process include the size and composition of the validation dataset and the specific task the model is being used for. It is important to carefully tune and experiment with these settings to ensure that the model performs well on the validation dataset and to detect and prevent overfitting.
+
+| Key           | Value   | Description                                                                     |
+|---------------|---------|---------------------------------------------------------------------------------|
+| `data`        | `None`  | path to data file, i.e. coco128.yaml                                            |
+| `imgsz`       | `640`   | size of input images as integer                                                 |
+| `batch`       | `16`    | number of images per batch (-1 for AutoBatch)                                   |
+| `save_json`   | `False` | save results to JSON file                                                       |
+| `save_hybrid` | `False` | save hybrid version of labels (labels + additional predictions)                 |
+| `conf`        | `0.001` | object confidence threshold for detection                                       |
+| `iou`         | `0.6`   | intersection over union (IoU) threshold for NMS                                 |
+| `max_det`     | `300`   | maximum number of detections per image                                          |
+| `half`        | `True`  | use half precision (FP16)                                                       |
+| `device`      | `None`  | device to run on, i.e. CUDA device=0/1/2/3 or device=cpu                        |
+| `dnn`         | `False` | use OpenCV DNN for ONNX inference                                               |
+| `plots`       | `False` | show plots during training                                                      |
+| `rect`        | `False` | rectangular val with each batch collated for minimum padding                    |
+| `split`       | `val`   | dataset split to use for validation, i.e. 'val', 'test' or 'train'              |
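+
+As a minimal sketch of overriding a few of these settings from Python, assuming a hypothetical custom-trained weights file and illustrative values:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('path/to/best.pt')
+
+# Validate with a custom IoU threshold and plots enabled; unset keys keep their defaults
+metrics = model.val(data='coco128.yaml', imgsz=640, iou=0.65, plots=True)
+print(metrics.box.map)  # mAP50-95
+```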
diff --git a/ultralytics/docs/de/quickstart.md b/ultralytics/docs/de/quickstart.md
new file mode 100755
index 0000000..1bfbaf7
--- /dev/null
+++ b/ultralytics/docs/de/quickstart.md
@@ -0,0 +1,198 @@
+---
+comments: true
+description: Explore various methods to install Ultralytics using pip, conda, git and Docker. Learn how to use Ultralytics with the command line interface or within your Python projects.
+keywords: Ultralytics installation, pip install Ultralytics, Docker install Ultralytics, Ultralytics command line interface, Ultralytics Python interface
+---
+
+## Install Ultralytics
+
+Ultralytics provides various installation methods including pip, conda and Docker. Install YOLOv8 via the `ultralytics` pip package for the latest stable release, or by cloning the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics) for the most up-to-date version. Docker can be used to execute the package in an isolated container, avoiding local installation.
+
+!!! Example "Install"
+
+    === "Pip install (recommended)"
+        Install the `ultralytics` package using pip, or update an existing installation by running `pip install -U ultralytics`. Visit the Python Package Index (PyPI) for more details on the `ultralytics` package: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/).
+
+        [![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics)
+
+        ```bash
+        # Install the ultralytics package from PyPI
+        pip install ultralytics
+        ```
+
+        You can also install the `ultralytics` package directly from the GitHub [repository](https://github.com/ultralytics/ultralytics). This might be useful if you want the latest development version.
Make sure to have the Git command line tool installed on your system. The `@main` command installs the `main` branch and may be modified to another branch, i.e. `@my-branch`, or removed entirely to default to the `main` branch.
+
+        ```bash
+        # Install the ultralytics package from GitHub
+        pip install git+https://github.com/ultralytics/ultralytics.git@main
+        ```
+
+    === "Conda install"
+        Conda is an alternative package manager to pip which may also be used for installation. Visit Anaconda for more details at [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). The Ultralytics feedstock repository for updating the conda package is at [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/).
+
+        [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics)
+
+        ```bash
+        # Install the ultralytics package using conda
+        conda install -c conda-forge ultralytics
+        ```
+
+        !!! Note "Note"
+
+            If you are installing in a CUDA environment, best practice is to install `ultralytics`, `pytorch` and `pytorch-cuda` in the same command to allow the conda package manager to resolve any conflicts, or else to install `pytorch-cuda` last to allow it to override the CPU-specific `pytorch` package if necessary.
+            ```bash
+            # Install all packages together using conda
+            conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics
+            ```
+
+        ### Conda Docker Image
+
+        Ultralytics Conda Docker images are also available from [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). These images are based on [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) and are a simple way to start using `ultralytics` in a conda environment.
+
+        ```bash
+        # Set image name as a variable
+        t=ultralytics/ultralytics:latest-conda
+
+        # Pull the latest ultralytics image from Docker Hub
+        sudo docker pull $t
+
+        # Run the ultralytics image in a container with GPU support
+        sudo docker run -it --ipc=host --gpus all $t  # all GPUs
+        sudo docker run -it --ipc=host --gpus '"device=2,3"' $t  # specify GPUs
+        ```
+
+    === "Git clone"
+        Clone the `ultralytics` repository if you are interested in contributing to development or wish to experiment with the latest source code. After cloning, navigate into the directory and install the package in editable mode `-e` using pip.
+        ```bash
+        # Clone the ultralytics repository
+        git clone https://github.com/ultralytics/ultralytics
+
+        # Navigate to the cloned directory
+        cd ultralytics
+
+        # Install the package in editable mode for development
+        pip install -e .
+        ```
+
+See the `ultralytics` [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) file for a list of dependencies.
Note that all examples above install all required dependencies.
+
+Watch: Ultralytics YOLO Quick Start Guide
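+
+Whichever method you choose, you can sanity-check the installation afterwards with the built-in checks utility. A short sketch; the CLI equivalent is the `yolo checks` command listed later in this guide:
+
+```python
+import ultralytics
+
+# Print environment information (versions, CPU/GPU, disk) and verify requirements
+ultralytics.checks()
+```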
+
+!!! Tip "Tip"
+
+    PyTorch requirements vary by operating system and CUDA requirements, so it's recommended to install PyTorch first following the instructions at [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally).
+
+    PyTorch Installation Instructions
+
+## Use Ultralytics with CLI
+
+The Ultralytics command line interface (CLI) allows for simple single-line commands without the need for a Python environment. CLI requires no customization or Python code. You can simply run all tasks from the terminal with the `yolo` command. Check out the [CLI Guide](/../usage/cli.md) to learn more about using YOLOv8 from the command line.
+
+!!! Example "Example"
+
+    === "Syntax"
+
+        Ultralytics `yolo` commands use the following syntax:
+        ```bash
+        yolo TASK MODE ARGS
+
+        Where   TASK (optional) is one of [detect, segment, classify]
+                MODE (required) is one of [train, val, predict, export, track]
+                ARGS (optional) are any number of custom 'arg=value' pairs like 'imgsz=320' that override defaults.
+        ```
+        See all ARGS in the full [Configuration Guide](/../usage/cfg.md) or with `yolo cfg`
+
+    === "Train"
+
+        Train a detection model for 10 epochs with an initial learning rate of 0.01
+        ```bash
+        yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
+        ```
+
+    === "Predict"
+
+        Predict a YouTube video using a pretrained segmentation model at image size 320:
+        ```bash
+        yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+        ```
+
+    === "Val"
+
+        Val a pretrained detection model at batch-size 1 and image size 640:
+        ```bash
+        yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
+        ```
+
+    === "Export"
+
+        Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
+        ```bash
+        yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
+        ```
+
+    === "Special"
+
+        Run special commands to see version, view settings, run checks and more:
+        ```bash
+        yolo help
+        yolo checks
+        yolo version
+        yolo settings
+        yolo copy-cfg
+        yolo cfg
+        ```
+
+!!! Warning "Warning"
+
+    Arguments must be passed as `arg=val` pairs, split by an equals `=` sign and delimited by spaces between pairs. Do not use `--` argument prefixes or commas `,` between arguments.
+
+    - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` ✅
+    - `yolo predict model yolov8n.pt imgsz 640 conf 0.25` ❌
+    - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` ❌
+
+[CLI Guide](/../usage/cli.md){ .md-button }
+
+## Use Ultralytics with Python
+
+YOLOv8's Python interface allows for seamless integration into your Python projects, making it easy to load, run and process the model's output. Designed with simplicity and ease of use in mind, the Python interface enables users to quickly implement object detection, segmentation and classification in their projects. This makes YOLOv8's Python interface an invaluable tool for anyone looking to incorporate these functionalities into their Python projects.
+
+Users can, for example, load a model, train it, evaluate its performance on a validation set and even export it to ONNX format, all with just a few lines of code. Check out the [Python Guide](/../usage/python.md) to learn more about using YOLOv8 within your Python projects.
+
+!!! Example "Example"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Create a new YOLO model from scratch
+    model = YOLO('yolov8n.yaml')
+
+    # Load a pretrained YOLO model (recommended for training)
+    model = YOLO('yolov8n.pt')
+
+    # Train the model using the 'coco128.yaml' dataset for 3 epochs
+    results = model.train(data='coco128.yaml', epochs=3)
+
+    # Evaluate the model's performance on the validation set
+    results = model.val()
+
+    # Perform object detection on an image using the model
+    results = model('https://ultralytics.com/images/bus.jpg')
+
+    # Export the model to ONNX format
+    success = model.export(format='onnx')
+    ```
+
+[Python Guide](/../usage/python.md){.md-button .md-button--primary}
diff --git a/ultralytics/docs/de/tasks/classify.md b/ultralytics/docs/de/tasks/classify.md
new file mode 100755
index 0000000..09622a0
--- /dev/null
+++ b/ultralytics/docs/de/tasks/classify.md
@@ -0,0 +1,172 @@
+---
+comments: true
+description: Learn about YOLOv8 Classify models for image classification. Get detailed information on the list of pretrained models and how to train, validate, predict and export models.
+keywords: Ultralytics, YOLOv8, image classification, pretrained models, YOLOv8n-cls, training, validation, prediction, model export
+---
+
+# Image Classification
+
+Image classification examples
+
+Image classification is the simplest of the three tasks and involves classifying an entire image into one of a set of predefined classes.
+
+The output of an image classifier is a single class label and a confidence score. Image classification is useful when you only need to know which class an image belongs to and don't need to know where objects of that class are located or what their exact shape is.
+
+!!! Tip "Tip"
+
+    YOLOv8 Classify models use the `-cls` suffix, i.e. `yolov8n-cls.pt`, and are pretrained on [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+YOLOv8 pretrained Classify models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset.
+
+[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
+
+| Model                                                                                        | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 |
+|----------------------------------------------------------------------------------------------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------|
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224              | 66.6        | 87.0        | 12.9                      | 0.31                           | 2.7           | 4.3                 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224              | 72.3        | 91.1        | 23.4                      | 0.35                           | 6.4           | 13.5                |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224              | 76.4        | 93.2        | 85.4                      | 0.62                           | 17.0          | 42.7                |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224              | 78.0        | 94.1        | 163.0                     | 0.87                           | 37.5          | 99.7                |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224              | 78.4        | 94.3        | 232.0                     | 1.01                           | 57.4          | 154.8               |
+
+- **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set.
Zur Reproduktion verwenden Sie `yolo val classify data=pfad/zu/ImageNet device=0`
+- **Geschwindigkeit** gemittelt รผber ImageNet-Validierungsbilder mit einer [Amazon EC2 P4d](https://aws.amazon.com/de/ec2/instance-types/p4/)-Instanz.
+    <br>
Zur Reproduktion verwenden Sie `yolo val classify data=pfad/zu/ImageNet batch=1 device=0|cpu`
+
+## Trainieren
+
+Trainieren Sie das YOLOv8n-cls-Modell auf dem MNIST160-Datensatz fรผr 100 Epochen bei BildgrรถรŸe 64. Eine vollstรคndige Liste der verfรผgbaren Argumente finden Sie auf der Seite [Konfiguration](/../usage/cfg.md).
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Ein Modell laden
+        model = YOLO('yolov8n-cls.yaml')  # ein neues Modell aus YAML erstellen
+        model = YOLO('yolov8n-cls.pt')  # ein vortrainiertes Modell laden (empfohlen fรผr das Training)
+        model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt')  # aus YAML erstellen und Gewichte รผbertragen
+
+        # Das Modell trainieren
+        results = model.train(data='mnist160', epochs=100, imgsz=64)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Ein neues Modell aus YAML erstellen und das Training von Grund auf starten
+        yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64
+
+        # Das Training von einem vortrainierten *.pt Modell starten
+        yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64
+
+        # Ein neues Modell aus YAML erstellen, vortrainierte Gewichte รผbertragen und das Training starten
+        yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64
+        ```
+
+### Datenformat
+
+Das Datenformat fรผr YOLO-Klassifizierungsdatensรคtze finden Sie im Detail im [Datenleitfaden](../../../datasets/classify/index.md).
+
+## Validieren
+
+Validieren Sie die Genauigkeit des trainierten YOLOv8n-cls-Modells auf dem MNIST160-Datensatz. Es muss kein Argument รผbergeben werden, da das `Modell` seine Trainings`daten` und Argumente als Modellattribute behรคlt.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Ein Modell laden
+        model = YOLO('yolov8n-cls.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # ein benutzerdefiniertes Modell laden
+
+        # Das Modell validieren
+        metrics = model.val()  # keine Argumente benรถtigt, Datensatz und Einstellungen gespeichert
+        metrics.top1  # top1 Genauigkeit
+        metrics.top5  # top5 Genauigkeit
+        ```
+    === "CLI"
+
+        ```bash
+        yolo classify val model=yolov8n-cls.pt  # ein offizielles Modell validieren
+        yolo classify val model=pfad/zu/best.pt  # ein benutzerdefiniertes Modell validieren
+        ```
+
+## Vorhersagen
+
+Verwenden Sie ein trainiertes YOLOv8n-cls-Modell, um Vorhersagen auf Bildern durchzufรผhren.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Ein Modell laden
+        model = YOLO('yolov8n-cls.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # ein benutzerdefiniertes Modell laden
+
+        # Mit dem Modell vorhersagen
+        results = model('https://ultralytics.com/images/bus.jpg')  # Vorhersage auf einem Bild
+        ```
+    === "CLI"
+
+        ```bash
+        yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg'  # mit offiziellem Modell vorhersagen
+        yolo classify predict model=pfad/zu/best.pt source='https://ultralytics.com/images/bus.jpg'  # mit benutzerdefiniertem Modell vorhersagen
+        ```
+
+Vollstรคndige Details zum `predict`-Modus finden Sie auf der Seite [Vorhersage](https://docs.ultralytics.com/modes/predict/).
+
+## Exportieren
+
+Exportieren Sie ein YOLOv8n-cls-Modell in ein anderes Format wie ONNX, CoreML usw.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Ein Modell laden
+        model = YOLO('yolov8n-cls.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # ein benutzerdefiniertes trainiertes Modell laden
+
+        # Das Modell exportieren
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-cls.pt format=onnx  # offizielles Modell exportieren
+        yolo export model=pfad/zu/best.pt format=onnx  # benutzerdefiniertes trainiertes Modell exportieren
+        ```
+
+Verfรผgbare YOLOv8-cls-Exportformate stehen in der folgenden Tabelle. Sie kรถnnen direkt auf exportierten Modellen vorhersagen oder validieren, d.h. `yolo predict model=yolov8n-cls.onnx`. Nutzungsbeispiele werden fรผr Ihr Modell nach Abschluss des Exports angezeigt.
+
+| Format                                                             | `format`-Argument | Modell                        | Metadaten | Argumente                                           |
+|--------------------------------------------------------------------|-------------------|-------------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n-cls.pt`              | โœ…        | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n-cls.torchscript`     | โœ…        | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n-cls.onnx`            | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`        | `yolov8n-cls_openvino_model/` | โœ…        | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n-cls.engine`          | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n-cls.mlpackage`       | โœ…        | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n-cls_saved_model/`    | โœ…        | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n-cls.pb`              | โŒ        | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n-cls.tflite`          | โœ…        | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n-cls_edgetpu.tflite`  | โœ…        | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n-cls_web_model/`      | โœ…        | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n-cls_paddle_model/`   | โœ…        | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n-cls_ncnn_model/`     | โœ…        | `imgsz`, `half`                                     |
+
+Vollstรคndige Details zum `export` finden Sie auf der Seite [Export](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/de/tasks/classify.md:Zone.Identifier b/ultralytics/docs/de/tasks/classify.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/tasks/classify.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/tasks/detect.md b/ultralytics/docs/de/tasks/detect.md
new file mode 100755
index 0000000..ab2848c
--- /dev/null
+++ b/ultralytics/docs/de/tasks/detect.md
@@ -0,0 +1,184 @@
+---
+comments: true
+description: Offizielle Dokumentation fรผr YOLOv8 von Ultralytics. Erfahren Sie, wie Sie Modelle trainieren, validieren, vorhersagen und in verschiedenen Formaten exportieren. EinschlieรŸlich detaillierter Leistungsstatistiken.
+keywords: YOLOv8, Ultralytics, Objekterkennung, vortrainierte Modelle, Training, Validierung, Vorhersage, Modell-Export, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# Objekterkennung + +Beispiele fรผr Objekterkennung + +Objekterkennung ist eine Aufgabe, die das Identifizieren der Position und Klasse von Objekten in einem Bild oder Videostream umfasst. + +Die Ausgabe eines Objekterkenners ist eine Menge von Begrenzungsrahmen, die die Objekte im Bild umschlieรŸen, zusammen mit Klassenbezeichnungen und Vertrauenswerten fรผr jedes Feld. Objekterkennung ist eine gute Wahl, wenn Sie Objekte von Interesse in einer Szene identifizieren mรผssen, aber nicht genau wissen mรผssen, wo das Objekt ist oder wie seine genaue Form ist. + +
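+
+Als kleine Skizze vorab, wie sich eine solche Erkennung in Python anstoรŸen lรคsst (die Vorhersage-Argumente `classes` und `conf` filtern nach Klassen-ID bzw. Mindestkonfidenz; Bild-URL und Schwellenwert sind frei gewรคhlte Annahmen):
+
+```python
+from ultralytics import YOLO
+
+# Vortrainiertes Erkennungsmodell laden
+model = YOLO('yolov8n.pt')
+
+# Nur Personen (COCO-Klasse 0) mit mindestens 50 % Konfidenz erkennen
+results = model('https://ultralytics.com/images/bus.jpg', classes=[0], conf=0.5)
+print(len(results[0].boxes))  # Anzahl der gefundenen Objekte
+```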

+    Sehen Sie: Objekterkennung mit vortrainiertem Ultralytics YOLOv8 Modell.
+
+!!! Tip "Tipp"
+
+    YOLOv8 Detect Modelle sind die Standard YOLOv8 Modelle, zum Beispiel `yolov8n.pt`, und sind vortrainiert auf dem [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)-Datensatz.
+
+## [Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Hier werden die vortrainierten YOLOv8 Detect Modelle gezeigt. Detect, Segment und Pose Modelle sind vortrainiert auf dem [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)-Datensatz, wรคhrend die Classify Modelle vortrainiert sind auf dem [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)-Datensatz.
+
+[Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) werden automatisch von der neuesten Ultralytics [Verรถffentlichung](https://github.com/ultralytics/assets/releases) bei Erstbenutzung heruntergeladen.
+
+| Modell                                                                               | GrรถรŸe<br><sup>(Pixel)</sup> | mAP<sup>val</sup><br>50-95 | Geschwindigkeit<br><sup>CPU ONNX<br>(ms)</sup> | Geschwindigkeit<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+|---------------------------------------------------------------------------------------|-----------------------------|----------------------------|------------------------------------------------|-----------------------------------------------------|--------------------------|-------------------------|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640                         | 37.3                       | 80.4                                           | 0.99                                                | 3.2                      | 8.7                     |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640                         | 44.9                       | 128.4                                          | 1.20                                                | 11.2                     | 28.6                    |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640                         | 50.2                       | 234.7                                          | 1.83                                                | 25.9                     | 78.9                    |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640                         | 52.9                       | 375.2                                          | 2.39                                                | 43.7                     | 165.2                   |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640                         | 53.9                       | 479.1                                          | 3.53                                                | 68.2                     | 257.8                   |
+
+- **mAPval** Werte sind fรผr Single-Modell Single-Scale auf dem [COCO val2017](http://cocodataset.org) Datensatz.
+    <br>
Reproduzieren mit `yolo val detect data=coco.yaml device=0` +- **Geschwindigkeit** gemittelt รผber COCO Val Bilder mit einer [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)-Instanz. +
Reproduzieren mit `yolo val detect data=coco128.yaml batch=1 device=0|cpu`
+
+## Training
+
+YOLOv8n auf dem COCO128-Datensatz fรผr 100 Epochen bei BildgrรถรŸe 640 trainieren. Fรผr eine vollstรคndige Liste verfรผgbarer Argumente siehe die [Konfigurationsseite](/../usage/cfg.md).
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n.yaml')  # ein neues Modell aus YAML aufbauen
+        model = YOLO('yolov8n.pt')  # ein vortrainiertes Modell laden (empfohlen fรผr Training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # aus YAML aufbauen und Gewichte รผbertragen
+
+        # Das Modell trainieren
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+    === "CLI"
+
+        ```bash
+        # Ein neues Modell aus YAML aufbauen und Training von Grund auf starten
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Training von einem vortrainierten *.pt Modell starten
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Ein neues Modell aus YAML aufbauen, vortrainierte Gewichte รผbertragen und Training starten
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### Datenformat
+
+Das Datenformat fรผr YOLO-Erkennungsdatensรคtze finden Sie detailliert im [Dataset Guide](../../../datasets/detect/index.md). Um Ihren vorhandenen Datensatz von anderen Formaten (wie COCO etc.) in das YOLO-Format zu konvertieren, verwenden Sie bitte das [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)-Tool von Ultralytics.
+
+## Validierung
+
+Genauigkeit des trainierten YOLOv8n-Modells auf dem COCO128-Datensatz validieren. Es mรผssen keine Argumente รผbergeben werden, da das `Modell` seine Trainingsdaten und Argumente als Modellattribute beibehรคlt.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zum/besten.pt')  # ein benutzerdefiniertes Modell laden
+
+        # Das Modell validieren
+        metrics = model.val()  # keine Argumente nรถtig, Datensatz und Einstellungen sind gespeichert
+        metrics.box.map    # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps   # eine Liste mit map50-95 fรผr jede Kategorie
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # offizielles Modell validieren
+        yolo detect val model=pfad/zum/besten.pt  # benutzerdefiniertes Modell validieren
+        ```
+
+## Vorhersage
+
+Ein trainiertes YOLOv8n-Modell verwenden, um Vorhersagen auf Bildern durchzufรผhren.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zum/besten.pt')  # ein benutzerdefiniertes Modell laden
+
+        # Mit dem Modell vorhersagen
+        results = model('https://ultralytics.com/images/bus.jpg')  # Vorhersage auf einem Bild
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'  # Vorhersage mit offiziellem Modell
+        yolo detect predict model=pfad/zum/besten.pt source='https://ultralytics.com/images/bus.jpg'  # Vorhersage mit benutzerdefiniertem Modell
+        ```
+
+Volle Details รผber den `predict`-Modus finden Sie auf der [Predict-Seite](https://docs.ultralytics.com/modes/predict/).
+
+## Export
+
+Ein YOLOv8n-Modell in ein anderes Format wie ONNX, CoreML usw. exportieren.
+
+!!!
Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n.pt') # ein offizielles Modell laden + model = YOLO('pfad/zum/besten.pt') # ein benutzerdefiniert trainiertes Modell laden + + # Das Modell exportieren + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # offizielles Modell exportieren + yolo export model=pfad/zum/besten.pt format=onnx # benutzerdefiniert trainiertes Modell exportieren + ``` + +Verfรผgbare YOLOv8 Exportformate sind in der untenstehenden Tabelle aufgefรผhrt. Sie kรถnnen direkt auf den exportierten Modellen Vorhersagen treffen oder diese validieren, zum Beispiel `yolo predict model=yolov8n.onnx`. Verwendungsbeispiele werden fรผr Ihr Modell nach Abschluss des Exports angezeigt. + +| Format | `format`-Argument | Modell | Metadaten | Argumente | +|--------------------------------------------------------------------|-------------------|---------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Volle Details zum `export` finden Sie auf der [Export-Seite](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/de/tasks/detect.md:Zone.Identifier b/ultralytics/docs/de/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/tasks/index.md b/ultralytics/docs/de/tasks/index.md new file mode 100755 index 0000000..4077c5e --- /dev/null +++ b/ultralytics/docs/de/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: Erfahren Sie, welche grundlegenden Aufgaben im Bereich der Computer Vision YOLOv8 durchfรผhren kann, einschlieรŸlich Erkennung, Segmentierung, Klassifizierung und Haltungsschรคtzung, und wie sie in Ihren KI-Projekten verwendet werden kรถnnen. 
+keywords: Ultralytics, YOLOv8, Erkennung, Segmentierung, Klassifizierung, Pose-Schรคtzung, KI-Framework, Computer Vision-Aufgaben +--- + +# Ultralytics YOLOv8 Aufgaben + +
+Ultralytics YOLO unterstรผtzte Aufgaben
+
+YOLOv8 ist ein KI-Framework, das mehrere Aufgaben im Bereich der Computer Vision **unterstรผtzt**. Das Framework kann fรผr die [Erkennung](detect.md), [Segmentierung](segment.md), [Klassifizierung](classify.md) und die [Pose](pose.md)-Schรคtzung verwendet werden. Jede dieser Aufgaben hat ein eigenes Ziel und eigene Anwendungsgebiete.
+
+!!! Note "Hinweis"
+
+    ๐Ÿšง Unsere mehrsprachige Dokumentation befindet sich derzeit im Aufbau, und wir arbeiten hart daran, sie zu verbessern. Danke fรผr Ihre Geduld! ๐Ÿ™
+

+    Schauen Sie zu: Entdecken Sie Ultralytics YOLO Aufgaben: Objekterkennung, Segmentierung, Verfolgung und Pose-Schรคtzung.
+ +## [Erkennung](detect.md) + +Erkennung ist die primรคre von YOLOv8 unterstรผtzte Aufgabe. Sie beinhaltet das Erkennen von Objekten in einem Bild oder Videobild und das Zeichnen von Rahmen um sie herum. Die erkannten Objekte werden anhand ihrer Merkmale in verschiedene Kategorien klassifiziert. YOLOv8 kann mehrere Objekte in einem einzelnen Bild oder Videobild mit hoher Genauigkeit und Geschwindigkeit erkennen. + +[Beispiele fรผr Erkennung](detect.md){ .md-button } + +## [Segmentierung](segment.md) + +Segmentierung ist eine Aufgabe, die das Aufteilen eines Bildes in unterschiedliche Regionen anhand des Bildinhalts beinhaltet. Jeder Region wird basierend auf ihrem Inhalt eine Markierung zugewiesen. Diese Aufgabe ist nรผtzlich in Anwendungen wie der Bildsegmentierung und medizinischen Bildgebung. YOLOv8 verwendet eine Variante der U-Net-Architektur, um die Segmentierung durchzufรผhren. + +[Beispiele fรผr Segmentierung](segment.md){ .md-button } + +## [Klassifizierung](classify.md) + +Klassifizierung ist eine Aufgabe, die das Einordnen eines Bildes in verschiedene Kategorien umfasst. YOLOv8 kann genutzt werden, um Bilder anhand ihres Inhalts zu klassifizieren. Es verwendet eine Variante der EfficientNet-Architektur, um die Klassifizierung durchzufรผhren. + +[Beispiele fรผr Klassifizierung](classify.md){ .md-button } + +## [Pose](pose.md) + +Die Pose-/Keypoint-Erkennung ist eine Aufgabe, die das Erkennen von spezifischen Punkten in einem Bild oder Videobild beinhaltet. Diese Punkte werden als Keypoints bezeichnet und werden zur Bewegungsverfolgung oder Pose-Schรคtzung verwendet. YOLOv8 kann Keypoints in einem Bild oder Videobild mit hoher Genauigkeit und Geschwindigkeit erkennen. + +[Beispiele fรผr Posen](pose.md){ .md-button } + +## Fazit + +YOLOv8 unterstรผtzt mehrere Aufgaben, einschlieรŸlich Erkennung, Segmentierung, Klassifizierung und Keypoint-Erkennung. Jede dieser Aufgaben hat unterschiedliche Ziele und Anwendungsgebiete. Durch das Verstรคndnis der Unterschiede zwischen diesen Aufgaben kรถnnen Sie die geeignete Aufgabe fรผr Ihre Anwendung im Bereich der Computer Vision auswรคhlen. diff --git a/ultralytics/docs/de/tasks/index.md:Zone.Identifier b/ultralytics/docs/de/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/de/tasks/pose.md b/ultralytics/docs/de/tasks/pose.md new file mode 100755 index 0000000..14d0f25 --- /dev/null +++ b/ultralytics/docs/de/tasks/pose.md @@ -0,0 +1,185 @@ +--- +comments: true +description: Erfahren Sie, wie Sie Ultralytics YOLOv8 fรผr Aufgaben der Pose-Schรคtzung verwenden kรถnnen. Finden Sie vortrainierte Modelle, lernen Sie, wie man eigene trainiert, validiert, vorhersagt und exportiert. +keywords: Ultralytics, YOLO, YOLOv8, Pose-Schรคtzung, Erkennung von Schlรผsselpunkten, Objekterkennung, vortrainierte Modelle, maschinelles Lernen, kรผnstliche Intelligenz +--- + +# Pose-Schรคtzung + +![Beispiele fรผr die Pose-Schรคtzung](https://user-images.githubusercontent.com/26833433/243418616-9811ac0b-a4a7-452a-8aba-484ba32bb4a8.png) + +Die Pose-Schรคtzung ist eine Aufgabe, die das Identifizieren der Lage spezifischer Punkte in einem Bild beinhaltet, die normalerweise als Schlรผsselpunkte bezeichnet werden. Die Schlรผsselpunkte kรถnnen verschiedene Teile des Objekts wie Gelenke, Landmarken oder andere charakteristische Merkmale reprรคsentieren. 
Die Positionen der Schlรผsselpunkte werden รผblicherweise als eine Gruppe von 2D-`[x, y]`- oder 3D-`[x, y, sichtbar]`-Koordinaten dargestellt.
+
+Das Ergebnis eines Pose-Schรคtzungsmodells ist eine Gruppe von Punkten, die die Schlรผsselpunkte auf einem Objekt im Bild darstellen, normalerweise zusammen mit den Konfidenzwerten fรผr jeden Punkt. Die Pose-Schรคtzung eignet sich gut, wenn Sie spezifische Teile eines Objekts in einer Szene identifizieren und deren Lage zueinander bestimmen mรผssen.
+
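+
+Als kleine Skizze, wie sich die vorhergesagten Schlรผsselpunkte in Python auslesen lassen (das Attribut `keypoints` mit `xy` und `conf` entspricht der Results-API von Ultralytics; die Bild-URL ist nur eine Beispielannahme):
+
+```python
+from ultralytics import YOLO
+
+# Vortrainiertes Pose-Modell laden und eine Vorhersage ausfรผhren
+model = YOLO('yolov8n-pose.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# Schlรผsselpunkte der erkannten Personen auslesen
+kpts = results[0].keypoints
+if kpts is not None:
+    print(kpts.xy)    # 2D-Pixelkoordinaten [x, y] je Schlรผsselpunkt und Person
+    print(kpts.conf)  # Konfidenz je Schlรผsselpunkt (sofern vom Modell geliefert)
+```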

+    Ansehen: Pose-Schรคtzung mit Ultralytics YOLOv8.
+
+!!! Tip "Tipp"
+
+    YOLOv8 _pose_-Modelle verwenden das Suffix `-pose`, z. B. `yolov8n-pose.pt`. Diese Modelle sind auf dem [COCO-Schlรผsselpunkte](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml)-Datensatz trainiert und fรผr eine Vielzahl von Pose-Schรคtzungsaufgaben geeignet.
+
+## [Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Hier werden vortrainierte YOLOv8 Pose-Modelle gezeigt. Erkennungs-, Segmentierungs- und Pose-Modelle sind auf dem [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)-Datensatz vortrainiert, wรคhrend Klassifizierungsmodelle auf dem [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)-Datensatz vortrainiert sind.
+
+[Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) werden automatisch aus der neuesten Ultralytics-[Verรถffentlichung](https://github.com/ultralytics/assets/releases) bei erstmaliger Verwendung heruntergeladen.
+
+| Modell                                                                                               | GrรถรŸe<br><sup>(Pixel)</sup> | mAP<sup>pose</sup><br>50-95 | mAP<sup>pose</sup><br>50 | Geschwindigkeit<br><sup>CPU ONNX<br>(ms)</sup> | Geschwindigkeit<br><sup>A100 TensorRT<br>(ms)</sup> | Parameter<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+|------------------------------------------------------------------------------------------------------|-----------------------------|-----------------------------|--------------------------|------------------------------------------------|-----------------------------------------------------|-----------------------------|-------------------------|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt)       | 640                         | 50,4                        | 80,1                     | 131,8                                          | 1,18                                                | 3,3                         | 9,2                     |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt)       | 640                         | 60,0                        | 86,2                     | 233,2                                          | 1,42                                                | 11,6                        | 30,2                    |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt)       | 640                         | 65,0                        | 88,8                     | 456,3                                          | 2,00                                                | 26,4                        | 81,0                    |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt)       | 640                         | 67,6                        | 90,0                     | 784,5                                          | 2,59                                                | 44,4                        | 168,6                   |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt)       | 640                         | 69,2                        | 90,2                     | 1607,1                                         | 3,73                                                | 69,4                        | 263,2                   |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280                        | 71,6                        | 91,2                     | 4088,7                                         | 10,04                                               | 99,1                        | 1066,4                  |
+
+- **mAPval** Werte gelten fรผr ein einzelnes Modell mit einfacher Skala auf dem [COCO Keypoints val2017](http://cocodataset.org)-Datensatz.
+    <br>
Zu reproduzieren mit `yolo val pose data=coco-pose.yaml device=0`. +- **Geschwindigkeit** gemittelt รผber COCO-Validierungsbilder mit einer [Amazon EC2 P4d](https://aws.amazon.com/de/ec2/instance-types/p4/)-Instanz. +
Zu reproduzieren mit `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu`. + +## Trainieren + +Trainieren Sie ein YOLOv8-Pose-Modell auf dem COCO128-Pose-Datensatz. + +!!! Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n-pose.yaml') # ein neues Modell aus YAML bauen + model = YOLO('yolov8n-pose.pt') # ein vortrainiertes Modell laden (empfohlen fรผr das Training) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # aus YAML bauen und Gewichte รผbertragen + + # Modell trainieren + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Ein neues Modell aus YAML bauen und das Training von Grund auf starten + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Training von einem vortrainierten *.pt Modell starten + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Ein neues Modell aus YAML bauen, vortrainierte Gewichte รผbertragen und das Training starten + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Datensatzformat + +Das YOLO-Pose-Datensatzformat finden Sie detailliert im [Datensatz-Leitfaden](../../../datasets/pose/index.md). Um Ihren bestehenden Datensatz aus anderen Formaten (wie COCO usw.) in das YOLO-Format zu konvertieren, verwenden Sie bitte das [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)-Tool von Ultralytics. + +## Validieren + +Die Genauigkeit des trainierten YOLOv8n-Pose-Modells auf dem COCO128-Pose-Datensatz validieren. Es mรผssen keine Argumente รผbergeben werden, da das `Modell` seine Trainings`daten` und Argumente als Modellattribute beibehรคlt. + +!!! Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n-pose.pt') # ein offizielles Modell laden + model = YOLO('pfad/zu/best.pt') # ein benutzerdefiniertes Modell laden + + # Modell validieren + metrics = model.val() # keine Argumente nรถtig, Datensatz und Einstellungen sind gespeichert + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # Liste enthรคlt map50-95 jeder Kategorie + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # offizielles Modell validieren + yolo pose val model=pfad/zu/best.pt # benutzerdefiniertes Modell validieren + ``` + +## Vorhersagen + +Ein trainiertes YOLOv8n-Pose-Modell verwenden, um Vorhersagen auf Bildern zu machen. + +!!! Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n-pose.pt') # ein offizielles Modell laden + model = YOLO('pfad/zu/best.pt') # ein benutzerdefiniertes Modell laden + + # Mit dem Modell Vorhersagen machen + results = model('https://ultralytics.com/images/bus.jpg') # Vorhersage auf einem Bild machen + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # Vorhersage mit dem offiziellen Modell machen + yolo pose predict model=pfad/zu/best.pt source='https://ultralytics.com/images/bus.jpg' # Vorhersage mit dem benutzerdefinierten Modell machen + ``` + +Vollstรคndige `predict`-Modusdetails finden Sie auf der [Vorhersage](https://docs.ultralytics.com/modes/predict/)-Seite. + +## Exportieren + +Ein YOLOv8n-Pose-Modell in ein anderes Format wie ONNX, CoreML usw. exportieren. + +!!! 
Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n-pose.pt')  # ein offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # ein benutzerdefiniertes Modell laden
+
+        # Modell exportieren
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-pose.pt format=onnx  # offizielles Modell exportieren
+        yolo export model=pfad/zu/best.pt format=onnx  # benutzerdefiniertes Modell exportieren
+        ```
+
+Verfรผgbare YOLOv8-Pose-Exportformate sind in der folgenden Tabelle aufgefรผhrt. Sie kรถnnen direkt auf exportierten Modellen vorhersagen oder validieren, z. B. `yolo predict model=yolov8n-pose.onnx`. Verwendungsbeispiele werden fรผr Ihr Modell nach Abschluss des Exports angezeigt. Die `format`- und Argumentnamen sind wรถrtliche CLI-Parameter und bleiben unรผbersetzt.
+
+| Format                                                             | `format` Argument | Modell                         | Metadaten | Argumente                                           |
+|--------------------------------------------------------------------|-------------------|--------------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n-pose.pt`              | โœ…        | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n-pose.torchscript`     | โœ…        | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n-pose.onnx`            | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`        | `yolov8n-pose_openvino_model/` | โœ…        | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n-pose.engine`          | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n-pose.mlpackage`       | โœ…        | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n-pose_saved_model/`    | โœ…        | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n-pose.pb`              | โŒ        | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n-pose.tflite`          | โœ…        | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n-pose_edgetpu.tflite`  | โœ…        | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n-pose_web_model/`      | โœ…        | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n-pose_paddle_model/`   | โœ…        | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n-pose_ncnn_model/`     | โœ…        | `imgsz`, `half`                                     |
+
+Vollstรคndige `export`-Details finden Sie auf der [Export](https://docs.ultralytics.com/modes/export/)-Seite.
diff --git a/ultralytics/docs/de/tasks/pose.md:Zone.Identifier b/ultralytics/docs/de/tasks/pose.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/de/tasks/pose.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/de/tasks/segment.md b/ultralytics/docs/de/tasks/segment.md
new file mode 100755
index 0000000..a70909f
--- /dev/null
+++ b/ultralytics/docs/de/tasks/segment.md
@@ -0,0 +1,188 @@
+---
+comments: true
+description: Erfahren Sie, wie Sie Instanzsegmentierungsmodelle mit Ultralytics YOLO verwenden. Anleitungen zum Training, zur Validierung, zur Bildvorhersage und zum Export von Modellen.
+keywords: yolov8, Instanzsegmentierung, Ultralytics, COCO-Datensatz, Bildsegmentierung, Objekterkennung, Modelltraining, Modellvalidierung, Bildvorhersage, Modellexport
+---
+
+# Instanzsegmentierung
+
+![Beispiele fรผr Instanzsegmentierung](https://user-images.githubusercontent.com/26833433/243418644-7df320b8-098d-47f1-85c5-26604d761286.png)
+
+Instanzsegmentierung geht einen Schritt weiter als die Objekterkennung und beinhaltet die Identifizierung einzelner Objekte in einem Bild und deren Abtrennung vom Rest des Bildes.
+
+Das Ergebnis eines Instanzsegmentierungsmodells ist eine Reihe von Masken oder Konturen, die jedes Objekt im Bild umreiรŸen, zusammen mit Klassenbezeichnungen und Vertrauensscores fรผr jedes Objekt. Instanzsegmentierung ist nรผtzlich, wenn man nicht nur wissen muss, wo sich Objekte in einem Bild befinden, sondern auch, welche genaue Form sie haben.
+
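+Als kleine Skizze, wie sich die vorhergesagten Masken in Python auslesen lassen (das Attribut `masks` mit `xy` entspricht der Results-API von Ultralytics; die Bild-URL ist nur eine Beispielannahme):
+
+```python
+from ultralytics import YOLO
+
+# Vortrainiertes Segmentierungsmodell laden und eine Vorhersage ausfรผhren
+model = YOLO('yolov8n-seg.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# Masken der erkannten Objekte auslesen
+masks = results[0].masks
+if masks is not None:
+    print(len(masks))   # Anzahl der erkannten Instanzen
+    print(masks.xy[0])  # Konturpunkte der ersten Maske in Pixelkoordinaten
+```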

+    Schauen Sie: Fรผhren Sie Segmentierung mit dem vortrainierten Ultralytics YOLOv8 Modell in Python aus.
+
+!!! Tip "Tipp"
+
+    YOLOv8 Segment-Modelle verwenden das Suffix `-seg`, d.h. `yolov8n-seg.pt`, und sind auf dem [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)-Datensatz vortrainiert.
+
+## [Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Hier werden vortrainierte YOLOv8 Segment-Modelle gezeigt. Detect-, Segment- und Pose-Modelle sind auf dem [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)-Datensatz vortrainiert, wรคhrend Classify-Modelle auf dem [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)-Datensatz vortrainiert sind.
+
+[Modelle](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) laden sich beim ersten Gebrauch automatisch von der neuesten Ultralytics [Verรถffentlichung](https://github.com/ultralytics/assets/releases) herunter.
+
+| Modell                                                                                       | GrรถรŸe<br><sup>(Pixel)</sup> | mAP<sup>Kasten</sup><br>50-95 | mAP<sup>Masken</sup><br>50-95 | Geschwindigkeit<br><sup>CPU ONNX<br>(ms)</sup> | Geschwindigkeit<br><sup>A100 TensorRT<br>(ms)</sup> | Parameter<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+|----------------------------------------------------------------------------------------------|-----------------------------|-------------------------------|-------------------------------|------------------------------------------------|-----------------------------------------------------|-----------------------------|-------------------------|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640                         | 36.7                          | 30.5                          | 96.1                                           | 1.21                                                | 3.4                         | 12.6                    |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640                         | 44.6                          | 36.8                          | 155.7                                          | 1.47                                                | 11.8                        | 42.6                    |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640                         | 49.9                          | 40.8                          | 317.0                                          | 2.18                                                | 27.3                        | 110.2                   |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640                         | 52.3                          | 42.6                          | 572.4                                          | 2.79                                                | 46.0                        | 220.5                   |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640                         | 53.4                          | 43.4                          | 712.1                                          | 4.02                                                | 71.8                        | 344.1                   |
+
+- Die **mAPval**-Werte sind fรผr ein einzelnes Modell, einzelne Skala auf dem [COCO val2017](http://cocodataset.org)-Datensatz.
+    <br>
Zum Reproduzieren nutzen Sie `yolo val segment data=coco.yaml device=0` +- Die **Geschwindigkeit** ist รผber die COCO-Validierungsbilder gemittelt und verwendet eine [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)-Instanz. +
Zum Reproduzieren `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Training + +Trainieren Sie YOLOv8n-seg auf dem COCO128-seg-Datensatz fรผr 100 Epochen mit einer BildgrรถรŸe von 640. Eine vollstรคndige Liste der verfรผgbaren Argumente finden Sie auf der Seite [Konfiguration](/../usage/cfg.md). + +!!! Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n-seg.yaml') # ein neues Modell aus YAML erstellen + model = YOLO('yolov8n-seg.pt') # ein vortrainiertes Modell laden (empfohlen fรผr das Training) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # aus YAML erstellen und Gewichte รผbertragen + + # Das Modell trainieren + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Ein neues Modell aus YAML erstellen und das Training von vorne beginnen + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Das Training von einem vortrainierten *.pt Modell aus starten + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Ein neues Modell aus YAML erstellen, vortrainierte Gewichte darauf รผbertragen und das Training beginnen + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Datenformat + +Das YOLO Segmentierungsdatenformat finden Sie detailliert im [Dataset Guide](../../../datasets/segment/index.md). Um Ihre vorhandenen Daten aus anderen Formaten (wie COCO usw.) in das YOLO-Format umzuwandeln, verwenden Sie bitte das [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)-Tool von Ultralytics. + +## Val + +Validieren Sie die Genauigkeit des trainierten YOLOv8n-seg-Modells auf dem COCO128-seg-Datensatz. Es mรผssen keine Argumente รผbergeben werden, da das `Modell` seine Trainingsdaten und -argumente als Modellattribute behรคlt. + +!!! Example "Beispiel" + + === "Python" + + ```python + from ultralytics import YOLO + + # Modell laden + model = YOLO('yolov8n-seg.pt') # offizielles Modell laden + model = YOLO('pfad/zu/best.pt') # benutzerdefiniertes Modell laden + + # Das Modell validieren + metrics = model.val() # Keine Argumente erforderlich, Datensatz und Einstellungen werden behalten + metrics.box.map # mAP50-95(B) + metrics.box.map50 # mAP50(B) + metrics.box.map75 # mAP75(B) + metrics.box.maps # eine Liste enthรคlt mAP50-95(B) fรผr jede Kategorie + metrics.seg.map # mAP50-95(M) + metrics.seg.map50 # mAP50(M) + metrics.seg.map75 # mAP75(M) + metrics.seg.maps # eine Liste enthรคlt mAP50-95(M) fรผr jede Kategorie + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # offizielles Modell validieren + yolo segment val model=pfad/zu/best.pt # benutzerdefiniertes Modell validieren + ``` + +## Predict + +Verwenden Sie ein trainiertes YOLOv8n-seg-Modell fรผr Vorhersagen auf Bildern. + +!!! 
Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n-seg.pt')  # offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # benutzerdefiniertes Modell laden
+
+        # Mit dem Modell Vorhersagen treffen
+        results = model('https://ultralytics.com/images/bus.jpg')  # Vorhersage auf einem Bild
+        ```
+    === "CLI"
+
+        ```bash
+        yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg'  # Vorhersage mit offiziellem Modell treffen
+        yolo segment predict model=pfad/zu/best.pt source='https://ultralytics.com/images/bus.jpg'  # Vorhersage mit benutzerdefiniertem Modell treffen
+        ```
+
+Die vollstรคndigen Details zum `predict`-Modus finden Sie auf der Seite [Predict](https://docs.ultralytics.com/modes/predict/).
+
+## Export
+
+Exportieren Sie ein YOLOv8n-seg-Modell in ein anderes Format wie ONNX, CoreML usw.
+
+!!! Example "Beispiel"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Modell laden
+        model = YOLO('yolov8n-seg.pt')  # offizielles Modell laden
+        model = YOLO('pfad/zu/best.pt')  # benutzerdefiniertes trainiertes Modell laden
+
+        # Das Modell exportieren
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-seg.pt format=onnx  # offizielles Modell exportieren
+        yolo export model=pfad/zu/best.pt format=onnx  # benutzerdefiniertes trainiertes Modell exportieren
+        ```
+
+Die verfรผgbaren YOLOv8-seg-Exportformate sind in der folgenden Tabelle aufgefรผhrt. Sie kรถnnen direkt auf exportierten Modellen Vorhersagen treffen oder sie validieren, z.B. `yolo predict model=yolov8n-seg.onnx`. Verwendungsbeispiele werden fรผr Ihr Modell nach dem Export angezeigt. Die Argumentnamen sind wรถrtliche CLI-Parameter und bleiben unรผbersetzt.
+
+| Format                                                             | `format`-Argument | Modell                        | Metadaten | Argumente                                           |
+|--------------------------------------------------------------------|-------------------|-------------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n-seg.pt`              | โœ…        | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n-seg.torchscript`     | โœ…        | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n-seg.onnx`            | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`        | `yolov8n-seg_openvino_model/` | โœ…        | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n-seg.engine`          | โœ…        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n-seg.mlpackage`       | โœ…        | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n-seg_saved_model/`    | โœ…        | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n-seg.pb`              | โŒ        | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n-seg.tflite`          | โœ…        | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n-seg_edgetpu.tflite`  | โœ…        | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n-seg_web_model/`      | โœ…        | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n-seg_paddle_model/`   | โœ…        | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n-seg_ncnn_model/`     | โœ…        | `imgsz`, `half`                                     |
+
+Die vollstรคndigen
Details zum `export` finden Sie auf der Seite [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/de/tasks/segment.md:Zone.Identifier b/ultralytics/docs/de/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/de/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/CNAME b/ultralytics/docs/en/CNAME new file mode 100755 index 0000000..339382a --- /dev/null +++ b/ultralytics/docs/en/CNAME @@ -0,0 +1 @@ +docs.ultralytics.com diff --git a/ultralytics/docs/en/CNAME:Zone.Identifier b/ultralytics/docs/en/CNAME:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/CNAME:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/azureml-quickstart.md b/ultralytics/docs/en/guides/azureml-quickstart.md new file mode 100755 index 0000000..56b1cea --- /dev/null +++ b/ultralytics/docs/en/guides/azureml-quickstart.md @@ -0,0 +1,152 @@ +--- +comments: true +description: Step-by-step Quickstart Guide to Running YOLOv8 Object Detection Models on AzureML for Fast Prototyping and Testing +keywords: Ultralytics, YOLOv8, Object Detection, Azure Machine Learning, Quickstart Guide, Prototype, Compute Instance, Terminal, Notebook, IPython Kernel, CLI, Python SDK +--- + +# YOLOv8 ๐Ÿš€ on AzureML + +## What is Azure? + +[Azure](https://azure.microsoft.com/) is Microsoft's cloud computing platform, designed to help organizations move their workloads to the cloud from on-premises data centers. With the full spectrum of cloud services including those for computing, databases, analytics, machine learning, and networking, users can pick and choose from these services to develop and scale new applications, or run existing applications, in the public cloud. + +## What is Azure Machine Learning (AzureML)? + +Azure Machine Learning, commonly referred to as AzureML, is a fully managed cloud service that enables data scientists and developers to efficiently embed predictive analytics into their applications, helping organizations use massive data sets and bring all the benefits of the cloud to machine learning. AzureML offers a variety of services and capabilities aimed at making machine learning accessible, easy to use, and scalable. It provides capabilities like automated machine learning, drag-and-drop model training, as well as a robust Python SDK so that developers can make the most out of their machine learning models. + +## How Does AzureML Benefit YOLO Users? + +For users of YOLO (You Only Look Once), AzureML provides a robust, scalable, and efficient platform to both train and deploy machine learning models. Whether you are looking to run quick prototypes or scale up to handle more extensive data, AzureML's flexible and user-friendly environment offers various tools and services to fit your needs. You can leverage AzureML to: + +- Easily manage large datasets and computational resources for training. +- Utilize built-in tools for data preprocessing, feature selection, and model training. +- Collaborate more efficiently with capabilities for MLOps (Machine Learning Operations), including but not limited to monitoring, auditing, and versioning of models and data. + +In the subsequent sections, you will find a quickstart guide detailing how to run YOLOv8 object detection models using AzureML, either from a compute terminal or a notebook. 
+
+## Prerequisites
+
+Before you can get started, make sure you have access to an AzureML workspace. If you don't have one, you can create a new [AzureML workspace](https://learn.microsoft.com/azure/machine-learning/concept-workspace?view=azureml-api-2) by following Azure's official documentation. This workspace acts as a centralized place to manage all AzureML resources.
+
+## Create a compute instance
+
+From your AzureML workspace, select Compute > Compute instances > New, and select the instance with the resources you need.
+

+    Create Azure Compute Instance
+ +## Quickstart from Terminal + +Start your compute and open a Terminal: + +

+    Open Terminal
+
+### Create virtualenv
+
+Create your conda virtualenv and install pip in it:
+
+```bash
+conda create --name yolov8env -y
+conda activate yolov8env
+conda install pip -y
+```
+
+Install the required dependencies:
+
+```bash
+cd ultralytics
+pip install -r requirements.txt
+pip install ultralytics
+pip install "onnx>=1.12.0"
+```
+
+### Perform YOLOv8 tasks
+
+Predict:
+
+```bash
+yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+```
+
+Train a detection model for 10 epochs with an initial learning_rate of 0.01:
+
+```bash
+yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
+```
+
+You can find more [instructions to use the Ultralytics CLI here](../quickstart.md#use-ultralytics-with-cli).
+
+## Quickstart from a Notebook
+
+### Create a new IPython kernel
+
+Open the compute Terminal.
+

+    Open Terminal
+
+From your compute terminal, you need to create a new ipykernel that will be used by your notebook to manage your dependencies:
+
+```bash
+conda create --name yolov8env -y
+conda activate yolov8env
+conda install pip -y
+conda install ipykernel -y
+python -m ipykernel install --user --name yolov8env --display-name "yolov8env"
+```
+
+Close your terminal and create a new notebook. From your Notebook, you can select the new kernel.
+
+Then you can open a Notebook cell and install the required dependencies:
+
+```bash
+%%bash
+source activate yolov8env
+cd ultralytics
+pip install -r requirements.txt
+pip install ultralytics
+pip install "onnx>=1.12.0"
+```
+
+Note that we need to use the `source activate yolov8env` command in all the %%bash cells, to make sure that each %%bash cell uses the environment we want.
+
+Run some predictions using the [Ultralytics CLI](../quickstart.md#use-ultralytics-with-cli):
+
+```bash
+%%bash
+source activate yolov8env
+yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+```
+
+Or with the [Ultralytics Python interface](../quickstart.md#use-ultralytics-with-python), for example to train the model:
+
+```python
+from ultralytics import YOLO
+
+# Load a model
+model = YOLO("yolov8n.pt")  # load an official YOLOv8n model
+
+# Use the model
+model.train(data="coco128.yaml", epochs=3)  # train the model
+metrics = model.val()  # evaluate model performance on the validation set
+results = model("https://ultralytics.com/images/bus.jpg")  # predict on an image
+path = model.export(format="onnx")  # export the model to ONNX format
+```
+
+You can use either the Ultralytics CLI or Python interface for running YOLOv8 tasks, as described in the terminal section above.
+
+By following these steps, you should be able to get YOLOv8 running quickly on AzureML for quick trials. For more advanced uses, you may refer to the full AzureML documentation linked at the beginning of this guide.
+
+## Explore More with AzureML
+
+This guide serves as an introduction to get you up and running with YOLOv8 on AzureML. However, it only scratches the surface of what AzureML can offer. To delve deeper and unlock the full potential of AzureML for your machine learning projects, consider exploring the following resources:
+
+- [Create a Data Asset](https://learn.microsoft.com/azure/machine-learning/how-to-create-data-assets): Learn how to set up and manage your data assets effectively within the AzureML environment.
+- [Initiate an AzureML Job](https://learn.microsoft.com/azure/machine-learning/how-to-train-model): Get a comprehensive understanding of how to kickstart your machine learning training jobs on AzureML.
+- [Register a Model](https://learn.microsoft.com/azure/machine-learning/how-to-manage-models): Familiarize yourself with model management practices including registration, versioning, and deployment.
+- [Train YOLOv8 with AzureML Python SDK](https://medium.com/@ouphi/how-to-train-the-yolov8-model-with-azure-machine-learning-python-sdk-8268696be8ba): Explore a step-by-step guide on using the AzureML Python SDK to train your YOLOv8 models; a minimal job sketch is shown below.
+- [Train YOLOv8 with AzureML CLI](https://medium.com/@ouphi/how-to-train-the-yolov8-model-with-azureml-and-the-az-cli-73d3c870ba8e): Discover how to utilize the command-line interface for streamlined training and management of YOLOv8 models on AzureML.
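+
+As a rough illustration of the SDK route above, the sketch below submits a YOLOv8 training run as an AzureML command job with the `azure-ai-ml` (SDK v2) package. The subscription, resource group, workspace, compute name, and environment reference are placeholder assumptions; adapt them to your own setup.
+
+```python
+from azure.ai.ml import MLClient, command
+from azure.identity import DefaultAzureCredential
+
+# Connect to the workspace (all identifiers below are placeholders)
+ml_client = MLClient(
+    DefaultAzureCredential(),
+    subscription_id="<subscription-id>",
+    resource_group_name="<resource-group>",
+    workspace_name="<workspace-name>",
+)
+
+# Define a command job that installs ultralytics and trains YOLOv8n
+job = command(
+    command="pip install ultralytics && yolo train data=coco128.yaml model=yolov8n.pt epochs=3",
+    environment="azureml://registries/azureml/environments/sklearn-1.1/versions/4",  # any Python-capable environment works
+    compute="<compute-name>",  # name of an existing compute target
+    display_name="yolov8-train",
+)
+
+# Submit the job; it will be tracked in the workspace
+ml_client.create_or_update(job)
+```
+
+Submitting training through a job, rather than an interactive terminal, keeps the run reproducible and logged in the workspace.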
diff --git a/ultralytics/docs/en/guides/azureml-quickstart.md:Zone.Identifier b/ultralytics/docs/en/guides/azureml-quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/azureml-quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/conda-quickstart.md b/ultralytics/docs/en/guides/conda-quickstart.md new file mode 100755 index 0000000..21f3b89 --- /dev/null +++ b/ultralytics/docs/en/guides/conda-quickstart.md @@ -0,0 +1,132 @@ +--- +comments: true +description: Comprehensive guide to setting up and using Ultralytics YOLO models in a Conda environment. Learn how to install the package, manage dependencies, and get started with object detection projects. +keywords: Ultralytics, YOLO, Conda, environment setup, object detection, package installation, deep learning, machine learning, guide +--- + +# Conda Quickstart Guide for Ultralytics + +

+    Ultralytics Conda Package Visual
+
+This guide provides a comprehensive introduction to setting up a Conda environment for your Ultralytics projects. Conda is an open-source package and environment management system that offers an excellent alternative to pip for installing packages and dependencies. Its isolated environments make it particularly well-suited for data science and machine learning endeavors. For more details, visit the Ultralytics Conda package on [Anaconda](https://anaconda.org/conda-forge/ultralytics) and check out the Ultralytics feedstock repository for package updates on [GitHub](https://github.com/conda-forge/ultralytics-feedstock/).
+
+[![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics)
+
+## What You Will Learn
+
+- Setting up a Conda environment
+- Installing Ultralytics via Conda
+- Initializing Ultralytics in your environment
+- Using Ultralytics Docker images with Conda
+
+---
+
+## Prerequisites
+
+- You should have Anaconda or Miniconda installed on your system. If not, download and install it from [Anaconda](https://www.anaconda.com/) or [Miniconda](https://docs.conda.io/projects/miniconda/en/latest/).
+
+---
+
+## Setting up a Conda Environment
+
+First, let's create a new Conda environment. Open your terminal and run the following command:
+
+```bash
+conda create --name ultralytics-env python=3.8 -y
+```
+
+Activate the new environment:
+
+```bash
+conda activate ultralytics-env
+```
+
+---
+
+## Installing Ultralytics
+
+You can install the Ultralytics package from the conda-forge channel. Execute the following command:
+
+```bash
+conda install -c conda-forge ultralytics
+```
+
+### Note on CUDA Environment
+
+If you're working in a CUDA-enabled environment, it's a good practice to install `ultralytics`, `pytorch`, and `pytorch-cuda` together to resolve any conflicts:
+
+```bash
+conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics
+```
+
+---
+
+## Using Ultralytics
+
+With Ultralytics installed, you can now start using its robust features for object detection, instance segmentation, and more. For example, to predict an image, you can run:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')  # initialize model
+results = model('path/to/image.jpg')  # perform inference; returns a list of Results
+results[0].show()  # display results for the first image
+```
+
+---
+
+## Ultralytics Conda Docker Image
+
+If you prefer using Docker, Ultralytics offers Docker images with a Conda environment included. You can pull these images from [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics).
+
+Pull the latest Ultralytics image:
+
+```bash
+# Set image name as a variable
+t=ultralytics/ultralytics:latest-conda
+
+# Pull the latest Ultralytics image from Docker Hub
+sudo docker pull $t
+```
+
+Run the image:
+
+```bash
+# Run the Ultralytics image in a container with GPU support
+sudo docker run -it --ipc=host --gpus all $t  # all GPUs
+sudo docker run -it --ipc=host --gpus '"device=2,3"' $t  # specify GPUs
+```
+
+---
+
+## Speeding Up Installation with Libmamba
+
+If you're looking to [speed up the package installation](https://www.anaconda.com/blog/a-faster-conda-for-a-growing-community) process in Conda, you can opt to use `libmamba`, a fast, cross-platform, and dependency-aware package manager that serves as an alternative solver to Conda's default.
+
+### How to Enable Libmamba
+
+To enable `libmamba` as the solver for Conda, you can perform the following steps:
+
+1. First, install the `conda-libmamba-solver` package. This can be skipped if your Conda version is 4.11 or above, as `libmamba` is included by default.
+
+    ```bash
+    conda install conda-libmamba-solver
+    ```
+
+2. Next, configure Conda to use `libmamba` as the solver:
+
+    ```bash
+    conda config --set solver libmamba
+    ```
+
+And that's it! Your Conda installation will now use `libmamba` as the solver, which should result in a faster package installation process.
+
+---
+
+Congratulations! You have successfully set up a Conda environment, installed the Ultralytics package, and are now ready to explore its rich functionalities. Feel free to dive deeper into the [Ultralytics documentation](../index.md) for more advanced tutorials and examples.
diff --git a/ultralytics/docs/en/guides/conda-quickstart.md:Zone.Identifier b/ultralytics/docs/en/guides/conda-quickstart.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/guides/conda-quickstart.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/guides/docker-quickstart.md b/ultralytics/docs/en/guides/docker-quickstart.md
new file mode 100755
index 0000000..6c45268
--- /dev/null
+++ b/ultralytics/docs/en/guides/docker-quickstart.md
@@ -0,0 +1,119 @@
+---
+comments: true
+description: Complete guide to setting up and using Ultralytics YOLO models with Docker. Learn how to install Docker, manage GPU support, and run YOLO models in isolated containers.
+keywords: Ultralytics, YOLO, Docker, GPU, containerization, object detection, package installation, deep learning, machine learning, guide
+---
+
+# Docker Quickstart Guide for Ultralytics
+

+ Ultralytics Docker Package Visual +

+
+This guide serves as a comprehensive introduction to setting up a Docker environment for your Ultralytics projects. [Docker](https://docker.com/) is a platform for developing, shipping, and running applications in containers. It is particularly beneficial for ensuring that the software will always run the same, regardless of where it's deployed. For more details, visit the Ultralytics Docker repository on [Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics).
+
+[![Docker Pulls](https://img.shields.io/docker/pulls/ultralytics/ultralytics?logo=docker)](https://hub.docker.com/r/ultralytics/ultralytics)
+
+## What You Will Learn
+
+- Setting up Docker with NVIDIA support
+- Installing Ultralytics Docker images
+- Running Ultralytics in a Docker container
+- Mounting local directories into the container
+
+---
+
+## Prerequisites
+
+- Make sure Docker is installed on your system. If not, you can download and install it from [Docker's website](https://www.docker.com/products/docker-desktop).
+- Ensure that your system has an NVIDIA GPU and that NVIDIA drivers are installed.
+
+---
+
+## Setting up Docker with NVIDIA Support
+
+First, verify that the NVIDIA drivers are properly installed by running:
+
+```bash
+nvidia-smi
+```
+
+### Installing NVIDIA Docker Runtime
+
+Now, let's install the NVIDIA Docker runtime to enable GPU support in Docker containers:
+
+```bash
+# Add NVIDIA package repositories
+curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | sudo apt-key add -
+distribution=$(lsb_release -cs)
+curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
+
+# Install NVIDIA Docker runtime
+sudo apt-get update
+sudo apt-get install -y nvidia-docker2
+
+# Restart Docker service to apply changes
+sudo systemctl restart docker
+```
+
+### Verify NVIDIA Runtime with Docker
+
+Run `docker info | grep -i runtime` to ensure that `nvidia` appears in the list of runtimes:
+
+```bash
+docker info | grep -i runtime
+```
+
+---
+
+## Installing Ultralytics Docker Images
+
+Ultralytics offers several Docker images optimized for various platforms and use cases:
+
+- **Dockerfile:** GPU image, ideal for training.
+- **Dockerfile-arm64:** For ARM64 architecture, suitable for devices like [Raspberry Pi](raspberry-pi.md).
+- **Dockerfile-cpu:** CPU-only version for inference and non-GPU environments.
+- **Dockerfile-jetson:** Optimized for NVIDIA Jetson devices.
+- **Dockerfile-python:** Minimal Python environment for lightweight applications.
+- **Dockerfile-conda:** Includes [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) and Ultralytics package installed via Conda.
+
+To pull the latest image:
+
+```bash
+# Set image name as a variable
+t=ultralytics/ultralytics:latest
+
+# Pull the latest Ultralytics image from Docker Hub
+sudo docker pull $t
+```
+
+---
+
+## Running Ultralytics in Docker Container
+
+Here's how to execute the Ultralytics Docker container:
+
+```bash
+# Run with all GPUs
+sudo docker run -it --ipc=host --gpus all $t
+
+# Run specifying which GPUs to use
+sudo docker run -it --ipc=host --gpus '"device=2,3"' $t
+```
+
+The `-it` flag assigns a pseudo-TTY and keeps stdin open, allowing you to interact with the container. The `--ipc=host` flag enables sharing of the host's IPC namespace, which is essential for sharing memory between processes. The `--gpus` flag allows the container to access the host's GPUs.
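+
+Once inside the container, you can verify the environment and run a quick test prediction. The commands below are a minimal sketch, assuming the image's default entrypoint drops you into a shell with the `yolo` CLI available:
+
+```bash
+# Verify the install and print software/hardware environment info
+yolo checks
+
+# Run a quick test prediction with a pretrained model
+yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+```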
+ +### Note on File Accessibility + +To work with files on your local machine within the container, you can use Docker volumes: + +```bash +# Mount a local directory into the container +sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t +``` + +Replace `/path/on/host` with the directory path on your local machine and `/path/in/container` with the desired path inside the Docker container. + +--- + +Congratulations! You're now set up to use Ultralytics with Docker and ready to take advantage of its powerful capabilities. For alternate installation methods, feel free to explore the [Ultralytics quickstart documentation](../quickstart.md). diff --git a/ultralytics/docs/en/guides/docker-quickstart.md:Zone.Identifier b/ultralytics/docs/en/guides/docker-quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/docker-quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/heatmaps.md b/ultralytics/docs/en/guides/heatmaps.md new file mode 100755 index 0000000..a1c3a69 --- /dev/null +++ b/ultralytics/docs/en/guides/heatmaps.md @@ -0,0 +1,296 @@ +--- +comments: true +description: Advanced Data Visualization with Ultralytics YOLOv8 Heatmaps +keywords: Ultralytics, YOLOv8, Advanced Data Visualization, Heatmap Technology, Object Detection and Tracking, Jupyter Notebook, Python SDK, Command Line Interface +--- + +# Advanced Data Visualization: Heatmaps using Ultralytics YOLOv8 ๐Ÿš€ + +## Introduction to Heatmaps + +A heatmap generated with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) transforms complex data into a vibrant, color-coded matrix. This visual tool employs a spectrum of colors to represent varying data values, where warmer hues indicate higher intensities and cooler tones signify lower values. Heatmaps excel in visualizing intricate data patterns, correlations, and anomalies, offering an accessible and engaging approach to data interpretation across diverse domains. + +

+
+ +
+ Watch: Heatmaps using Ultralytics YOLOv8 +

+
+## Why Choose Heatmaps for Data Analysis?
+
+- **Intuitive Data Distribution Visualization:** Heatmaps simplify the comprehension of data concentration and distribution, converting complex datasets into easy-to-understand visual formats.
+- **Efficient Pattern Detection:** By visualizing data in heatmap format, it becomes easier to spot trends, clusters, and outliers, facilitating quicker analysis and insights.
+- **Enhanced Spatial Analysis and Decision Making:** Heatmaps are instrumental in illustrating spatial relationships, aiding in decision-making processes in sectors such as business intelligence, environmental studies, and urban planning.
+
+## Real World Applications
+
+|                                                         Transportation                                                          |                                                         Retail                                                          |
+|:-----------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|
+| ![Ultralytics YOLOv8 Transportation Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/288d7053-622b-4452-b4e4-1f41aeb764aa) | ![Ultralytics YOLOv8 Retail Heatmap](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a9139af0-2cb7-41fe-a0d5-29a300dee768) |
+|                                            Ultralytics YOLOv8 Transportation Heatmap                                             |                                                Ultralytics YOLOv8 Retail Heatmap                                                |
+
+???+ tip "heatmap_alpha"
+
+    The heatmap_alpha value should be in the range (0.0 - 1.0)
+
+???+ tip "decay_factor"
+
+    Used to remove the heatmap once an object is no longer in the frame; the value should be in the range (0.0 - 1.0)
+
+
+!!! Example "Heatmaps using Ultralytics YOLOv8 Example"
+
+    === "Heatmap"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import heatmap
+        import cv2

+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Video writer
+        video_writer = cv2.VideoWriter("heatmap_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(5)),  # fps
+                                       (int(cap.get(3)), int(cap.get(4))))  # (width, height)
+
+        # Init heatmap
+        heatmap_obj = heatmap.Heatmap()
+        heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,
+                             imw=cap.get(3),  # should be same as cap width
+                             imh=cap.get(4),  # should be same as cap height
+                             view_img=True,
+                             shape="circle")
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False)
+
+            im0 = heatmap_obj.generate_heatmap(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+
+        ```
+
+    === "Line Counting"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import heatmap
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Video writer
+        video_writer = cv2.VideoWriter("heatmap_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(5)),  # fps
+                                       (int(cap.get(3)), int(cap.get(4))))  # (width, height)
+
+        line_points = [(256, 409), (694, 532)]  # line for object counting
+
+        # Init heatmap
+        heatmap_obj = heatmap.Heatmap()
+        heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,
+                             imw=cap.get(3),  # should be same as cap width
+                             imh=cap.get(4),  # should be same as cap height
+                             view_img=True,
+                             shape="circle",
+                             count_reg_pts=line_points)
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False)
+
+            im0 = heatmap_obj.generate_heatmap(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+    === "Region Counting"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import heatmap
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Video writer
+        video_writer = cv2.VideoWriter("heatmap_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(5)),  # fps
+                                       (int(cap.get(3)), int(cap.get(4))))  # (width, height)
+
+        # Define region points
+        region_points = [(20, 400), (1080, 404), (1080, 360), (20, 360)]
+
+        # Init heatmap
+        heatmap_obj = heatmap.Heatmap()
+        heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,
+                             imw=cap.get(3),  # should be same as cap width
+                             imh=cap.get(4),  # should be same as cap height
+                             view_img=True,
+                             shape="circle",
+                             count_reg_pts=region_points)
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False)
+
+            im0 = heatmap_obj.generate_heatmap(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+    === "Im0"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import heatmap
+        import cv2
+
+        model = YOLO("yolov8s.pt")   # YOLOv8 custom/pretrained model
+
+        im0 = cv2.imread("path/to/image.png")  # path to image file
+
+        # Heatmap Init
+        heatmap_obj = heatmap.Heatmap()
+        heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,
+                             imw=im0.shape[1],  # should be same as image width
+                             imh=im0.shape[0],  # should be same as image height
+                             view_img=True,
+                             shape="circle")
+
+
+        results = model.track(im0, persist=True)
+        im0 = heatmap_obj.generate_heatmap(im0, tracks=results)
+        cv2.imwrite("ultralytics_output.png", im0)
+        ```
+
+    === "Specific Classes"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import heatmap
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Video writer
+        video_writer = cv2.VideoWriter("heatmap_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(5)),  # fps
+                                       (int(cap.get(3)), int(cap.get(4))))  # (width, height)
+
+        classes_for_heatmap = [0, 2]  # classes for heatmap
+
+        # Init heatmap
+        heatmap_obj = heatmap.Heatmap()
+        heatmap_obj.set_args(colormap=cv2.COLORMAP_PARULA,
+                             imw=cap.get(3),  # should be same as cap width
+                             imh=cap.get(4),  # should be same as cap height
+                             view_img=True,
+                             shape="circle")
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False,
+                                 classes=classes_for_heatmap)
+
+            im0 = heatmap_obj.generate_heatmap(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+### Arguments `set_args`
+
+| Name                | Type           | Default           | Description                                                |
+|---------------------|----------------|-------------------|-----------------------------------------------------------|
+| view_img            | `bool`         | `False`           | Display the frame with heatmap                             |
+| colormap            | `cv2.COLORMAP` | `None`            | cv2.COLORMAP for heatmap                                   |
+| imw                 | `int`          | `None`            | Width of Heatmap                                           |
+| imh                 | `int`          | `None`            | 
Height of Heatmap | +| heatmap_alpha | `float` | `0.5` | Heatmap alpha value | +| count_reg_pts | `list` | `None` | Object counting region points | +| count_txt_thickness | `int` | `2` | Count values text size | +| count_txt_color | `RGB Color` | `(0, 0, 0)` | Foreground color for Object counts text | +| count_color | `RGB Color` | `(255, 255, 255)` | Background color for Object counts text | +| count_reg_color | `RGB Color` | `(255, 0, 255)` | Counting region color | +| region_thickness | `int` | `5` | Counting region thickness value | +| decay_factor | `float` | `0.99` | Decay factor for heatmap area removal after specific time | +| shape | `str` | `circle` | Heatmap shape for display "rect" or "circle" supported | +| line_dist_thresh | `int` | `15` | Euclidean Distance threshold for line counter | + +### Arguments `model.track` + +| Name | Type | Default | Description | +|-----------|---------|----------------|-------------------------------------------------------------| +| `source` | `im0` | `None` | source directory for images or videos | +| `persist` | `bool` | `False` | persisting tracks between frames | +| `tracker` | `str` | `botsort.yaml` | Tracking method 'bytetrack' or 'botsort' | +| `conf` | `float` | `0.3` | Confidence Threshold | +| `iou` | `float` | `0.5` | IOU Threshold | +| `classes` | `list` | `None` | filter results by class, i.e. classes=0, or classes=[0,2,3] | + +### Heatmap COLORMAPs + +| Colormap Name | Description | +|---------------------------------|----------------------------------------| +| `cv::COLORMAP_AUTUMN` | Autumn color map | +| `cv::COLORMAP_BONE` | Bone color map | +| `cv::COLORMAP_JET` | Jet color map | +| `cv::COLORMAP_WINTER` | Winter color map | +| `cv::COLORMAP_RAINBOW` | Rainbow color map | +| `cv::COLORMAP_OCEAN` | Ocean color map | +| `cv::COLORMAP_SUMMER` | Summer color map | +| `cv::COLORMAP_SPRING` | Spring color map | +| `cv::COLORMAP_COOL` | Cool color map | +| `cv::COLORMAP_HSV` | HSV (Hue, Saturation, Value) color map | +| `cv::COLORMAP_PINK` | Pink color map | +| `cv::COLORMAP_HOT` | Hot color map | +| `cv::COLORMAP_PARULA` | Parula color map | +| `cv::COLORMAP_MAGMA` | Magma color map | +| `cv::COLORMAP_INFERNO` | Inferno color map | +| `cv::COLORMAP_PLASMA` | Plasma color map | +| `cv::COLORMAP_VIRIDIS` | Viridis color map | +| `cv::COLORMAP_CIVIDIS` | Cividis color map | +| `cv::COLORMAP_TWILIGHT` | Twilight color map | +| `cv::COLORMAP_TWILIGHT_SHIFTED` | Shifted Twilight color map | +| `cv::COLORMAP_TURBO` | Turbo color map | +| `cv::COLORMAP_DEEPGREEN` | Deep Green color map | + +These colormaps are commonly used for visualizing data with different color representations. diff --git a/ultralytics/docs/en/guides/heatmaps.md:Zone.Identifier b/ultralytics/docs/en/guides/heatmaps.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/heatmaps.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/hyperparameter-tuning.md b/ultralytics/docs/en/guides/hyperparameter-tuning.md new file mode 100755 index 0000000..f7d2b56 --- /dev/null +++ b/ultralytics/docs/en/guides/hyperparameter-tuning.md @@ -0,0 +1,206 @@ +--- +comments: true +description: Dive into hyperparameter tuning in Ultralytics YOLO models. Learn how to optimize performance using the Tuner class and genetic evolution. 
+keywords: Ultralytics, YOLO, Hyperparameter Tuning, Tuner Class, Genetic Evolution, Optimization +--- + +# Ultralytics YOLO Hyperparameter Tuning Guide + +## Introduction + +Hyperparameter tuning is not just a one-time set-up but an iterative process aimed at optimizing the machine learning model's performance metrics, such as accuracy, precision, and recall. In the context of Ultralytics YOLO, these hyperparameters could range from learning rate to architectural details, such as the number of layers or types of activation functions used. + +### What are Hyperparameters? + +Hyperparameters are high-level, structural settings for the algorithm. They are set prior to the training phase and remain constant during it. Here are some commonly tuned hyperparameters in Ultralytics YOLO: + +- **Learning Rate** `lr0`: Determines the step size at each iteration while moving towards a minimum in the loss function. +- **Batch Size** `batch`: Number of images processed simultaneously in a forward pass. +- **Number of Epochs** `epochs`: An epoch is one complete forward and backward pass of all the training examples. +- **Architecture Specifics**: Such as channel counts, number of layers, types of activation functions, etc. + +

+ Hyperparameter Tuning Visual +

+
+For a full list of augmentation hyperparameters used in YOLOv8, please refer to the [configurations page](../usage/cfg.md#augmentation).
+
+### Genetic Evolution and Mutation
+
+Ultralytics YOLO uses genetic algorithms to optimize hyperparameters. Genetic algorithms are inspired by the mechanism of natural selection and genetics.
+
+- **Mutation**: In the context of Ultralytics YOLO, mutation helps in locally searching the hyperparameter space by applying small, random changes to existing hyperparameters, producing new candidates for evaluation.
+- **Crossover**: Although crossover is a popular genetic algorithm technique, it is not currently used in Ultralytics YOLO for hyperparameter tuning. The focus is mainly on mutation for generating new hyperparameter sets.
+
+## Preparing for Hyperparameter Tuning
+
+Before you begin the tuning process, it's important to:
+
+1. **Identify the Metrics**: Determine the metrics you will use to evaluate the model's performance. This could be AP50, F1-score, or others.
+2. **Set the Tuning Budget**: Define the computational resources you're willing to allocate. Hyperparameter tuning can be computationally intensive.
+
+## Steps Involved
+
+### Initialize Hyperparameters
+
+Start with a reasonable set of initial hyperparameters. This could either be the default hyperparameters set by Ultralytics YOLO or something based on your domain knowledge or previous experiments.
+
+### Mutate Hyperparameters
+
+Use the `_mutate` method to produce a new set of hyperparameters based on the existing set.
+
+### Train Model
+
+Training is performed using the mutated set of hyperparameters. The training performance is then assessed.
+
+### Evaluate Model
+
+Use metrics like AP50, F1-score, or custom metrics to evaluate the model's performance.
+
+### Log Results
+
+It's crucial to log both the performance metrics and the corresponding hyperparameters for future reference.
+
+### Repeat
+
+The process is repeated until either the set number of iterations is reached or the performance metric is satisfactory.
+
+## Usage Example
+
+Here's how to use the `model.tune()` method to utilize the `Tuner` class for hyperparameter tuning of YOLOv8n on COCO8 for 30 epochs with an AdamW optimizer, skipping plotting, checkpointing and validation except on the final epoch for faster tuning.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Initialize the YOLO model
+        model = YOLO('yolov8n.pt')
+
+        # Tune hyperparameters on COCO8 for 30 epochs
+        model.tune(data='coco8.yaml', epochs=30, iterations=300, optimizer='AdamW', plots=False, save=False, val=False)
+        ```
+
+## Results
+
+After you've successfully completed the hyperparameter tuning process, you will obtain several files and directories that encapsulate the results of the tuning. The following describes each:
+
+### File Structure
+
+Here's what the directory structure of the results will look like. Training directories like `train1/` contain individual tuning iterations, i.e. one model trained with one set of hyperparameters. The `tune/` directory contains tuning results from all the individual model trainings:
+
+```plaintext
+runs/
+โ””โ”€โ”€ detect/
+    โ”œโ”€โ”€ train1/
+    โ”œโ”€โ”€ train2/
+    โ”œโ”€โ”€ ...
+ โ””โ”€โ”€ tune/ + โ”œโ”€โ”€ best_hyperparameters.yaml + โ”œโ”€โ”€ best_fitness.png + โ”œโ”€โ”€ tune_results.csv + โ”œโ”€โ”€ tune_scatter_plots.png + โ””โ”€โ”€ weights/ + โ”œโ”€โ”€ last.pt + โ””โ”€โ”€ best.pt +``` + +### File Descriptions + +#### best_hyperparameters.yaml + +This YAML file contains the best-performing hyperparameters found during the tuning process. You can use this file to initialize future trainings with these optimized settings. + +- **Format**: YAML +- **Usage**: Hyperparameter results +- **Example**: + ```yaml + # 558/900 iterations complete โœ… (45536.81s) + # Results saved to /usr/src/ultralytics/runs/detect/tune + # Best fitness=0.64297 observed at iteration 498 + # Best fitness metrics are {'metrics/precision(B)': 0.87247, 'metrics/recall(B)': 0.71387, 'metrics/mAP50(B)': 0.79106, 'metrics/mAP50-95(B)': 0.62651, 'val/box_loss': 2.79884, 'val/cls_loss': 2.72386, 'val/dfl_loss': 0.68503, 'fitness': 0.64297} + # Best fitness model is /usr/src/ultralytics/runs/detect/train498 + # Best fitness hyperparameters are printed below. + + lr0: 0.00269 + lrf: 0.00288 + momentum: 0.73375 + weight_decay: 0.00015 + warmup_epochs: 1.22935 + warmup_momentum: 0.1525 + box: 18.27875 + cls: 1.32899 + dfl: 0.56016 + hsv_h: 0.01148 + hsv_s: 0.53554 + hsv_v: 0.13636 + degrees: 0.0 + translate: 0.12431 + scale: 0.07643 + shear: 0.0 + perspective: 0.0 + flipud: 0.0 + fliplr: 0.08631 + mosaic: 0.42551 + mixup: 0.0 + copy_paste: 0.0 + ``` + +#### best_fitness.png + +This is a plot displaying fitness (typically a performance metric like AP50) against the number of iterations. It helps you visualize how well the genetic algorithm performed over time. + +- **Format**: PNG +- **Usage**: Performance visualization + +

+ Hyperparameter Tuning Fitness vs Iteration +

+ +#### tune_results.csv + +A CSV file containing detailed results of each iteration during the tuning. Each row in the file represents one iteration, and it includes metrics like fitness score, precision, recall, as well as the hyperparameters used. + +- **Format**: CSV +- **Usage**: Per-iteration results tracking. +- **Example**: + ```csv + fitness,lr0,lrf,momentum,weight_decay,warmup_epochs,warmup_momentum,box,cls,dfl,hsv_h,hsv_s,hsv_v,degrees,translate,scale,shear,perspective,flipud,fliplr,mosaic,mixup,copy_paste + 0.05021,0.01,0.01,0.937,0.0005,3.0,0.8,7.5,0.5,1.5,0.015,0.7,0.4,0.0,0.1,0.5,0.0,0.0,0.0,0.5,1.0,0.0,0.0 + 0.07217,0.01003,0.00967,0.93897,0.00049,2.79757,0.81075,7.5,0.50746,1.44826,0.01503,0.72948,0.40658,0.0,0.0987,0.4922,0.0,0.0,0.0,0.49729,1.0,0.0,0.0 + 0.06584,0.01003,0.00855,0.91009,0.00073,3.42176,0.95,8.64301,0.54594,1.72261,0.01503,0.59179,0.40658,0.0,0.0987,0.46955,0.0,0.0,0.0,0.49729,0.80187,0.0,0.0 + ``` + +#### tune_scatter_plots.png + +This file contains scatter plots generated from `tune_results.csv`, helping you visualize relationships between different hyperparameters and performance metrics. Note that hyperparameters initialized to 0 will not be tuned, such as `degrees` and `shear` below. + +- **Format**: PNG +- **Usage**: Exploratory data analysis + +

+ Hyperparameter Tuning Scatter Plots +

+
+#### weights/
+
+This directory contains the saved PyTorch models for the last and the best iterations during the hyperparameter tuning process.
+
+- **`last.pt`**: The last.pt weights are from the last epoch of training.
+- **`best.pt`**: The best.pt weights are from the iteration that achieved the best fitness score.
+
+Using these results, you can make more informed decisions for your future model trainings and analyses. Feel free to consult these artifacts to understand how well your model performed and how you might improve it further.
+
+## Conclusion
+
+The hyperparameter tuning process in Ultralytics YOLO is simplified yet powerful, thanks to its genetic algorithm-based approach focused on mutation. Following the steps outlined in this guide will assist you in systematically tuning your model to achieve better performance.
+
+### Further Reading
+
+1. [Hyperparameter Optimization in Wikipedia](https://en.wikipedia.org/wiki/Hyperparameter_optimization)
+2. [YOLOv5 Hyperparameter Evolution Guide](../yolov5/tutorials/hyperparameter_evolution.md)
+3. [Efficient Hyperparameter Tuning with Ray Tune and YOLOv8](../integrations/ray-tune.md)
+
+For deeper insights, you can explore the `Tuner` class source code and accompanying documentation. Should you have any questions, feature requests, or need further assistance, feel free to reach out to us on [GitHub](https://github.com/ultralytics/ultralytics/issues/new/choose) or [Discord](https://ultralytics.com/discord).
diff --git a/ultralytics/docs/en/guides/hyperparameter-tuning.md:Zone.Identifier b/ultralytics/docs/en/guides/hyperparameter-tuning.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/guides/hyperparameter-tuning.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/guides/index.md b/ultralytics/docs/en/guides/index.md
new file mode 100755
index 0000000..e4efcea
--- /dev/null
+++ b/ultralytics/docs/en/guides/index.md
@@ -0,0 +1,47 @@
+---
+comments: true
+description: In-depth exploration of Ultralytics' YOLO. Learn about the YOLO object detection model, how to train it on custom data, multi-GPU training, exporting, predicting, deploying, and more.
+keywords: Ultralytics, YOLO, Deep Learning, Object detection, PyTorch, Tutorial, Multi-GPU training, Custom data training, SAHI, Tiled Inference
+---
+
+# Comprehensive Tutorials to Ultralytics YOLO
+
+Welcome to the Ultralytics' YOLO ๐Ÿš€ Guides! Our comprehensive tutorials cover various aspects of the YOLO object detection model, ranging from training and prediction to deployment. Built on PyTorch, YOLO stands out for its exceptional speed and accuracy in real-time object detection tasks.
+
+Whether you're a beginner or an expert in deep learning, our tutorials offer valuable insights into the implementation and optimization of YOLO for your computer vision projects. Let's dive in!
+
+## Guides
+
+Here's a compilation of in-depth guides to help you master different aspects of Ultralytics YOLO.
+
+* [YOLO Common Issues](yolo-common-issues.md) โญ RECOMMENDED: Practical solutions and troubleshooting tips for the most frequently encountered issues when working with Ultralytics YOLO models.
+* [YOLO Performance Metrics](yolo-performance-metrics.md) โญ ESSENTIAL: Understand the key metrics like mAP, IoU, and F1 score used to evaluate the performance of your YOLO models. Includes practical examples and tips on how to improve detection accuracy and speed.
+* [Model Deployment Options](model-deployment-options.md): Overview of YOLO model deployment formats like ONNX, OpenVINO, and TensorRT, with pros and cons for each to inform your deployment strategy.
+* [K-Fold Cross Validation](kfold-cross-validation.md) ๐Ÿš€ NEW: Learn how to improve model generalization using the K-Fold cross-validation technique.
+* [Hyperparameter Tuning](hyperparameter-tuning.md) ๐Ÿš€ NEW: Discover how to optimize your YOLO models by fine-tuning hyperparameters using the Tuner class and genetic evolution algorithms.
+* [SAHI Tiled Inference](sahi-tiled-inference.md) ๐Ÿš€ NEW: Comprehensive guide on leveraging SAHI's sliced inference capabilities with YOLOv8 for object detection in high-resolution images.
+* [AzureML Quickstart](azureml-quickstart.md) ๐Ÿš€ NEW: Get up and running with Ultralytics YOLO models on Microsoft's Azure Machine Learning platform. Learn how to train, deploy, and scale your object detection projects in the cloud.
+* [Conda Quickstart](conda-quickstart.md) ๐Ÿš€ NEW: Step-by-step guide to setting up a [Conda](https://anaconda.org/conda-forge/ultralytics) environment for Ultralytics. Learn how to install and start using the Ultralytics package efficiently with Conda.
+* [Docker Quickstart](docker-quickstart.md) ๐Ÿš€ NEW: Complete guide to setting up and using Ultralytics YOLO models with [Docker](https://hub.docker.com/r/ultralytics/ultralytics). Learn how to install Docker, manage GPU support, and run YOLO models in isolated containers for consistent development and deployment.
+* [Raspberry Pi](raspberry-pi.md) ๐Ÿš€ NEW: Quickstart tutorial to run YOLO models on the latest Raspberry Pi hardware.
+* [Triton Inference Server Integration](triton-inference-server.md) ๐Ÿš€ NEW: Dive into the integration of Ultralytics YOLOv8 with NVIDIA's Triton Inference Server for scalable and efficient deep learning inference deployments.
+* [YOLO Thread-Safe Inference](yolo-thread-safe-inference.md) ๐Ÿš€ NEW: Guidelines for performing inference with YOLO models in a thread-safe manner. Learn the importance of thread safety and best practices to prevent race conditions and ensure consistent predictions.
+* [Isolating Segmentation Objects](isolating-segmentation-objects.md) ๐Ÿš€ NEW: Step-by-step recipe and explanation on how to extract and/or isolate objects from images using Ultralytics Segmentation.
+
+## Real-World Projects
+
+* [Object Counting](object-counting.md) ๐Ÿš€ NEW: Explore the process of real-time object counting with Ultralytics YOLOv8 and acquire the knowledge to effectively count objects in a live video stream.
+* [Workouts Monitoring](workouts-monitoring.md) ๐Ÿš€ NEW: Discover the comprehensive approach to monitoring workouts with Ultralytics YOLOv8. Acquire the skills and insights necessary to effectively use YOLOv8 for tracking and analyzing various aspects of fitness routines in real time.
+* [Objects Counting in Regions](region-counting.md) ๐Ÿš€ NEW: Explore counting objects in specific regions with Ultralytics YOLOv8 for precise and efficient object detection in varied areas.
+* [Security Alarm System](security-alarm-system.md) ๐Ÿš€ NEW: Discover the process of creating a security alarm system with Ultralytics YOLOv8. This system triggers alerts upon detecting new objects in the frame. Subsequently, you can customize the code to align with your specific use case.
+* [Heatmaps](heatmaps.md) ๐Ÿš€ NEW: Elevate your understanding of data with our Detection Heatmaps! These intuitive visual tools use vibrant color gradients to vividly illustrate the intensity of data values across a matrix. Essential in computer vision, heatmaps are skillfully designed to highlight areas of interest, providing an immediate, impactful way to interpret spatial information.
+* [Instance Segmentation with Object Tracking](instance-segmentation-and-tracking.md) ๐Ÿš€ NEW: Explore our feature on Object Segmentation in Bounding Boxes Shape, providing a visual representation of precise object boundaries for enhanced understanding and analysis.
+* [VisionEye View Objects Mapping](vision-eye.md) ๐Ÿš€ NEW: This feature aims to enable computers to discern and focus on specific objects, much like the way the human eye observes details from a particular viewpoint.
+
+## Contribute to Our Guides
+
+We welcome contributions from the community! If you've mastered a particular aspect of Ultralytics YOLO that's not yet covered in our guides, we encourage you to share your expertise. Writing a guide is a great way to give back to the community and help us make our documentation more comprehensive and user-friendly.
+
+To get started, please read our [Contributing Guide](../help/contributing.md) for guidelines on how to open up a Pull Request (PR) ๐Ÿ› ๏ธ. We look forward to your contributions!
+
+Let's work together to make the Ultralytics YOLO ecosystem more robust and versatile ๐Ÿ™!
diff --git a/ultralytics/docs/en/guides/index.md:Zone.Identifier b/ultralytics/docs/en/guides/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/guides/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md b/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md
new file mode 100755
index 0000000..c08b75c
--- /dev/null
+++ b/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md
@@ -0,0 +1,127 @@
+---
+comments: true
+description: Instance Segmentation with Object Tracking using Ultralytics YOLOv8
+keywords: Ultralytics, YOLOv8, Instance Segmentation, Object Detection, Object Tracking, Segbbox, Computer Vision, Notebook, IPython Kernel, CLI, Python SDK
+---
+
+# Instance Segmentation and Tracking using Ultralytics YOLOv8 ๐Ÿš€
+
+## What is Instance Segmentation?
+
+[Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) instance segmentation involves identifying and outlining individual objects in an image, providing a detailed understanding of spatial distribution. Unlike semantic segmentation, it uniquely labels and precisely delineates each object, which is crucial for tasks like object detection and medical imaging.
+
+There are two types of instance segmentation tracking available in the Ultralytics package:
+
+- **Instance Segmentation with Class Objects:** Each class object is assigned a unique color for clear visual separation.
+
+- **Instance Segmentation with Object Tracks:** Every track is represented by a distinct color, facilitating easy identification and tracking.
+ +## Samples + +| Instance Segmentation | Instance Segmentation + Object Tracking | +|:---------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------------:| +| ![Ultralytics Instance Segmentation](https://github.com/RizwanMunawar/ultralytics/assets/62513924/d4ad3499-1f33-4871-8fbc-1be0b2643aa2) | ![Ultralytics Instance Segmentation with Object Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/2e5c38cc-fd5c-4145-9682-fa94ae2010a0) | +| Ultralytics Instance Segmentation ๐Ÿ˜ | Ultralytics Instance Segmentation with Object Tracking ๐Ÿ”ฅ | + +!!! Example "Instance Segmentation and Tracking" + + === "Instance Segmentation" + ```python + import cv2 + from ultralytics import YOLO + from ultralytics.utils.plotting import Annotator, colors + + model = YOLO("yolov8n-seg.pt") + names = model.model.names + cap = cv2.VideoCapture("path/to/video/file.mp4") + + out = cv2.VideoWriter('instance-segmentation.avi', + cv2.VideoWriter_fourcc(*'MJPG'), + 30, (int(cap.get(3)), int(cap.get(4)))) + + while True: + ret, im0 = cap.read() + if not ret: + print("Video frame is empty or video processing has been successfully completed.") + break + + results = model.predict(im0) + clss = results[0].boxes.cls.cpu().tolist() + masks = results[0].masks.xy + + annotator = Annotator(im0, line_width=2) + + for mask, cls in zip(masks, clss): + annotator.seg_bbox(mask=mask, + mask_color=colors(int(cls), True), + det_label=names[int(cls)]) + + out.write(im0) + cv2.imshow("instance-segmentation", im0) + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + out.release() + cap.release() + cv2.destroyAllWindows() + + ``` + + === "Instance Segmentation with Object Tracking" + ```python + import cv2 + from ultralytics import YOLO + from ultralytics.utils.plotting import Annotator, colors + + from collections import defaultdict + + track_history = defaultdict(lambda: []) + + model = YOLO("yolov8n-seg.pt") + cap = cv2.VideoCapture("path/to/video/file.mp4") + + out = cv2.VideoWriter('instance-segmentation-object-tracking.avi', + cv2.VideoWriter_fourcc(*'MJPG'), + 30, (int(cap.get(3)), int(cap.get(4)))) + + while True: + ret, im0 = cap.read() + if not ret: + print("Video frame is empty or video processing has been successfully completed.") + break + + results = model.track(im0, persist=True) + masks = results[0].masks.xy + track_ids = results[0].boxes.id.int().cpu().tolist() + + annotator = Annotator(im0, line_width=2) + + for mask, track_id in zip(masks, track_ids): + annotator.seg_bbox(mask=mask, + mask_color=colors(track_id, True), + track_label=str(track_id)) + + out.write(im0) + cv2.imshow("instance-segmentation-object-tracking", im0) + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + out.release() + cap.release() + cv2.destroyAllWindows() + ``` + +### `seg_bbox` Arguments + +| Name | Type | Default | Description | +|---------------|---------|-----------------|----------------------------------------| +| `mask` | `array` | `None` | Segmentation mask coordinates | +| `mask_color` | `tuple` | `(255, 0, 255)` | Mask color for every segmented box | +| `det_label` | `str` | `None` | Label for segmented object | +| `track_label` | `str` | `None` | Label for segmented and tracked object | + +## Note + +For any inquiries, feel free to post your questions in the [Ultralytics Issue 
Section](https://github.com/ultralytics/ultralytics/issues/new/choose) or the discussion section mentioned below. diff --git a/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md:Zone.Identifier b/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/instance-segmentation-and-tracking.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/isolating-segmentation-objects.md b/ultralytics/docs/en/guides/isolating-segmentation-objects.md new file mode 100755 index 0000000..a0436f4 --- /dev/null +++ b/ultralytics/docs/en/guides/isolating-segmentation-objects.md @@ -0,0 +1,325 @@ +--- +comments: true +description: A concise guide on isolating segmented objects using Ultralytics. +keywords: Ultralytics, YOLO, segmentation, Python, object detection, inference, dataset, prediction, instance segmentation, contours, binary mask, object mask, image processing +--- + +# Isolating Segmentation Objects + +After performing the [Segment Task](../tasks/segment.md), it's sometimes desirable to extract the isolated objects from the inference results. This guide provides a generic recipe on how to accomplish this using the Ultralytics [Predict Mode](../modes/predict.md). + +

+ Example Isolated Object Segmentation +

+
+## Recipe Walk Through
+
+1. Begin with the necessary imports
+
+    ```py
+    from pathlib import Path
+
+    import cv2 as cv
+    import numpy as np
+    from ultralytics import YOLO
+    ```
+
+    ???+ tip "Ultralytics Install"
+
+        See the Ultralytics [Quickstart](../quickstart.md/#install-ultralytics) Installation section for a quick walkthrough on installing the required libraries.
+
+    ---
+
+2. Load a model and run the `predict()` method on a source.
+
+    ```py
+    from ultralytics import YOLO
+
+    # Load a model
+    model = YOLO('yolov8n-seg.pt')
+
+    # Run inference
+    res = model.predict()
+    ```
+
+    ??? question "No Prediction Arguments?"
+
+        Without specifying a source, the example images from the library will be used:
+
+        ```
+        'ultralytics/assets/bus.jpg'
+        'ultralytics/assets/zidane.jpg'
+        ```
+
+        This is helpful for rapid testing with the `predict()` method.
+
+    For additional information about Segmentation Models, visit the [Segment Task](../tasks/segment.md#models) page. To learn more about the `predict()` method, see the [Predict Mode](../modes/predict.md) section of the Documentation.
+
+    ---
+
+3. Now iterate over the results and the contours. For workflows that want to save an image to file, the source image `base-name` and the detection `class-label` are retrieved for later use (optional).
+
+    ``` { .py .annotate }
+    # (2) Iterate detection results (helpful for multiple images)
+    for r in res:
+        img = np.copy(r.orig_img)
+        img_name = Path(r.path).stem # source image base-name
+
+        # Iterate each object contour (multiple detections)
+        for ci,c in enumerate(r):
+            # (1) Get detection class name
+            label = c.names[c.boxes.cls.tolist().pop()]
+
+    ```
+
+    1. To learn more about working with detection results, see [Boxes Section for Predict Mode](../modes/predict.md#boxes).
+    2. To learn more about `predict()` results, see [Working with Results for Predict Mode](../modes/predict.md#working-with-results)
+
+    ??? info "For-Loop"
+
+        A single image will only iterate the first loop once. A single image with only a single detection will iterate each loop _only_ once.
+
+    ---
+
+4. Start with generating a binary mask from the source image and then draw a filled contour onto the mask. This will allow the object to be isolated from the other parts of the image. An example from `bus.jpg` for one of the detected `person` class objects is shown on the right.
+
+    ![Binary Mask Image](https://github.com/ultralytics/ultralytics/assets/62214284/59bce684-fdda-4b17-8104-0b4b51149aca){ width="240", align="right" }
+
+    ``` { .py .annotate }
+    # Create binary mask
+    b_mask = np.zeros(img.shape[:2], np.uint8)
+
+    # (1) Extract contour result
+    contour = c.masks.xy.pop()
+    # (2) Changing the type
+    contour = contour.astype(np.int32)
+    # (3) Reshaping
+    contour = contour.reshape(-1, 1, 2)
+
+
+    # Draw contour onto mask
+    _ = cv.drawContours(b_mask,
+                        [contour],
+                        -1,
+                        (255, 255, 255),
+                        cv.FILLED)
+
+    ```
+
+    1. For more info on `c.masks.xy` see [Masks Section from Predict Mode](../modes/predict.md#masks).
+
+    2. Here, the values are cast into `np.int32` for compatibility with the `drawContours()` function from OpenCV.
+
+    3. The OpenCV `drawContours()` function expects contours to have a shape of `[N, 1, 2]`; expand the section below for more details.
+
+ Expand to understand what is happening when defining the contour variable. +

+
+    - `c.masks.xy` :: Provides the coordinates of the mask contour points in the format `(x, y)`. For more details, refer to the [Masks Section from Predict Mode](../modes/predict.md#masks).
+
+    - `.pop()` :: As `masks.xy` is a list containing a single element, this element is extracted using the `pop()` method.
+
+    - `.astype(np.int32)` :: The coordinates from `masks.xy` are returned with a data type of `float32`, which isn't compatible with the OpenCV `drawContours()` function, so this step changes the data type to `int32` for compatibility.
+
+    - `.reshape(-1, 1, 2)` :: Reformats the data into the required shape of `[N, 1, 2]` where `N` is the number of contour points, with each point represented by a single entry `1`, and the entry is composed of `2` values. The `-1` denotes that the number of values along this dimension is flexible.
+

+

+
+ Expand for an explanation of the drawContours() configuration. +

+ + - Encapsulating the `contour` variable within square brackets, `[contour]`, was found to effectively generate the desired contour mask during testing. + + - The value `-1` specified for the `drawContours()` parameter instructs the function to draw all contours present in the image. + + - The `tuple` `(255, 255, 255)` represents the color white, which is the desired color for drawing the contour in this binary mask. + + - The addition of `cv.FILLED` will color all pixels enclosed by the contour boundary the same, in this case, all enclosed pixels will be white. + + - See [OpenCV Documentation on `drawContours()`](https://docs.opencv.org/4.8.0/d6/d6e/group__imgproc__draw.html#ga746c0625f1781f1ffc9056259103edbc) for more information. + +

+

+
+    ---
+
+5. Next, there are 2 options for how to move forward with the image from this point, and a subsequent option for each.
+
+    ### Object Isolation Options
+
+    !!! example ""
+
+        === "Black Background Pixels"
+
+            ```py
+            # Create 3-channel mask
+            mask3ch = cv.cvtColor(b_mask, cv.COLOR_GRAY2BGR)
+
+            # Isolate object with binary mask
+            isolated = cv.bitwise_and(mask3ch, img)
+
+            ```
+
+            ??? question "How does this work?"
+
+                - First, the binary mask is converted from a single-channel image to a three-channel image. This conversion is necessary for the subsequent step where the mask and the original image are combined. Both images must have the same number of channels to be compatible with the blending operation.
+
+                - The original image and the three-channel binary mask are merged using the OpenCV function `bitwise_and()`. This operation retains only pixel values that are greater than zero `(> 0)` from both images. Since the mask pixels are greater than zero `(> 0)` only within the contour region, the pixels remaining from the original image are those that overlap with the contour.
+
+            ### Isolate with Black Pixels: Sub-options
+
+            ??? info "Full-size Image"
+
+                There are no additional steps required if keeping the full size image.
+
+ ![Example Full size Isolated Object Image Black Background](https://github.com/ultralytics/ultralytics/assets/62214284/845c00d0-52a6-4b1e-8010-4ba73e011b99){ width=240 } +
Example full-size output
+
+ + ??? info "Cropped object Image" + + Additional steps required to crop image to only include object region. + + ![Example Crop Isolated Object Image Black Background](https://github.com/ultralytics/ultralytics/assets/62214284/103dbf90-c169-4f77-b791-76cdf09c6f22){ align="right" } + ``` { .py .annotate } + # (1) Bounding box coordinates + x1, y1, x2, y2 = c.boxes.xyxy.cpu().numpy().squeeze().astype(np.int32) + # Crop image to object region + iso_crop = isolated[y1:y2, x1:x2] + + ``` + + 1. For more information on bounding box results, see [Boxes Section from Predict Mode](../modes/predict.md/#boxes) + + ??? question "What does this code do?" + + - The `c.boxes.xyxy.cpu().numpy()` call retrieves the bounding boxes as a NumPy array in the `xyxy` format, where `xmin`, `ymin`, `xmax`, and `ymax` represent the coordinates of the bounding box rectangle. See [Boxes Section from Predict Mode](../modes/predict.md/#boxes) for more details. + + - The `squeeze()` operation removes any unnecessary dimensions from the NumPy array, ensuring it has the expected shape. + + - Converting the coordinate values using `.astype(np.int32)` changes the box coordinates data type from `float32` to `int32`, making them compatible for image cropping using index slices. + + - Finally, the bounding box region is cropped from the image using index slicing. The bounds are defined by the `[ymin:ymax, xmin:xmax]` coordinates of the detection bounding box. + + === "Transparent Background Pixels" + + ```py + # Isolate object with transparent background (when saved as PNG) + isolated = np.dstack([img, b_mask]) + + ``` + + ??? question "How does this work?" + + - Using the NumPy `dstack()` function (array stacking along depth-axis) in conjunction with the binary mask generated, will create an image with four channels. This allows for all pixels outside of the object contour to be transparent when saving as a `PNG` file. + + ### Isolate with Transparent Pixels: Sub-options + + ??? info "Full-size Image" + + There are no additional steps required if keeping full size image. + +
+ ![Example Full size Isolated Object Image No Background](https://github.com/ultralytics/ultralytics/assets/62214284/b1043ee0-369a-4019-941a-9447a9771042){ width=240 } +
Example full-size output + transparent background
+
+
+            ??? info "Cropped object Image"
+
+                Additional steps required to crop image to only include object region.
+
+                ![Example Crop Isolated Object Image No Background](https://github.com/ultralytics/ultralytics/assets/62214284/5910244f-d1e1-44af-af7f-6dea4c688da8){ align="right" }
+                ``` { .py .annotate }
+                # (1) Bounding box coordinates
+                x1, y1, x2, y2 = c.boxes.xyxy.cpu().numpy().squeeze().astype(np.int32)
+                # Crop image to object region
+                iso_crop = isolated[y1:y2, x1:x2]
+
+                ```
+
+                1. For more information on bounding box results, see [Boxes Section from Predict Mode](../modes/predict.md/#boxes)
+
+                ??? question "What does this code do?"
+
+                    - When using `c.boxes.xyxy.cpu().numpy()`, the bounding boxes are returned as a NumPy array, using the `xyxy` box coordinates format, which correspond to the points `xmin, ymin, xmax, ymax` for the bounding box (rectangle). See the [Boxes Section from Predict Mode](../modes/predict.md/#boxes) for more information.
+
+                    - Adding `squeeze()` ensures that any extraneous dimensions are removed from the NumPy array.
+
+                    - Converting the coordinate values using `.astype(np.int32)` changes the box coordinates data type from `float32` to `int32`, which will be compatible when cropping the image using index slices.
+
+                    - Finally, the image region for the bounding box is cropped using index slicing, where the bounds are set using the `[ymin:ymax, xmin:xmax]` coordinates of the detection bounding box.
+
+            ??? question "What if I want the cropped object **including** the background?"
+
+                This is a built-in feature of the Ultralytics library. See the `save_crop` argument for [Predict Mode Inference Arguments](../modes/predict.md/#inference-arguments) for details.
+
+    ---
+
+6. What to do next is entirely left to you as the developer. A basic example of one possible next step (saving the image to file for future use) is shown.
+
+    - **NOTE:** this step is optional and can be skipped if not required for your specific use case.
+
+    ??? example "Example Final Step"
+
+        ```py
+        # Save isolated object to file
+        _ = cv.imwrite(f'{img_name}_{label}-{ci}.png', iso_crop)
+        ```
+
+        - In this example, the `img_name` is the base-name of the source image file, `label` is the detected class-name, and `ci` is the index of the object detection (in case of multiple instances with the same class name).
+
+## Full Example Code
+
+Here, all steps from the previous section are combined into a single block of code. For repeated use, it would be optimal to define a function to do some or all commands contained in the `for`-loops, but that is an exercise left to the reader.
+
+``` { .py .annotate }
+from pathlib import Path
+
+import cv2 as cv
+import numpy as np
+from ultralytics import YOLO
+
+m = YOLO('yolov8n-seg.pt')#(4)!
+res = m.predict()#(3)!
+ +# iterate detection results (5) +for r in res: + img = np.copy(r.orig_img) + img_name = Path(r.path).stem + + # iterate each object contour (6) + for ci,c in enumerate(r): + label = c.names[c.boxes.cls.tolist().pop()] + + b_mask = np.zeros(img.shape[:2], np.uint8) + + # Create contour mask (1) + contour = c.masks.xy.pop().astype(np.int32).reshape(-1, 1, 2) + _ = cv.drawContours(b_mask, [contour], -1, (255, 255, 255), cv.FILLED) + + # Choose one: + + # OPTION-1: Isolate object with black background + mask3ch = cv.cvtColor(b_mask, cv.COLOR_GRAY2BGR) + isolated = cv.bitwise_and(mask3ch, img) + + # OPTION-2: Isolate object with transparent background (when saved as PNG) + isolated = np.dstack([img, b_mask]) + + # OPTIONAL: detection crop (from either OPT1 or OPT2) + x1, y1, x2, y2 = c.boxes.xyxy.cpu().numpy().squeeze().astype(np.int32) + iso_crop = isolated[y1:y2, x1:x2] + + # TODO your actions go here (2) + +``` + +1. The line populating `contour` is combined into a single line here, where it was split to multiple above. +2. {==What goes here is up to you!==} +3. See [Predict Mode](../modes/predict.md) for additional information. +4. See [Segment Task](../tasks/segment.md#models) for more information. +5. Learn more about [Working with Results](../modes/predict.md#working-with-results) +6. Learn more about [Segmentation Mask Results](../modes/predict.md#masks) diff --git a/ultralytics/docs/en/guides/isolating-segmentation-objects.md:Zone.Identifier b/ultralytics/docs/en/guides/isolating-segmentation-objects.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/isolating-segmentation-objects.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/kfold-cross-validation.md b/ultralytics/docs/en/guides/kfold-cross-validation.md new file mode 100755 index 0000000..9eb53a1 --- /dev/null +++ b/ultralytics/docs/en/guides/kfold-cross-validation.md @@ -0,0 +1,278 @@ +--- +comments: true +description: An in-depth guide demonstrating the implementation of K-Fold Cross Validation with the Ultralytics ecosystem for object detection datasets, leveraging Python, YOLO, and sklearn. +keywords: K-Fold cross validation, Ultralytics, YOLO detection format, Python, sklearn, object detection +--- + +# K-Fold Cross Validation with Ultralytics + +## Introduction + +This comprehensive guide illustrates the implementation of K-Fold Cross Validation for object detection datasets within the Ultralytics ecosystem. We'll leverage the YOLO detection format and key Python libraries such as sklearn, pandas, and PyYaml to guide you through the necessary setup, the process of generating feature vectors, and the execution of a K-Fold dataset split. + +

+ K-Fold Cross Validation Overview +

+ +Whether your project involves the Fruit Detection dataset or a custom data source, this tutorial aims to help you comprehend and apply K-Fold Cross Validation to bolster the reliability and robustness of your machine learning models. While we're applying `k=5` folds for this tutorial, keep in mind that the optimal number of folds can vary depending on your dataset and the specifics of your project. + +Without further ado, let's dive in! + +## Setup + +- Your annotations should be in the [YOLO detection format](../datasets/detect/index.md). + +- This guide assumes that annotation files are locally available. + +- For our demonstration, we use the [Fruit Detection](https://www.kaggle.com/datasets/lakshaytyagi01/fruit-detection/code) dataset. + - This dataset contains a total of 8479 images. + - It includes 6 class labels, each with its total instance counts listed below. + +| Class Label | Instance Count | +|:------------|:--------------:| +| Apple | 7049 | +| Grapes | 7202 | +| Pineapple | 1613 | +| Orange | 15549 | +| Banana | 3536 | +| Watermelon | 1976 | + +- Necessary Python packages include: + + - `ultralytics` + - `sklearn` + - `pandas` + - `pyyaml` + +- This tutorial operates with `k=5` folds. However, you should determine the best number of folds for your specific dataset. + +1. Initiate a new Python virtual environment (`venv`) for your project and activate it. Use `pip` (or your preferred package manager) to install: + + - The Ultralytics library: `pip install -U ultralytics`. Alternatively, you can clone the official [repo](https://github.com/ultralytics/ultralytics). + - Scikit-learn, pandas, and PyYAML: `pip install -U scikit-learn pandas pyyaml`. + +2. Verify that your annotations are in the [YOLO detection format](../datasets/detect/index.md). + + - For this tutorial, all annotation files are found in the `Fruit-Detection/labels` directory. + +## Generating Feature Vectors for Object Detection Dataset + +1. Start by creating a new Python file and import the required libraries. + + ```python + import datetime + import shutil + from pathlib import Path + from collections import Counter + + import yaml + import numpy as np + import pandas as pd + from ultralytics import YOLO + from sklearn.model_selection import KFold + ``` + +2. Proceed to retrieve all label files for your dataset. + + ```python + dataset_path = Path('./Fruit-detection') # replace with 'path/to/dataset' for your custom data + labels = sorted(dataset_path.rglob("*labels/*.txt")) # all data in 'labels' + ``` + +3. Now, read the contents of the dataset YAML file and extract the indices of the class labels. + + ```python + yaml_file = 'path/to/data.yaml' # your data YAML with data directories and names dictionary + with open(yaml_file, 'r', encoding="utf8") as y: + classes = yaml.safe_load(y)['names'] + cls_idx = sorted(classes.keys()) + ``` + +4. Initialize an empty `pandas` DataFrame. + + ```python + indx = [l.stem for l in labels] # uses base filename as ID (no extension) + labels_df = pd.DataFrame([], columns=cls_idx, index=indx) + ``` + +5. Count the instances of each class-label present in the annotation files. + + ```python + for label in labels: + lbl_counter = Counter() + + with open(label,'r') as lf: + lines = lf.readlines() + + for l in lines: + # classes for YOLO label uses integer at first position of each line + lbl_counter[int(l.split(' ')[0])] += 1 + + labels_df.loc[label.stem] = lbl_counter + + labels_df = labels_df.fillna(0.0) # replace `nan` values with `0.0` + ``` + +6. 
The following is a sample view of the populated DataFrame: + + ```pandas + 0 1 2 3 4 5 + '0000a16e4b057580_jpg.rf.00ab48988370f64f5ca8ea4...' 0.0 0.0 0.0 0.0 0.0 7.0 + '0000a16e4b057580_jpg.rf.7e6dce029fb67f01eb19aa7...' 0.0 0.0 0.0 0.0 0.0 7.0 + '0000a16e4b057580_jpg.rf.bc4d31cdcbe229dd022957a...' 0.0 0.0 0.0 0.0 0.0 7.0 + '00020ebf74c4881c_jpg.rf.508192a0a97aa6c4a3b6882...' 0.0 0.0 0.0 1.0 0.0 0.0 + '00020ebf74c4881c_jpg.rf.5af192a2254c8ecc4188a25...' 0.0 0.0 0.0 1.0 0.0 0.0 + ... ... ... ... ... ... ... + 'ff4cd45896de38be_jpg.rf.c4b5e967ca10c7ced3b9e97...' 0.0 0.0 0.0 0.0 0.0 2.0 + 'ff4cd45896de38be_jpg.rf.ea4c1d37d2884b3e3cbce08...' 0.0 0.0 0.0 0.0 0.0 2.0 + 'ff5fd9c3c624b7dc_jpg.rf.bb519feaa36fc4bf630a033...' 1.0 0.0 0.0 0.0 0.0 0.0 + 'ff5fd9c3c624b7dc_jpg.rf.f0751c9c3aa4519ea3c9d6a...' 1.0 0.0 0.0 0.0 0.0 0.0 + 'fffe28b31f2a70d4_jpg.rf.7ea16bd637ba0711c53b540...' 0.0 6.0 0.0 0.0 0.0 0.0 + ``` + +The rows index the label files, each corresponding to an image in your dataset, and the columns correspond to your class-label indices. Each row represents a pseudo feature-vector, with the count of each class-label present in your dataset. This data structure enables the application of K-Fold Cross Validation to an object detection dataset. + +## K-Fold Dataset Split + +1. Now we will use the `KFold` class from `sklearn.model_selection` to generate `k` splits of the dataset. + + - Important: + - Setting `shuffle=True` ensures a randomized distribution of classes in your splits. + - By setting `random_state=M` where `M` is a chosen integer, you can obtain repeatable results. + + ```python + ksplit = 5 + kf = KFold(n_splits=ksplit, shuffle=True, random_state=20) # setting random_state for repeatable results + + kfolds = list(kf.split(labels_df)) + ``` + +2. The dataset has now been split into `k` folds, each having a list of `train` and `val` indices. We will construct a DataFrame to display these results more clearly. + + ```python + folds = [f'split_{n}' for n in range(1, ksplit + 1)] + folds_df = pd.DataFrame(index=indx, columns=folds) + + for idx, (train, val) in enumerate(kfolds, start=1): + folds_df[f'split_{idx}'].loc[labels_df.iloc[train].index] = 'train' + folds_df[f'split_{idx}'].loc[labels_df.iloc[val].index] = 'val' + ``` + +3. Now we will calculate the distribution of class labels for each fold as a ratio of the classes present in `val` to those present in `train`. + + ```python + fold_lbl_distrb = pd.DataFrame(index=folds, columns=cls_idx) + + for n, (train_indices, val_indices) in enumerate(kfolds, start=1): + train_totals = labels_df.iloc[train_indices].sum() + val_totals = labels_df.iloc[val_indices].sum() + + # To avoid division by zero, we add a small value (1E-7) to the denominator + ratio = val_totals / (train_totals + 1E-7) + fold_lbl_distrb.loc[f'split_{n}'] = ratio + ``` + + The ideal scenario is for all class ratios to be reasonably similar for each split and across classes. This, however, will be subject to the specifics of your dataset. + +4. Next, we create the directories and dataset YAML files for each split. 
+
+    ```python
+    supported_extensions = ['.jpg', '.jpeg', '.png']
+
+    # Initialize an empty list to store image file paths
+    images = []
+
+    # Loop through supported extensions and gather image files
+    for ext in supported_extensions:
+        images.extend(sorted((dataset_path / 'images').rglob(f"*{ext}")))
+
+    # Create the necessary directories and dataset YAML files (unchanged)
+    save_path = Path(dataset_path / f'{datetime.date.today().isoformat()}_{ksplit}-Fold_Cross-val')
+    save_path.mkdir(parents=True, exist_ok=True)
+    ds_yamls = []
+
+    for split in folds_df.columns:
+        # Create directories
+        split_dir = save_path / split
+        split_dir.mkdir(parents=True, exist_ok=True)
+        (split_dir / 'train' / 'images').mkdir(parents=True, exist_ok=True)
+        (split_dir / 'train' / 'labels').mkdir(parents=True, exist_ok=True)
+        (split_dir / 'val' / 'images').mkdir(parents=True, exist_ok=True)
+        (split_dir / 'val' / 'labels').mkdir(parents=True, exist_ok=True)
+
+        # Create dataset YAML files
+        dataset_yaml = split_dir / f'{split}_dataset.yaml'
+        ds_yamls.append(dataset_yaml)
+
+        with open(dataset_yaml, 'w') as ds_y:
+            yaml.safe_dump({
+                'path': split_dir.as_posix(),
+                'train': 'train',
+                'val': 'val',
+                'names': classes
+            }, ds_y)
+    ```
+
+5. Lastly, copy images and labels into the respective directory ('train' or 'val') for each split.
+
+    - __NOTE:__ The time required for this portion of the code will vary based on the size of your dataset and your system hardware.
+
+    ```python
+    for image, label in zip(images, labels):
+        for split, k_split in folds_df.loc[image.stem].items():
+            # Destination directory
+            img_to_path = save_path / split / k_split / 'images'
+            lbl_to_path = save_path / split / k_split / 'labels'
+
+            # Copy image and label files to new directory; existing files are overwritten
+            shutil.copy(image, img_to_path / image.name)
+            shutil.copy(label, lbl_to_path / label.name)
+    ```
+
+## Save Records (Optional)
+
+Optionally, you can save the records of the K-Fold split and label distribution DataFrames as CSV files for future reference.
+
+```python
+folds_df.to_csv(save_path / "kfold_datasplit.csv")
+fold_lbl_distrb.to_csv(save_path / "kfold_label_distribution.csv")
+```
+
+## Train YOLO using K-Fold Data Splits
+
+1. First, specify the path to the weights you want each fold to train from.
+
+    ```python
+    weights_path = 'path/to/weights.pt'
+    ```
+
+2. Next, iterate over the dataset YAML files to run training. The results will be saved to a directory specified by the `project` and `name` arguments (by default, `runs/detect/train#`, where `#` is an integer index). Loading the model inside the loop ensures that every fold trains from the same initial weights rather than continuing from the previous fold.
+
+    ```python
+    results = {}
+
+    # Define your additional arguments here
+    batch = 16
+    project = 'kfold_demo'
+    epochs = 100
+
+    for k in range(ksplit):
+        dataset_yaml = ds_yamls[k]
+        model = YOLO(weights_path, task='detect')  # reload fresh weights for each fold
+        model.train(data=dataset_yaml, epochs=epochs, batch=batch, project=project)  # include any train arguments
+        results[k] = model.metrics  # save output metrics for further analysis
+    ```
+
+## Conclusion
+
+In this guide, we have explored the process of using K-Fold cross-validation for training the YOLO object detection model. We learned how to split our dataset into K partitions, ensuring a balanced class distribution across the different folds.
+
+We also explored the procedure for creating report DataFrames to visualize the data splits and label distributions across these splits, providing us a clear insight into the structure of our training and validation sets.
+
+Optionally, we saved our records for future reference, which could be particularly useful in large-scale projects or when troubleshooting model performance.
+
+Finally, we implemented the actual model training using each split in a loop, saving our training results for further analysis and comparison.
+
+This technique of K-Fold cross-validation is a robust way of making the most out of your available data, and it helps to ensure that your model performance is reliable and consistent across different data subsets. This results in a more generalizable and reliable model that is less likely to overfit to specific data patterns.
+
+Remember that although we used YOLO in this guide, these steps are mostly transferable to other machine learning models. Understanding these steps allows you to apply cross-validation effectively in your own machine learning projects. Happy coding!
diff --git a/ultralytics/docs/en/guides/model-deployment-options.md b/ultralytics/docs/en/guides/model-deployment-options.md
new file mode 100755
index 0000000..6a4adba
--- /dev/null
+++ b/ultralytics/docs/en/guides/model-deployment-options.md
@@ -0,0 +1,305 @@
+---
+comments: true
+description: A guide to help determine which deployment option to choose for your YOLOv8 model, including essential considerations.
+keywords: YOLOv8, Deployment, PyTorch, TorchScript, ONNX, OpenVINO, TensorRT, CoreML, TensorFlow, Export
+---
+
+# Understanding YOLOv8's Deployment Options
+
+## Introduction
+
+*Setting the Scene:* You've come a long way on your journey with YOLOv8. You've diligently collected data, meticulously annotated it, and put in the hours to train and rigorously evaluate your custom YOLOv8 model. Now, it's time to put your model to work for your specific application, use case, or project. But there's a critical decision that stands before you: how to export and deploy your model effectively.
+
+This guide walks you through YOLOv8's deployment options and the essential factors to consider to choose the right option for your project.
+
+## How to Select the Right Deployment Option for Your YOLOv8 Model
+
+When it's time to deploy your YOLOv8 model, selecting a suitable export format is very important. As outlined in the [Ultralytics YOLOv8 Modes documentation](../modes/export.md#usage-examples), the `model.export()` function allows for converting your trained model into a variety of formats tailored to diverse environments and performance requirements.
+
+The ideal format depends on your model's intended operational context, balancing speed, hardware constraints, and ease of integration. In the following section, we'll take a closer look at each export option, understanding when to choose each one.
+
+### YOLOv8's Deployment Options
+
+Let's walk through the different YOLOv8 deployment options. For a detailed walkthrough of the export process, visit the [Ultralytics documentation page on exporting](../modes/export.md).
+
+#### PyTorch
+
+PyTorch is an open-source machine learning library widely used for applications in deep learning and artificial intelligence. It provides a high level of flexibility and speed, which has made it a favorite among researchers and developers.
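+
+As a quick point of reference, here is a minimal sketch (assuming the `ultralytics` package is installed and a local test image is available) of using a trained checkpoint in its native PyTorch form; the same `model.export()` call discussed above is the gateway to every other format covered below:
+
+```python
+from ultralytics import YOLO
+
+# Load a trained checkpoint in its native PyTorch format
+model = YOLO('yolov8n.pt')
+
+# Run inference directly, no export step required
+results = model('path/to/image.jpg')  # hypothetical image path
+
+# Convert to another deployment format, e.g. TorchScript or ONNX
+model.export(format='torchscript')
+```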
+
+- **Performance Benchmarks**: PyTorch is known for its ease of use and flexibility, which may result in a slight trade-off in raw performance when compared to other frameworks that are more specialized and optimized.
+
+- **Compatibility and Integration**: Offers excellent compatibility with various data science and machine learning libraries in Python.
+
+- **Community Support and Ecosystem**: One of the most vibrant communities, with extensive resources for learning and troubleshooting.
+
+- **Case Studies**: Commonly used in research prototypes; many academic papers reference models deployed in PyTorch.
+
+- **Maintenance and Updates**: Regular updates with active development and support for new features.
+
+- **Security Considerations**: Regular patches for security issues, but security is largely dependent on the overall environment it's deployed in.
+
+- **Hardware Acceleration**: Supports CUDA for GPU acceleration, essential for speeding up model training and inference.
+
+#### TorchScript
+
+TorchScript extends PyTorch's capabilities by allowing the exportation of models to be run in a C++ runtime environment. This makes it suitable for production environments where Python is unavailable.
+
+- **Performance Benchmarks**: Can offer improved performance over native PyTorch, especially in production environments.
+
+- **Compatibility and Integration**: Designed for seamless transition from PyTorch to C++ production environments, though some advanced features might not translate perfectly.
+
+- **Community Support and Ecosystem**: Benefits from PyTorch's large community but has a narrower scope of specialized developers.
+
+- **Case Studies**: Widely used in industry settings where Python's performance overhead is a bottleneck.
+
+- **Maintenance and Updates**: Maintained alongside PyTorch with consistent updates.
+
+- **Security Considerations**: Offers improved security by enabling the running of models in environments without full Python installations.
+
+- **Hardware Acceleration**: Inherits PyTorch's CUDA support, ensuring efficient GPU utilization.
+
+#### ONNX
+
+The Open Neural Network Exchange (ONNX) is a format that allows for model interoperability across different frameworks, which can be critical when deploying to various platforms.
+
+- **Performance Benchmarks**: ONNX models may experience variable performance depending on the specific runtime they are deployed on.
+
+- **Compatibility and Integration**: High interoperability across multiple platforms and hardware due to its framework-agnostic nature.
+
+- **Community Support and Ecosystem**: Supported by many organizations, leading to a broad ecosystem and a variety of tools for optimization.
+
+- **Case Studies**: Frequently used to move models between different machine learning frameworks, demonstrating its flexibility.
+
+- **Maintenance and Updates**: As an open standard, ONNX is regularly updated to support new operations and models.
+
+- **Security Considerations**: As with any cross-platform tool, it's essential to ensure secure practices in the conversion and deployment pipeline.
+
+- **Hardware Acceleration**: With ONNX Runtime, models can leverage various hardware optimizations.
+
+#### OpenVINO
+
+OpenVINO is an Intel toolkit designed to facilitate the deployment of deep learning models across Intel hardware, enhancing performance and speed.
+
+- **Performance Benchmarks**: Specifically optimized for Intel CPUs, GPUs, and VPUs, offering significant performance boosts on compatible hardware.
+
+- **Compatibility and Integration**: Works best within the Intel ecosystem but also supports a range of other platforms.
+
+- **Community Support and Ecosystem**: Backed by Intel, with a solid user base especially in the computer vision domain.
+
+- **Case Studies**: Often utilized in IoT and edge computing scenarios where Intel hardware is prevalent.
+
+- **Maintenance and Updates**: Intel regularly updates OpenVINO to support the latest deep learning models and Intel hardware.
+
+- **Security Considerations**: Provides robust security features suitable for deployment in sensitive applications.
+
+- **Hardware Acceleration**: Tailored for acceleration on Intel hardware, leveraging dedicated instruction sets and hardware features.
+
+For more details on deployment using OpenVINO, refer to the Ultralytics Integration documentation: [Intel OpenVINO Export](../integrations/openvino.md).
+
+#### TensorRT
+
+TensorRT is a high-performance deep learning inference optimizer and runtime from NVIDIA, ideal for applications needing speed and efficiency.
+
+- **Performance Benchmarks**: Delivers top-tier performance on NVIDIA GPUs with support for high-speed inference.
+
+- **Compatibility and Integration**: Best suited for NVIDIA hardware, with limited support outside this environment.
+
+- **Community Support and Ecosystem**: Strong support network through NVIDIA's developer forums and documentation.
+
+- **Case Studies**: Widely adopted in industries requiring real-time inference on video and image data.
+
+- **Maintenance and Updates**: NVIDIA maintains TensorRT with frequent updates to enhance performance and support new GPU architectures.
+
+- **Security Considerations**: Like many NVIDIA products, it has a strong emphasis on security, but specifics depend on the deployment environment.
+
+- **Hardware Acceleration**: Exclusively designed for NVIDIA GPUs, providing deep optimization and acceleration.
+
+#### CoreML
+
+CoreML is Apple's machine learning framework, optimized for on-device performance in the Apple ecosystem, including iOS, macOS, watchOS, and tvOS.
+
+- **Performance Benchmarks**: Optimized for on-device performance on Apple hardware with minimal battery usage.
+
+- **Compatibility and Integration**: Exclusively for Apple's ecosystem, providing a streamlined workflow for iOS and macOS applications.
+
+- **Community Support and Ecosystem**: Strong support from Apple and a dedicated developer community, with extensive documentation and tools.
+
+- **Case Studies**: Commonly used in applications that require on-device machine learning capabilities on Apple products.
+
+- **Maintenance and Updates**: Regularly updated by Apple to support the latest machine learning advancements and Apple hardware.
+
+- **Security Considerations**: Benefits from Apple's focus on user privacy and data security.
+
+- **Hardware Acceleration**: Takes full advantage of Apple's neural engine and GPU for accelerated machine learning tasks.
+
+#### TF SavedModel
+
+TF SavedModel is TensorFlow's format for saving and serving machine learning models, particularly suited for scalable server environments.
+
+- **Performance Benchmarks**: Offers scalable performance in server environments, especially when used with TensorFlow Serving.
+
+- **Compatibility and Integration**: Wide compatibility across TensorFlow's ecosystem, including cloud and enterprise server deployments.
+
+- **Community Support and Ecosystem**: Large community support due to TensorFlow's popularity, with a vast array of tools for deployment and optimization.
+
+- **Case Studies**: Extensively used in production environments for serving deep learning models at scale.
+
+- **Maintenance and Updates**: Supported by Google and the TensorFlow community, ensuring regular updates and new features.
+
+- **Security Considerations**: Deployment using TensorFlow Serving includes robust security features for enterprise-grade applications.
+
+- **Hardware Acceleration**: Supports various hardware accelerations through TensorFlow's backends.
+
+#### TF GraphDef
+
+TF GraphDef is a TensorFlow format that represents the model as a graph, which is beneficial for environments where a static computation graph is required.
+
+- **Performance Benchmarks**: Provides stable performance for static computation graphs, with a focus on consistency and reliability.
+
+- **Compatibility and Integration**: Easily integrates within TensorFlow's infrastructure but less flexible compared to SavedModel.
+
+- **Community Support and Ecosystem**: Good support from TensorFlow's ecosystem, with many resources available for optimizing static graphs.
+
+- **Case Studies**: Useful in scenarios where a static graph is necessary, such as in certain embedded systems.
+
+- **Maintenance and Updates**: Regular updates alongside TensorFlow's core updates.
+
+- **Security Considerations**: Ensures safe deployment with TensorFlow's established security practices.
+
+- **Hardware Acceleration**: Can utilize TensorFlow's hardware acceleration options, though not as flexible as SavedModel.
+
+#### TF Lite
+
+TF Lite is TensorFlow's solution for mobile and embedded device machine learning, providing a lightweight library for on-device inference.
+
+- **Performance Benchmarks**: Designed for speed and efficiency on mobile and embedded devices.
+
+- **Compatibility and Integration**: Can be used on a wide range of devices due to its lightweight nature.
+
+- **Community Support and Ecosystem**: Backed by Google, it has a robust community and a growing number of resources for developers.
+
+- **Case Studies**: Popular in mobile applications that require on-device inference with minimal footprint.
+
+- **Maintenance and Updates**: Regularly updated to include the latest features and optimizations for mobile devices.
+
+- **Security Considerations**: Provides a secure environment for running models on end-user devices.
+
+- **Hardware Acceleration**: Supports a variety of hardware acceleration options, including GPU and DSP.
+
+#### TF Edge TPU
+
+TF Edge TPU is designed for high-speed, efficient computing on Google's Edge TPU hardware, perfect for IoT devices requiring real-time processing.
+
+- **Performance Benchmarks**: Specifically optimized for high-speed, efficient computing on Google's Edge TPU hardware.
+
+- **Compatibility and Integration**: Works exclusively with TensorFlow Lite models on Edge TPU devices.
+
+- **Community Support and Ecosystem**: Growing support with resources provided by Google and third-party developers.
+
+- **Case Studies**: Used in IoT devices and applications that require real-time processing with low latency.
+
+- **Maintenance and Updates**: Continually improved upon to leverage the capabilities of new Edge TPU hardware releases.
+
+- **Security Considerations**: Integrates with Google's robust security for IoT and edge devices.
+
+- **Hardware Acceleration**: Custom-designed to take full advantage of Google Coral devices.
+
+#### TF.js
+
+TensorFlow.js (TF.js) is a library that brings machine learning capabilities directly to the browser, offering a new realm of possibilities for web developers and users alike. It allows for the integration of machine learning models in web applications without the need for back-end infrastructure.
+
+- **Performance Benchmarks**: Enables machine learning directly in the browser with reasonable performance, depending on the client device.
+
+- **Compatibility and Integration**: High compatibility with web technologies, allowing for easy integration into web applications.
+
+- **Community Support and Ecosystem**: Support from a community of web and Node.js developers, with a variety of tools for deploying ML models in browsers.
+
+- **Case Studies**: Ideal for interactive web applications that benefit from client-side machine learning without the need for server-side processing.
+
+- **Maintenance and Updates**: Maintained by the TensorFlow team with contributions from the open-source community.
+
+- **Security Considerations**: Runs within the browser's secure context, utilizing the security model of the web platform.
+
+- **Hardware Acceleration**: Performance can be enhanced with web-based APIs that access hardware acceleration like WebGL.
+
+#### PaddlePaddle
+
+PaddlePaddle is an open-source deep learning framework developed by Baidu. It is designed to be both efficient for researchers and easy to use for developers. It's particularly popular in China and offers specialized support for Chinese language processing.
+
+- **Performance Benchmarks**: Offers competitive performance with a focus on ease of use and scalability.
+
+- **Compatibility and Integration**: Well-integrated within Baidu's ecosystem and supports a wide range of applications.
+
+- **Community Support and Ecosystem**: While the community is smaller globally, it's rapidly growing, especially in China.
+
+- **Case Studies**: Commonly used in Chinese markets and by developers looking for alternatives to other major frameworks.
+
+- **Maintenance and Updates**: Regularly updated with a focus on serving Chinese language AI applications and services.
+
+- **Security Considerations**: Emphasizes data privacy and security, catering to Chinese data governance standards.
+
+- **Hardware Acceleration**: Supports various hardware accelerations, including Baidu's own Kunlun chips.
+
+#### ncnn
+
+ncnn is a high-performance neural network inference framework optimized for the mobile platform. It stands out for its lightweight nature and efficiency, making it particularly well-suited for mobile and embedded devices where resources are limited.
+
+- **Performance Benchmarks**: Highly optimized for mobile platforms, offering efficient inference on ARM-based devices.
+
+- **Compatibility and Integration**: Suitable for applications on mobile phones and embedded systems with ARM architecture.
+
+- **Community Support and Ecosystem**: Supported by a niche but active community focused on mobile and embedded ML applications.
+
+- **Case Studies**: Favoured for mobile applications where efficiency and speed are critical on Android and other ARM-based systems.
+
+- **Maintenance and Updates**: Continuously improved to maintain high performance on a range of ARM devices.
+
+- **Security Considerations**: Focuses on running locally on the device, leveraging the inherent security of on-device processing.
+
+- **Hardware Acceleration**: Tailored for ARM CPUs and GPUs, with specific optimizations for these architectures.
+
+## Comparative Analysis of YOLOv8 Deployment Options
+
+The following table provides a snapshot of the various deployment options available for YOLOv8 models, helping you to assess which may best fit your project needs based on several critical criteria. For an in-depth look at each deployment option's format, please see the [Ultralytics documentation page on export formats](../modes/export.md#export-formats).
+
+| Deployment Option | Performance Benchmarks | Compatibility and Integration | Community Support and Ecosystem | Case Studies | Maintenance and Updates | Security Considerations | Hardware Acceleration |
+|-------------------|------------------------|-------------------------------|---------------------------------|--------------|-------------------------|-------------------------|-----------------------|
+| PyTorch | Good flexibility; may trade off raw performance | Excellent with Python libraries | Extensive resources and community | Research and prototypes | Regular, active development | Dependent on deployment environment | CUDA support for GPU acceleration |
+| TorchScript | Better for production than PyTorch | Smooth transition from PyTorch to C++ | Specialized but narrower than PyTorch | Industry where Python is a bottleneck | Consistent updates with PyTorch | Improved security without full Python | Inherits CUDA support from PyTorch |
+| ONNX | Variable depending on runtime | High across different frameworks | Broad ecosystem, supported by many orgs | Flexibility across ML frameworks | Regular updates for new operations | Ensure secure conversion and deployment practices | Various hardware optimizations |
+| OpenVINO | Optimized for Intel hardware | Best within Intel ecosystem | Solid in computer vision domain | IoT and edge with Intel hardware | Regular updates for Intel hardware | Robust features for sensitive applications | Tailored for Intel hardware |
+| TensorRT | Top-tier on NVIDIA GPUs | Best for NVIDIA hardware | Strong network through NVIDIA | Real-time video and image inference | Frequent updates for new GPUs | Emphasis on security | Designed for NVIDIA GPUs |
+| CoreML | Optimized for on-device Apple hardware | Exclusive to Apple ecosystem | Strong Apple and developer support | On-device ML on Apple products | Regular Apple updates | Focus on privacy and security | Apple neural engine and GPU |
+| TF SavedModel | Scalable in server environments | Wide compatibility in TensorFlow ecosystem | Large support due to TensorFlow popularity | Serving models at scale | Regular updates by Google and community | Robust features for enterprise | Various hardware accelerations |
+| TF GraphDef | Stable for static computation graphs | Integrates well with TensorFlow infrastructure | Resources for optimizing static graphs | Scenarios requiring static graphs | Updates alongside TensorFlow core | Established TensorFlow security practices | TensorFlow acceleration options |
+| TF Lite | Speed and efficiency on mobile/embedded | Wide range of device support | Robust community, Google backed | Mobile applications with minimal footprint | Latest features for mobile | Secure environment on end-user devices | GPU and DSP among others |
+| TF Edge TPU | Optimized for Google's Edge TPU hardware | Exclusive to Edge TPU devices | Growing with Google and third-party resources | IoT devices requiring real-time processing | Improvements for new Edge TPU hardware | Google's robust IoT security | Custom-designed for Google Coral |
+| TF.js | Reasonable in-browser performance | High with web technologies | Web and Node.js developers support | Interactive web applications | TensorFlow team and community contributions | Web platform security model | Enhanced with WebGL and other APIs |
+| PaddlePaddle | Competitive, easy to use and scalable | Baidu ecosystem, wide application support | Rapidly growing, especially in China | Chinese market and language processing | Focus on Chinese AI applications | Emphasizes data privacy and security | Including Baidu's Kunlun chips |
+| ncnn | Optimized for mobile ARM-based devices | Mobile and embedded ARM systems | Niche but active mobile/embedded ML community | Android and ARM systems efficiency | High performance maintenance on ARM | On-device security advantages | ARM CPUs and GPUs optimizations |
+
+This comparative analysis gives you a high-level overview. For deployment, it's essential to consider the specific requirements and constraints of your project, and consult the detailed documentation and resources available for each option.
+
+## Community and Support
+
+When you're getting started with YOLOv8, having a helpful community and support can make a significant impact. Here's how to connect with others who share your interests and get the assistance you need.
+
+### Engage with the Broader Community
+
+- **GitHub Discussions:** The YOLOv8 repository on GitHub has a "Discussions" section where you can ask questions, report issues, and suggest improvements.
+
+- **Ultralytics Discord Server:** Ultralytics has a [Discord server](https://ultralytics.com/discord/) where you can interact with other users and developers.
+
+### Official Documentation and Resources
+
+- **Ultralytics YOLOv8 Docs:** The [official documentation](../index.md) provides a comprehensive overview of YOLOv8, along with guides on installation, usage, and troubleshooting.
+
+These resources will help you tackle challenges and stay updated on the latest trends and best practices in the YOLOv8 community.
+
+## Conclusion
+
+In this guide, we've explored the different deployment options for YOLOv8. We've also discussed the important factors to consider when making your choice. These options allow you to customize your model for various environments and performance requirements, making it suitable for real-world applications.
+
+Don't forget that the YOLOv8 and Ultralytics community is a valuable source of help. Connect with other developers and experts to learn unique tips and solutions you might not find in regular documentation. Keep seeking knowledge, exploring new ideas, and sharing your experiences.
+
+Happy deploying!
diff --git a/ultralytics/docs/en/guides/object-counting.md b/ultralytics/docs/en/guides/object-counting.md
new file mode 100755
index 0000000..dcc4756
--- /dev/null
+++ b/ultralytics/docs/en/guides/object-counting.md
@@ -0,0 +1,197 @@
+---
+comments: true
+description: Object Counting Using Ultralytics YOLOv8
+keywords: Ultralytics, YOLOv8, Object Detection, Object Counting, Object Tracking, Notebook, IPython Kernel, CLI, Python SDK
+---
+
+# Object Counting using Ultralytics YOLOv8 🚀
+
+## What is Object Counting?
+
+Object counting with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) involves accurate identification and counting of specific objects in videos and camera streams. YOLOv8 excels in real-time applications, providing efficient and precise object counting for various scenarios like crowd analysis and surveillance, thanks to its state-of-the-art algorithms and deep learning capabilities.
+
+**Watch:** Object Counting using Ultralytics YOLOv8 *(embedded video)*
+
+## Advantages of Object Counting
+
+- **Resource Optimization:** Object counting facilitates efficient resource management by providing accurate counts, and optimizing resource allocation in applications like inventory management.
+- **Enhanced Security:** Object counting enhances security and surveillance by accurately tracking and counting entities, aiding in proactive threat detection.
+- **Informed Decision-Making:** Object counting offers valuable insights for decision-making, optimizing processes in retail, traffic management, and various other domains.
+
+## Real World Applications
+
+| Logistics | Aquaculture |
+|:---------:|:-----------:|
+| ![Conveyor Belt Packets Counting Using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/70e2d106-510c-4c6c-a57a-d34a765aa757) | ![Fish Counting in Sea using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/c60d047b-3837-435f-8d29-bb9fc95d2191) |
+| Conveyor Belt Packets Counting Using Ultralytics YOLOv8 | Fish Counting in Sea using Ultralytics YOLOv8 |
+
+!!! Example "Object Counting using YOLOv8 Example"
+
+    === "Region"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import object_counter
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Define region points
+        region_points = [(20, 400), (1080, 404), (1080, 360), (20, 360)]
+
+        # Video writer
+        video_writer = cv2.VideoWriter("object_counting_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(cv2.CAP_PROP_FPS)),
+                                       (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
+
+        # Init Object Counter
+        counter = object_counter.ObjectCounter()
+        counter.set_args(view_img=True,
+                         reg_pts=region_points,
+                         classes_names=model.names,
+                         draw_tracks=True)
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False)
+
+            im0 = counter.start_counting(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+    === "Line"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import object_counter
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        # Define line points
+        line_points = [(20, 400), (1080, 400)]
+
+        # Video writer
+        video_writer = cv2.VideoWriter("object_counting_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(cv2.CAP_PROP_FPS)),
+                                       (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
+
+        # Init Object Counter
+        counter = object_counter.ObjectCounter()
+        counter.set_args(view_img=True,
+                         reg_pts=line_points,
+                         classes_names=model.names,
+                         draw_tracks=True)
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False)
+
+            im0 = counter.start_counting(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+    === "Specific Classes"
+        ```python
+        from ultralytics import YOLO
+        from ultralytics.solutions import object_counter
+        import cv2
+
+        model = YOLO("yolov8n.pt")
+        cap = cv2.VideoCapture("path/to/video/file.mp4")
+        assert cap.isOpened(), "Error reading video file"
+
+        line_points = [(20, 400), (1080, 400)]  # line or region points
+        classes_to_count = [0, 2]  # person and car classes for count
+
+        # Video writer
+        video_writer = cv2.VideoWriter("object_counting_output.avi",
+                                       cv2.VideoWriter_fourcc(*'mp4v'),
+                                       int(cap.get(cv2.CAP_PROP_FPS)),
+                                       (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
+
+        # Init Object Counter
+        counter = object_counter.ObjectCounter()
+        counter.set_args(view_img=True,
+                         reg_pts=line_points,
+                         classes_names=model.names,
+                         draw_tracks=True)
+
+        while cap.isOpened():
+            success, im0 = cap.read()
+            if not success:
+                print("Video frame is empty or video processing has been successfully completed.")
+                break
+            tracks = model.track(im0, persist=True, show=False,
+                                 classes=classes_to_count)
+
+            im0 = counter.start_counting(im0, tracks)
+            video_writer.write(im0)
+
+        cap.release()
+        video_writer.release()
+        cv2.destroyAllWindows()
+        ```
+
+???+ tip "Region is Movable"
+
+    You can move the region anywhere in the frame by clicking on its edges.
+
+### Optional Arguments `set_args`
+
+| Name                | Type        | Default                    | Description                                   |
+|---------------------|-------------|----------------------------|-----------------------------------------------|
+| view_img            | `bool`      | `False`                    | Display frames with counts                    |
+| line_thickness      | `int`       | `2`                        | Increase bounding boxes thickness             |
+| reg_pts             | `list`      | `[(20, 400), (1260, 400)]` | Points defining the Region Area               |
+| classes_names       | `dict`      | `model.model.names`        | Dictionary of Class Names                     |
+| region_color        | `RGB Color` | `(255, 0, 255)`            | Color of the Object counting Region or Line   |
+| track_thickness     | `int`       | `2`                        | Thickness of Tracking Lines                   |
+| draw_tracks         | `bool`      | `False`                    | Enable drawing Track lines                    |
+| track_color         | `RGB Color` | `(0, 255, 0)`              | Color for each track line                     |
+| line_dist_thresh    | `int`       | `15`                       | Euclidean Distance threshold for line counter |
+| count_txt_thickness | `int`       | `2`                        | Thickness of Object counts text               |
+| count_txt_color     | `RGB Color` | `(0, 0, 0)`                | Foreground color for Object counts text       |
+| count_color         | `RGB Color` | `(255, 255, 255)`          | Background color for Object counts text       |
+| region_thickness    | `int`       | `5`                        | Thickness for object counter region or line   |
+
+### Arguments `model.track`
+
+| Name      | Type    | Default        | Description                                                  |
+|-----------|---------|----------------|--------------------------------------------------------------|
+| `source`  | `im0`   | `None`         | source directory for images or videos                        |
+| `persist` | `bool`  | `False`        | persisting tracks between frames                             |
+| `tracker` | `str`   | `botsort.yaml` | Tracking method 'bytetrack' or 'botsort'                     |
+| `conf`    | `float` | `0.3`          | Confidence Threshold                                         |
+| `iou`     | `float` | `0.5`          | IOU Threshold                                                |
+| `classes` | `list`  | `None`         | filter results by class, i.e. classes=0, or classes=[0,2,3] |
+| `verbose` | `bool`  | `True`         | Display the object tracking results                          |
diff --git a/ultralytics/docs/en/guides/raspberry-pi.md b/ultralytics/docs/en/guides/raspberry-pi.md
new file mode 100755
index 0000000..389db0d
--- /dev/null
+++ b/ultralytics/docs/en/guides/raspberry-pi.md
@@ -0,0 +1,196 @@
+---
+comments: true
+description: Quick start guide to setting up YOLO on a Raspberry Pi with a Pi Camera using the libcamera stack. Detailed comparison between Raspberry Pi 3, 4 and 5 models.
+keywords: Ultralytics, YOLO, Raspberry Pi, Pi Camera, libcamera, quick start guide, Raspberry Pi 4 vs Raspberry Pi 5, YOLO on Raspberry Pi, hardware setup, machine learning, AI
+---
+
+# Quick Start Guide: Raspberry Pi and Pi Camera with YOLOv5 and YOLOv8
+
+This comprehensive guide aims to expedite your journey with YOLO object detection models on a [Raspberry Pi](https://www.raspberrypi.com/) using a [Pi Camera](https://www.raspberrypi.com/products/camera-module-v2/). Whether you're a student, hobbyist, or a professional, this guide is designed to get you up and running in less than 30 minutes. The instructions here are rigorously tested to minimize setup issues, allowing you to focus on utilizing YOLO for your specific projects.
+
+**Watch:** Raspberry Pi 5 updates and improvements *(embedded video)*
+
+## Prerequisites
+
+- Raspberry Pi 3, 4 or 5
+- Pi Camera
+- 64-bit Raspberry Pi Operating System
+
+Connect the Pi Camera to your Raspberry Pi via a CSI cable and install the 64-bit Raspberry Pi Operating System. Verify your camera with the following command:
+
+```bash
+libcamera-hello
+```
+
+You should see a video feed from your camera.
+
+## Choose Your YOLO Version: YOLOv5 or YOLOv8
+
+This guide offers you the flexibility to start with either [YOLOv5](https://github.com/ultralytics/yolov5) or [YOLOv8](https://github.com/ultralytics/ultralytics). Both versions have their unique advantages and use-cases. The choice is yours, but remember, the guide's aim is not just quick setup but also a robust foundation for your future work in object detection.
+
+## Hardware Specifics: At a Glance
+
+To assist you in making an informed hardware decision, we've summarized the key hardware specifics of Raspberry Pi 3, 4, and 5 in the table below:
+
+| Feature | Raspberry Pi 3 | Raspberry Pi 4 | Raspberry Pi 5 |
+|---------|----------------|----------------|----------------|
+| **CPU** | 1.2GHz Quad-Core ARM Cortex-A53 | 1.5GHz Quad-core 64-bit ARM Cortex-A72 | 2.4GHz Quad-core 64-bit Arm Cortex-A76 |
+| **RAM** | 1GB LPDDR2 | 2GB, 4GB or 8GB LPDDR4 | *Details not yet available* |
+| **USB Ports** | 4 x USB 2.0 | 2 x USB 2.0, 2 x USB 3.0 | 2 x USB 3.0, 2 x USB 2.0 |
+| **Network** | Ethernet & Wi-Fi 802.11n | Gigabit Ethernet & Wi-Fi 802.11ac | Gigabit Ethernet with PoE+ support, Dual-band 802.11ac Wi-Fi® |
+| **Performance** | Slower, may require lighter YOLO models | Faster, can run complex YOLO models | *Details not yet available* |
+| **Power Requirement** | 2.5A power supply | 3.0A USB-C power supply | *Details not yet available* |
+| **Official Documentation** | [Link](https://www.raspberrypi.org/documentation/hardware/raspberrypi/bcm2837/README.md) | [Link](https://www.raspberrypi.org/documentation/hardware/raspberrypi/bcm2711/README.md) | [Link](https://www.raspberrypi.com/news/introducing-raspberry-pi-5/) |
+
+Please make sure to follow the instructions specific to your Raspberry Pi model to ensure a smooth setup process.
+
+## Quick Start with YOLOv5
+
+This section outlines how to set up YOLOv5 on a Raspberry Pi with a Pi Camera. These steps are designed to be compatible with the libcamera camera stack introduced in Raspberry Pi OS Bullseye.
+
+### Install Necessary Packages
+
+1. Update the Raspberry Pi:
+
+    ```bash
+    sudo apt-get update
+    sudo apt-get upgrade -y
+    sudo apt-get autoremove -y
+    ```
+
+2. Clone the YOLOv5 repository:
+
+    ```bash
+    cd ~
+    git clone https://github.com/ultralytics/yolov5.git
+    ```
+
+3. Install the required dependencies:
+
+    ```bash
+    cd ~/yolov5
+    pip3 install -r requirements.txt
+    ```
+
+4. For Raspberry Pi 3, install compatible versions of PyTorch and Torchvision (skip for Raspberry Pi 4):
+
+    ```bash
+    pip3 uninstall torch torchvision
+    pip3 install torch==1.11.0 torchvision==0.12.0
+    ```
+
+### Modify `detect.py`
+
+To enable TCP streams via SSH or the CLI, minor modifications are needed in `detect.py`.
+
+1. Open `detect.py`:
+
+    ```bash
+    sudo nano ~/yolov5/detect.py
+    ```
+
+2. Find and modify the `is_url` line to accept TCP streams:
+
+    ```python
+    is_url = source.lower().startswith(('rtsp://', 'rtmp://', 'http://', 'https://', 'tcp://'))
+    ```
+
+3. Comment out the `view_img` line:
+
+    ```python
+    # view_img = check_imshow(warn=True)
+    ```
+
+4. Save and exit:
+
+    ```bash
+    CTRL + O -> ENTER -> CTRL + X
+    ```
+
+### Initiate TCP Stream with Libcamera
+
+1. Start the TCP stream:
+
+    ```bash
+    libcamera-vid -n -t 0 --width 1280 --height 960 --framerate 1 --inline --listen -o tcp://127.0.0.1:8888
+    ```
+
+Keep this terminal session running for the next steps.
+
+### Perform YOLOv5 Inference
+
+1. Run the YOLOv5 detection:
+
+    ```bash
+    cd ~/yolov5
+    python3 detect.py --source=tcp://127.0.0.1:8888
+    ```
+
+## Quick Start with YOLOv8
+
+Follow this section if you are interested in setting up YOLOv8 instead. The steps are quite similar but are tailored for YOLOv8's specific needs.
+
+### Install Necessary Packages
+
+1. Update the Raspberry Pi:
+
+    ```bash
+    sudo apt-get update
+    sudo apt-get upgrade -y
+    sudo apt-get autoremove -y
+    ```
+
+2. Install the `ultralytics` Python package:
+
+    ```bash
+    pip3 install ultralytics
+    ```
+
+3. Reboot:
+
+    ```bash
+    sudo reboot
+    ```
+
+### Initiate TCP Stream with Libcamera
+
+1. Start the TCP stream:
+
+    ```bash
+    libcamera-vid -n -t 0 --width 1280 --height 960 --framerate 1 --inline --listen -o tcp://127.0.0.1:8888
+    ```
+
+### Perform YOLOv8 Inference
+
+To perform inference with YOLOv8, you can use the following Python code snippet. With `stream=True` the model returns a generator of results, so a single `for` loop processes frames for as long as the stream stays open:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model('tcp://127.0.0.1:8888', stream=True)
+
+for result in results:
+    boxes = result.boxes
+    probs = result.probs
+```
+
+## Next Steps
+
+Congratulations on successfully setting up YOLO on your Raspberry Pi! For further learning and support, visit [Ultralytics](https://ultralytics.com/) and [Kashmir World Foundation](https://www.kashmirworldfoundation.org/).
+
+## Acknowledgements and Citations
+
+This guide was initially created by Daan Eeltink for Kashmir World Foundation, an organization dedicated to the use of YOLO for the conservation of endangered species. We acknowledge their pioneering work and educational focus in the realm of object detection technologies.
+
+For more information about Kashmir World Foundation's activities, you can visit their [website](https://www.kashmirworldfoundation.org/).
diff --git a/ultralytics/docs/en/guides/region-counting.md b/ultralytics/docs/en/guides/region-counting.md
new file mode 100755
index 0000000..5e8b828
--- /dev/null
+++ b/ultralytics/docs/en/guides/region-counting.md
@@ -0,0 +1,86 @@
+---
+comments: true
+description: Object Counting in Different Regions using Ultralytics YOLOv8
+keywords: Ultralytics, YOLOv8, Object Detection, Object Counting, Object Tracking, Notebook, IPython Kernel, CLI, Python SDK
+---
+
+# Object Counting in Different Regions using Ultralytics YOLOv8 🚀
+
+## What is Object Counting in Regions?
+
+Object counting in regions with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) involves precisely determining the number of objects within specified areas using advanced computer vision. This approach is valuable for optimizing processes, enhancing security, and improving efficiency in various applications.
+
+**Watch:** Ultralytics YOLOv8 Object Counting in Multiple & Movable Regions *(embedded video)*
+
+## Advantages of Object Counting in Regions
+
+- **Precision and Accuracy:** Object counting in regions with advanced computer vision ensures precise and accurate counts, minimizing errors often associated with manual counting.
+- **Efficiency Improvement:** Automated object counting enhances operational efficiency, providing real-time results and streamlining processes across different applications.
+- **Versatility and Application:** The versatility of object counting in regions makes it applicable across various domains, from manufacturing and surveillance to traffic monitoring, contributing to its widespread utility and effectiveness.
+
+## Real World Applications
+
+| Retail | Market Streets |
+|:------:|:--------------:|
+| ![People Counting in Different Region using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/5ab3bbd7-fd12-4849-928e-5f294d6c3fcf) | ![Crowd Counting in Different Region using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/e7c1aea7-474d-4d78-8d48-b50854ffe1ca) |
+| People Counting in Different Region using Ultralytics YOLOv8 | Crowd Counting in Different Region using Ultralytics YOLOv8 |
+
+## Steps to Run
+
+### Step 1: Install Required Libraries
+
+Begin by cloning the Ultralytics repository, installing dependencies, and navigating to the local directory using the commands below.
+
+```bash
+# Clone Ultralytics repo
+git clone https://github.com/ultralytics/ultralytics
+
+# Install dependencies
+pip install -U ultralytics
+
+# Navigate to the local directory
+cd ultralytics/examples/YOLOv8-Region-Counter
+```
+
+### Step 2: Run Region Counting Using Ultralytics YOLOv8
+
+Execute the following basic commands for inference.
+
+???+ tip "Region is Movable"
+
+    During video playback, you can interactively move the region within the video by clicking and dragging using the left mouse button.
+
+```bash
+# Save results
+python yolov8_region_counter.py --source "path/to/video.mp4" --save-img

+# Run model on CPU
+python yolov8_region_counter.py --source "path/to/video.mp4" --device cpu
+
+# Change model file
+python yolov8_region_counter.py --source "path/to/video.mp4" --weights "path/to/model.pt"
+
+# Detect specific classes (e.g., first and third classes)
+python yolov8_region_counter.py --source "path/to/video.mp4" --classes 0 2
+
+# View results without saving
+python yolov8_region_counter.py --source "path/to/video.mp4" --view-img
+```
+
+### Optional Arguments
+
+| Name                 | Type   | Default      | Description                               |
+|----------------------|--------|--------------|-------------------------------------------|
+| `--source`           | `str`  | `None`       | Path to video file; use `0` for webcam    |
+| `--line_thickness`   | `int`  | `2`          | Bounding Box thickness                    |
+| `--save-img`         | `bool` | `False`      | Save the predicted video/image            |
+| `--weights`          | `str`  | `yolov8n.pt` | Weights file path                         |
+| `--classes`          | `list` | `None`       | Detect specific classes, i.e. --classes 0 2 |
+| `--region-thickness` | `int`  | `2`          | Region Box thickness                      |
+| `--track-thickness`  | `int`  | `2`          | Tracking line thickness                   |
diff --git a/ultralytics/docs/en/guides/sahi-tiled-inference.md b/ultralytics/docs/en/guides/sahi-tiled-inference.md
new file mode 100755
index 0000000..9728703
--- /dev/null
+++ b/ultralytics/docs/en/guides/sahi-tiled-inference.md
@@ -0,0 +1,185 @@
+---
+comments: true
+description: A comprehensive guide on how to use YOLOv8 with SAHI for standard and sliced inference in object detection tasks.
+keywords: YOLOv8, SAHI, Sliced Inference, Object Detection, Ultralytics, Large Scale Image Analysis, High-Resolution Imagery
+---
+
+# Ultralytics Docs: Using YOLOv8 with SAHI for Sliced Inference
+
+Welcome to the Ultralytics documentation on how to use YOLOv8 with [SAHI](https://github.com/obss/sahi) (Slicing Aided Hyper Inference). This comprehensive guide aims to furnish you with all the essential knowledge you'll need to implement SAHI alongside YOLOv8. We'll deep-dive into what SAHI is, why sliced inference is critical for large-scale applications, and how to integrate these functionalities with YOLOv8 for enhanced object detection performance.
+
+**SAHI Sliced Inference Overview** *(embedded image)*
+
+## Introduction to SAHI
+
+SAHI (Slicing Aided Hyper Inference) is an innovative library designed to optimize object detection algorithms for large-scale and high-resolution imagery. Its core functionality lies in partitioning images into manageable slices, running object detection on each slice, and then stitching the results back together. SAHI is compatible with a range of object detection models, including the YOLO series, thereby offering flexibility while ensuring optimized use of computational resources.
+
+### Key Features of SAHI
+
+- **Seamless Integration**: SAHI integrates effortlessly with YOLO models, meaning you can start slicing and detecting without a lot of code modification.
+- **Resource Efficiency**: By breaking down large images into smaller parts, SAHI optimizes the memory usage, allowing you to run high-quality detection on hardware with limited resources.
+- **High Accuracy**: SAHI maintains the detection accuracy by employing smart algorithms to merge overlapping detection boxes during the stitching process.
+
+## What is Sliced Inference?
+
+Sliced Inference refers to the practice of subdividing a large or high-resolution image into smaller segments (slices), conducting object detection on these slices, and then recompiling the slices to reconstruct the object locations on the original image. This technique is invaluable in scenarios where computational resources are limited or when working with extremely high-resolution images that could otherwise lead to memory issues.
+
+### Benefits of Sliced Inference
+
+- **Reduced Computational Burden**: Smaller image slices are faster to process, and they consume less memory, enabling smoother operation on lower-end hardware.
+
+- **Preserved Detection Quality**: Since each slice is treated independently, there is no reduction in the quality of object detection, provided the slices are large enough to capture the objects of interest.
+
+- **Enhanced Scalability**: The technique allows for object detection to be more easily scaled across different sizes and resolutions of images, making it ideal for a wide range of applications from satellite imagery to medical diagnostics.
+
+**YOLOv8 without SAHI vs. YOLOv8 with SAHI** *(embedded comparison images)*
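+
+To make the tiling idea concrete, here is a small illustrative helper (a sketch of the general technique, not part of SAHI's API) that computes the overlapping slice coordinates this kind of inference runs detection on, before the per-slice results are merged back together:
+
+```python
+def slice_boxes(img_w, img_h, slice_w=256, slice_h=256, overlap=0.2):
+    """Illustrative only: compute overlapping slice coordinates (x1, y1, x2, y2)
+    covering an img_w x img_h image, assuming 0 <= overlap < 1."""
+    step_x = int(slice_w * (1 - overlap))  # horizontal stride between slices
+    step_y = int(slice_h * (1 - overlap))  # vertical stride between slices
+    boxes = []
+    for y in range(0, img_h, step_y):
+        for x in range(0, img_w, step_x):
+            # clamp each slice to the image border
+            x2, y2 = min(x + slice_w, img_w), min(y + slice_h, img_h)
+            boxes.append((x, y, x2, y2))
+            if x2 == img_w:  # reached the right edge of this row
+                break
+        if y2 == img_h:  # reached the bottom edge of the image
+            break
+    return boxes
+
+# A 1024x768 image with 256x256 slices and 20% overlap yields 20 slices
+print(len(slice_boxes(1024, 768)))
+```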
+
+## Installation and Preparation
+
+### Installation
+
+To get started, install the latest versions of SAHI and Ultralytics:
+
+```bash
+pip install -U ultralytics sahi
+```
+
+### Import Modules and Download Resources
+
+Here's how to import the necessary modules and download a YOLOv8 model and some test images:
+
+```python
+from sahi.utils.yolov8 import download_yolov8s_model
+from sahi import AutoDetectionModel
+from sahi.utils.cv import read_image
+from sahi.utils.file import download_from_url
+from sahi.predict import get_prediction, get_sliced_prediction, predict
+from IPython.display import Image
+
+# Download YOLOv8 model
+yolov8_model_path = "models/yolov8s.pt"
+download_yolov8s_model(yolov8_model_path)
+
+# Download test images
+download_from_url('https://raw.githubusercontent.com/obss/sahi/main/demo/demo_data/small-vehicles1.jpeg', 'demo_data/small-vehicles1.jpeg')
+download_from_url('https://raw.githubusercontent.com/obss/sahi/main/demo/demo_data/terrain2.png', 'demo_data/terrain2.png')
+```
+
+## Standard Inference with YOLOv8
+
+### Instantiate the Model
+
+You can instantiate a YOLOv8 model for object detection like this:
+
+```python
+detection_model = AutoDetectionModel.from_pretrained(
+    model_type='yolov8',
+    model_path=yolov8_model_path,
+    confidence_threshold=0.3,
+    device="cpu",  # or 'cuda:0'
+)
+```
+
+### Perform Standard Prediction
+
+Perform standard inference using an image path or a numpy image.
+
+```python
+# With an image path
+result = get_prediction("demo_data/small-vehicles1.jpeg", detection_model)
+
+# With a numpy image
+result = get_prediction(read_image("demo_data/small-vehicles1.jpeg"), detection_model)
+```
+
+### Visualize Results
+
+Export and visualize the predicted bounding boxes and masks:
+
+```python
+result.export_visuals(export_dir="demo_data/")
+Image("demo_data/prediction_visual.png")
+```
+
+## Sliced Inference with YOLOv8
+
+Perform sliced inference by specifying the slice dimensions and overlap ratios:
+
+```python
+result = get_sliced_prediction(
+    "demo_data/small-vehicles1.jpeg",
+    detection_model,
+    slice_height=256,
+    slice_width=256,
+    overlap_height_ratio=0.2,
+    overlap_width_ratio=0.2
+)
+```
+
+## Handling Prediction Results
+
+SAHI provides a `PredictionResult` object, which can be converted into various annotation formats:
+
+```python
+# Access the object prediction list
+object_prediction_list = result.object_prediction_list
+
+# Convert to COCO annotation, COCO prediction, imantics, and fiftyone formats
+result.to_coco_annotations()[:3]
+result.to_coco_predictions(image_id=1)[:3]
+result.to_imantics_annotations()[:3]
+result.to_fiftyone_detections()[:3]
+```
+
+## Batch Prediction
+
+For batch prediction on a directory of images:
+
+```python
+predict(
+    model_type="yolov8",
+    model_path="path/to/yolov8n.pt",
+    model_device="cpu",  # or 'cuda:0'
+    model_confidence_threshold=0.4,
+    source="path/to/dir",
+    slice_height=256,
+    slice_width=256,
+    overlap_height_ratio=0.2,
+    overlap_width_ratio=0.2,
+)
+```
+
+That's it! Now you're equipped to use YOLOv8 with SAHI for both standard and sliced inference.
+
+## Citations and Acknowledgments
+
+If you use SAHI in your research or development work, please cite the original SAHI paper and acknowledge the authors:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{akyon2022sahi,
+          title={Slicing Aided Hyper Inference and Fine-tuning for Small Object Detection},
+          author={Akyon, Fatih Cagatay and Altinuc, Sinan Onur and Temizel, Alptekin},
+          journal={2022 IEEE International Conference on Image Processing (ICIP)},
+          doi={10.1109/ICIP46576.2022.9897990},
+          pages={966-970},
+          year={2022}
+        }
+        ```
+
+We extend our thanks to the SAHI research group for creating and maintaining this invaluable resource for the computer vision community. For more information about SAHI and its creators, visit the [SAHI GitHub repository](https://github.com/obss/sahi).
diff --git a/ultralytics/docs/en/guides/security-alarm-system.md b/ultralytics/docs/en/guides/security-alarm-system.md
new file mode 100755
index 0000000..d61221a
--- /dev/null
+++ b/ultralytics/docs/en/guides/security-alarm-system.md
@@ -0,0 +1,166 @@
+---
+comments: true
+description: Security Alarm System Project Using Ultralytics YOLOv8. Learn how to implement a security alarm system using Ultralytics YOLOv8.
+keywords: Object Detection, Security Alarm, Object Tracking, YOLOv8, Computer Vision Projects
+---
+
+# Security Alarm System Project Using Ultralytics YOLOv8
+
+**Security Alarm System** *(embedded image)*
+
+The Security Alarm System Project utilizing Ultralytics YOLOv8 integrates advanced computer vision capabilities to enhance security measures. YOLOv8, developed by Ultralytics, provides real-time object detection, allowing the system to identify and respond to potential security threats promptly. This project offers several advantages:
+
+- **Real-time Detection:** YOLOv8's efficiency enables the Security Alarm System to detect and respond to security incidents in real-time, minimizing response time.
+- **Accuracy:** YOLOv8 is known for its accuracy in object detection, reducing false positives and enhancing the reliability of the security alarm system.
+- **Integration Capabilities:** The project can be seamlessly integrated with existing security infrastructure, providing an upgraded layer of intelligent surveillance.
+
+**Watch:** Security Alarm System Project with Ultralytics YOLOv8 Object Detection *(embedded video)*
+ +### Code + +#### Import Libraries + +```python +import torch +import numpy as np +import cv2 +from time import time +from ultralytics import YOLO +from ultralytics.utils.plotting import Annotator, colors +import smtplib +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +``` + +#### Set up the parameters of the message + +???+ tip "Note" + + App Password Generation is necessary + +- Navigate to [App Password Generator](https://myaccount.google.com/apppasswords), designate an app name such as "security project," and obtain a 16-digit password. Copy this password and paste it into the designated password field as instructed. + +```python +password = "" +from_email = "" # must match the email used to generate the password +to_email = "" # receiver email +``` + +#### Server creation and authentication + +```python +server = smtplib.SMTP('smtp.gmail.com: 587') +server.starttls() +server.login(from_email, password) +``` + +#### Email Send Function + +```python +def send_email(to_email, from_email, object_detected=1): + message = MIMEMultipart() + message['From'] = from_email + message['To'] = to_email + message['Subject'] = "Security Alert" + # Add in the message body + message_body = f'ALERT - {object_detected} objects has been detected!!' + + message.attach(MIMEText(message_body, 'plain')) + server.sendmail(from_email, to_email, message.as_string()) +``` + +#### Object Detection and Alert Sender + +```python +class ObjectDetection: + def __init__(self, capture_index): + # default parameters + self.capture_index = capture_index + self.email_sent = False + + # model information + self.model = YOLO("yolov8n.pt") + + # visual information + self.annotator = None + self.start_time = 0 + self.end_time = 0 + + # device information + self.device = 'cuda' if torch.cuda.is_available() else 'cpu' + + def predict(self, im0): + results = self.model(im0) + return results + + def display_fps(self, im0): + self.end_time = time() + fps = 1 / np.round(self.end_time - self.start_time, 2) + text = f'FPS: {int(fps)}' + text_size = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, 1.0, 2)[0] + gap = 10 + cv2.rectangle(im0, (20 - gap, 70 - text_size[1] - gap), (20 + text_size[0] + gap, 70 + gap), (255, 255, 255), -1) + cv2.putText(im0, text, (20, 70), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 0), 2) + + def plot_bboxes(self, results, im0): + class_ids = [] + self.annotator = Annotator(im0, 3, results[0].names) + boxes = results[0].boxes.xyxy.cpu() + clss = results[0].boxes.cls.cpu().tolist() + names = results[0].names + for box, cls in zip(boxes, clss): + class_ids.append(cls) + self.annotator.box_label(box, label=names[int(cls)], color=colors(int(cls), True)) + return im0, class_ids + + def __call__(self): + cap = cv2.VideoCapture(self.capture_index) + assert cap.isOpened() + cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) + cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480) + frame_count = 0 + while True: + self.start_time = time() + ret, im0 = cap.read() + assert ret + results = self.predict(im0) + im0, class_ids = self.plot_bboxes(results, im0) + + if len(class_ids) > 0: # Only send email If not sent before + if not self.email_sent: + send_email(to_email, from_email, len(class_ids)) + self.email_sent = True + else: + self.email_sent = False + + self.display_fps(im0) + cv2.imshow('YOLOv8 Detection', im0) + frame_count += 1 + if cv2.waitKey(5) & 0xFF == 27: + break + cap.release() + cv2.destroyAllWindows() + server.quit() +``` + +#### Call the Object Detection class and Run the Inference + +```python 
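# Create the detector on the default webcam (capture index 0) and start the loop.
# Press Esc to stop; this assumes the SMTP `server`, `to_email` and `from_email`
# values defined earlier in this guide are in scope.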
+detector = ObjectDetection(capture_index=0) +detector() +``` + +That's it! When you execute the code, you'll receive a single notification on your email if any object is detected. The notification is sent immediately, not repeatedly. However, feel free to customize the code to suit your project requirements. + +#### Email Received Sample + +Email Received Sample diff --git a/ultralytics/docs/en/guides/security-alarm-system.md:Zone.Identifier b/ultralytics/docs/en/guides/security-alarm-system.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/security-alarm-system.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/triton-inference-server.md b/ultralytics/docs/en/guides/triton-inference-server.md new file mode 100755 index 0000000..fc90fde --- /dev/null +++ b/ultralytics/docs/en/guides/triton-inference-server.md @@ -0,0 +1,137 @@ +--- +comments: true +description: A step-by-step guide on integrating Ultralytics YOLOv8 with Triton Inference Server for scalable and high-performance deep learning inference deployments. +keywords: YOLOv8, Triton Inference Server, ONNX, Deep Learning Deployment, Scalable Inference, Ultralytics, NVIDIA, Object Detection, Cloud Inference +--- + +# Triton Inference Server with Ultralytics YOLOv8 + +The [Triton Inference Server](https://developer.nvidia.com/nvidia-triton-inference-server) (formerly known as TensorRT Inference Server) is an open-source software solution developed by NVIDIA. It provides a cloud inference solution optimized for NVIDIA GPUs. Triton simplifies the deployment of AI models at scale in production. Integrating Ultralytics YOLOv8 with Triton Inference Server allows you to deploy scalable, high-performance deep learning inference workloads. This guide provides steps to set up and test the integration. + +

Watch: Getting Started with NVIDIA Triton Inference Server.

## What is Triton Inference Server?

Triton Inference Server is designed to deploy a variety of AI models in production. It supports a wide range of deep learning and machine learning frameworks, including TensorFlow, PyTorch, ONNX Runtime, and many others. Its primary use cases are:

- Serving multiple models from a single server instance.
- Dynamic model loading and unloading without server restart.
- Ensemble inference, allowing multiple models to be used together to produce a combined result.
- Model versioning for A/B testing and rolling updates.

## Prerequisites

Ensure you have the following prerequisites before proceeding:

- Docker installed on your machine.
- Install `tritonclient`:
    ```bash
    pip install tritonclient[all]
    ```

## Exporting YOLOv8 to ONNX Format

Before deploying the model on Triton, it must be exported to the ONNX format. ONNX (Open Neural Network Exchange) is a format that allows models to be transferred between different deep learning frameworks. Use the `export` function from the `YOLO` class:

```python
from ultralytics import YOLO

# Load a model
model = YOLO('yolov8n.pt')  # load an official model

# Export the model
onnx_file = model.export(format='onnx', dynamic=True)
```

## Setting Up Triton Model Repository

The Triton Model Repository is a storage location where Triton can access and load models.

1. Create the necessary directory structure:

    ```python
    from pathlib import Path

    # Define paths
    triton_repo_path = Path('tmp') / 'triton_repo'
    triton_model_path = triton_repo_path / 'yolo'

    # Create directories
    (triton_model_path / '1').mkdir(parents=True, exist_ok=True)
    ```

2. Move the exported ONNX model to the Triton repository:

    ```python
    from pathlib import Path

    # Move ONNX model to Triton Model path
    Path(onnx_file).rename(triton_model_path / '1' / 'model.onnx')

    # Create config file
    (triton_model_path / 'config.pbtxt').touch()
    ```

## Running Triton Inference Server

Run the Triton Inference Server using Docker:

```python
import contextlib
import subprocess
import time

from tritonclient.http import InferenceServerClient

# Define image https://catalog.ngc.nvidia.com/orgs/nvidia/containers/tritonserver
tag = 'nvcr.io/nvidia/tritonserver:23.09-py3'  # 6.4 GB

# Pull the image
subprocess.call(f'docker pull {tag}', shell=True)

# Run the Triton server and capture the container ID
container_id = subprocess.check_output(
    f'docker run -d --rm -v {triton_repo_path}:/models -p 8000:8000 {tag} tritonserver --model-repository=/models',
    shell=True).decode('utf-8').strip()

# Wait for the Triton server to start
triton_client = InferenceServerClient(url='localhost:8000', verbose=False, ssl=False)

# Wait until the model is ready; 'yolo' matches the model directory created above
model_name = 'yolo'
for _ in range(10):
    with contextlib.suppress(Exception):
        assert triton_client.is_model_ready(model_name)
        break
    time.sleep(1)
```

Then run inference using the Triton Server model:

```python
from ultralytics import YOLO

# Load the Triton Server model
model = YOLO('http://localhost:8000/yolo', task='detect')

# Run inference on the server
results = model('path/to/image.jpg')
```

Clean up the container:

```python
# Kill and remove the container at the end of the test
subprocess.call(f'docker kill {container_id}', shell=True)
```

---

By following the above steps, you can deploy and run Ultralytics YOLOv8 models efficiently on Triton Inference Server, providing a scalable and high-performance solution for deep learning inference
tasks. If you face any issues or have further queries, refer to the [official Triton documentation](https://docs.nvidia.com/deeplearning/triton-inference-server/user-guide/docs/index.html) or reach out to the Ultralytics community for support. diff --git a/ultralytics/docs/en/guides/triton-inference-server.md:Zone.Identifier b/ultralytics/docs/en/guides/triton-inference-server.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/triton-inference-server.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/vision-eye.md b/ultralytics/docs/en/guides/vision-eye.md new file mode 100755 index 0000000..530d2eb --- /dev/null +++ b/ultralytics/docs/en/guides/vision-eye.md @@ -0,0 +1,116 @@ +--- +comments: true +description: VisionEye View Object Mapping using Ultralytics YOLOv8 +keywords: Ultralytics, YOLOv8, Object Detection, Object Tracking, IDetection, VisionEye, Computer Vision, Notebook, IPython Kernel, CLI, Python SDK +--- + +# VisionEye View Object Mapping using Ultralytics YOLOv8 ๐Ÿš€ + +## What is VisionEye Object Mapping? + +[Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) VisionEye offers the capability for computers to identify and pinpoint objects, simulating the observational precision of the human eye. This functionality enables computers to discern and focus on specific objects, much like the way the human eye observes details from a particular viewpoint. + +## Samples + +| VisionEye View | VisionEye View With Object Tracking | +|:------------------------------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:| +| ![VisionEye View Object Mapping using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d593acc-2e37-41b0-ad0e-92b4ffae6647) | ![VisionEye View Object Mapping with Object Tracking using Ultralytics YOLOv8](https://github.com/RizwanMunawar/ultralytics/assets/62513924/fcd85952-390f-451e-8fb0-b82e943af89c) | +| VisionEye View Object Mapping using Ultralytics YOLOv8 | VisionEye View Object Mapping with Object Tracking using Ultralytics YOLOv8 | + +!!! 
Example "VisionEye Object Mapping using YOLOv8" + + === "VisionEye Object Mapping" + ```python + import cv2 + from ultralytics import YOLO + from ultralytics.utils.plotting import colors, Annotator + + model = YOLO("yolov8n.pt") + names = model.model.names + cap = cv2.VideoCapture("path/to/video/file.mp4") + + out = cv2.VideoWriter('visioneye-pinpoint.avi', cv2.VideoWriter_fourcc(*'MJPG'), + 30, (int(cap.get(3)), int(cap.get(4)))) + + center_point = (-10, int(cap.get(4))) + + while True: + ret, im0 = cap.read() + if not ret: + print("Video frame is empty or video processing has been successfully completed.") + break + + results = model.predict(im0) + boxes = results[0].boxes.xyxy.cpu() + clss = results[0].boxes.cls.cpu().tolist() + + annotator = Annotator(im0, line_width=2) + + for box, cls in zip(boxes, clss): + annotator.box_label(box, label=names[int(cls)], color=colors(int(cls))) + annotator.visioneye(box, center_point) + + out.write(im0) + cv2.imshow("visioneye-pinpoint", im0) + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + out.release() + cap.release() + cv2.destroyAllWindows() + ``` + + === "VisionEye Object Mapping with Object Tracking" + ```python + import cv2 + from ultralytics import YOLO + from ultralytics.utils.plotting import colors, Annotator + + model = YOLO("yolov8n.pt") + cap = cv2.VideoCapture("path/to/video/file.mp4") + + out = cv2.VideoWriter('visioneye-pinpoint.avi', cv2.VideoWriter_fourcc(*'MJPG'), + 30, (int(cap.get(3)), int(cap.get(4)))) + + center_point = (-10, int(cap.get(4))) + + while True: + ret, im0 = cap.read() + if not ret: + print("Video frame is empty or video processing has been successfully completed.") + break + + results = model.track(im0, persist=True) + boxes = results[0].boxes.xyxy.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + + annotator = Annotator(im0, line_width=2) + + for box, track_id in zip(boxes, track_ids): + annotator.box_label(box, label=str(track_id), color=colors(int(track_id))) + annotator.visioneye(box, center_point) + + out.write(im0) + cv2.imshow("visioneye-pinpoint", im0) + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + out.release() + cap.release() + cv2.destroyAllWindows() + ``` + +### `visioneye` Arguments + +| Name | Type | Default | Description | +|---------------|---------|------------------|--------------------------------------------------| +| `color` | `tuple` | `(235, 219, 11)` | Line and object centroid color | +| `pin_color` | `tuple` | `(255, 0, 255)` | VisionEye pinpoint color | +| `thickness` | `int` | `2` | pinpoint to object line thickness | +| `pins_radius` | `int` | `10` | Pinpoint and object centroid point circle radius | + +## Note + +For any inquiries, feel free to post your questions in the [Ultralytics Issue Section](https://github.com/ultralytics/ultralytics/issues/new/choose) or the discussion section mentioned below. 
diff --git a/ultralytics/docs/en/guides/vision-eye.md:Zone.Identifier b/ultralytics/docs/en/guides/vision-eye.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/vision-eye.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/workouts-monitoring.md b/ultralytics/docs/en/guides/workouts-monitoring.md new file mode 100755 index 0000000..02e8209 --- /dev/null +++ b/ultralytics/docs/en/guides/workouts-monitoring.md @@ -0,0 +1,130 @@ +--- +comments: true +description: Workouts Monitoring Using Ultralytics YOLOv8 +keywords: Ultralytics, YOLOv8, Object Detection, Pose Estimation, PushUps, PullUps, Ab workouts, Notebook, IPython Kernel, CLI, Python SDK +--- + +# Workouts Monitoring using Ultralytics YOLOv8 ๐Ÿš€ + +Monitoring workouts through pose estimation with [Ultralytics YOLOv8](https://github.com/ultralytics/ultralytics/) enhances exercise assessment by accurately tracking key body landmarks and joints in real-time. This technology provides instant feedback on exercise form, tracks workout routines, and measures performance metrics, optimizing training sessions for users and trainers alike. + +## Advantages of Workouts Monitoring? + +- **Optimized Performance:** Tailoring workouts based on monitoring data for better results. +- **Goal Achievement:** Track and adjust fitness goals for measurable progress. +- **Personalization:** Customized workout plans based on individual data for effectiveness. +- **Health Awareness:** Early detection of patterns indicating health issues or overtraining. +- **Informed Decisions:** Data-driven decisions for adjusting routines and setting realistic goals. + +## Real World Applications + +| Workouts Monitoring | Workouts Monitoring | +|:----------------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------------:| +| ![PushUps Counting](https://github.com/RizwanMunawar/ultralytics/assets/62513924/cf016a41-589f-420f-8a8c-2cc8174a16de) | ![PullUps Counting](https://github.com/RizwanMunawar/ultralytics/assets/62513924/cb20f316-fac2-4330-8445-dcf5ffebe329) | +| PushUps Counting | PullUps Counting | + +!!! 
Example "Workouts Monitoring Example" + + === "Workouts Monitoring" + ```python + from ultralytics import YOLO + from ultralytics.solutions import ai_gym + import cv2 + + model = YOLO("yolov8n-pose.pt") + cap = cv2.VideoCapture("path/to/video/file.mp4") + assert cap.isOpened(), "Error reading video file" + + gym_object = ai_gym.AIGym() # init AI GYM module + gym_object.set_args(line_thickness=2, + view_img=True, + pose_type="pushup", + kpts_to_check=[6, 8, 10]) + + frame_count = 0 + while cap.isOpened(): + success, im0 = cap.read() + if not success: + print("Video frame is empty or video processing has been successfully completed.") + break + frame_count += 1 + results = model.predict(im0, verbose=False) + im0 = gym_object.start_counting(im0, results, frame_count) + + cv2.destroyAllWindows() + ``` + + === "Workouts Monitoring with Save Output" + ```python + from ultralytics import YOLO + from ultralytics.solutions import ai_gym + import cv2 + + model = YOLO("yolov8n-pose.pt") + cap = cv2.VideoCapture("path/to/video/file.mp4") + assert cap.isOpened(), "Error reading video file" + + video_writer = cv2.VideoWriter("workouts.avi", + cv2.VideoWriter_fourcc(*'mp4v'), + int(cap.get(5)), + (int(cap.get(3)), int(cap.get(4)))) + + gym_object = ai_gym.AIGym() # init AI GYM module + gym_object.set_args(line_thickness=2, + view_img=True, + pose_type="pushup", + kpts_to_check=[6, 8, 10]) + + frame_count = 0 + while cap.isOpened(): + success, im0 = cap.read() + if not success: + print("Video frame is empty or video processing has been successfully completed.") + break + frame_count += 1 + results = model.predict(im0, verbose=False) + im0 = gym_object.start_counting(im0, results, frame_count) + video_writer.write(im0) + + cv2.destroyAllWindows() + video_writer.release() + ``` + +???+ tip "Support" + + "pushup", "pullup" and "abworkout" supported + +### KeyPoints Map + +![keyPoints Order Ultralytics YOLOv8 Pose](https://github.com/ultralytics/ultralytics/assets/62513924/f45d8315-b59f-47b7-b9c8-c61af1ce865b) + +### Arguments `set_args` + +| Name | Type | Default | Description | +|-----------------|--------|----------|----------------------------------------------------------------------------------------| +| kpts_to_check | `list` | `None` | List of three keypoints index, for counting specific workout, followed by keypoint Map | +| view_img | `bool` | `False` | Display the frame with counts | +| line_thickness | `int` | `2` | Increase the thickness of count value | +| pose_type | `str` | `pushup` | Pose that need to be monitored, "pullup" and "abworkout" also supported | +| pose_up_angle | `int` | `145` | Pose Up Angle value | +| pose_down_angle | `int` | `90` | Pose Down Angle value | + +### Arguments `model.predict` + +| Name | Type | Default | Description | +|-----------------|----------------|------------------------|----------------------------------------------------------------------------| +| `source` | `str` | `'ultralytics/assets'` | source directory for images or videos | +| `conf` | `float` | `0.25` | object confidence threshold for detection | +| `iou` | `float` | `0.7` | intersection over union (IoU) threshold for NMS | +| `imgsz` | `int or tuple` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `half` | `bool` | `False` | use half precision (FP16) | +| `device` | `None or str` | `None` | device to run on, i.e. 
cuda device=0/1/2/3 or device=cpu |
| `max_det` | `int` | `300` | maximum number of detections per image |
| `vid_stride` | `int` | `1` | video frame-rate stride |
| `stream_buffer` | `bool` | `False` | buffer all streaming frames (True) or return the most recent frame (False) |
| `visualize` | `bool` | `False` | visualize model features |
| `augment` | `bool` | `False` | apply image augmentation to prediction sources |
| `agnostic_nms` | `bool` | `False` | class-agnostic NMS |
| `classes` | `list[int]` | `None` | filter results by class, i.e. classes=0, or classes=[0,2,3] |
| `retina_masks` | `bool` | `False` | use high-resolution segmentation masks |
| `embed` | `list[int]` | `None` | return feature vectors/embeddings from given layers |
diff --git a/ultralytics/docs/en/guides/workouts-monitoring.md:Zone.Identifier b/ultralytics/docs/en/guides/workouts-monitoring.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/guides/workouts-monitoring.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/guides/yolo-common-issues.md b/ultralytics/docs/en/guides/yolo-common-issues.md
new file mode 100755
index 0000000..71083cf
--- /dev/null
+++ b/ultralytics/docs/en/guides/yolo-common-issues.md
@@ -0,0 +1,276 @@
---
comments: true
description: A comprehensive guide to troubleshooting common issues encountered while working with YOLOv8 in the Ultralytics ecosystem.
keywords: Troubleshooting, Ultralytics, YOLOv8, Installation Errors, Training Data, Model Performance, Hyperparameter Tuning, Deployment
---

# Troubleshooting Common YOLO Issues

YOLO Common Issues Image
+ +## Introduction + +This guide serves as a comprehensive aid for troubleshooting common issues encountered while working with YOLOv8 on your Ultralytics projects. Navigating through these issues can be a breeze with the right guidance, ensuring your projects remain on track without unnecessary delays. + +## Common Issues + +### Installation Errors + +Installation errors can arise due to various reasons, such as incompatible versions, missing dependencies, or incorrect environment setups. First, check to make sure you are doing the following: + +- You're using Python 3.8 or later as recommended. + +- Ensure that you have the correct version of PyTorch (1.8 or later) installed. + +- Consider using virtual environments to avoid conflicts. + +- Follow the [official installation guide](../quickstart.md) step by step. + +Additionally, here are some common installation issues users have encountered, along with their respective solutions: + +- Import Errors or Dependency Issues - If you're getting errors during the import of YOLOv8, or you're having issues related to dependencies, consider the following troubleshooting steps: + + - **Fresh Installation**: Sometimes, starting with a fresh installation can resolve unexpected issues. Especially with libraries like Ultralytics, where updates might introduce changes to the file tree structure or functionalities. + + - **Update Regularly**: Ensure you're using the latest version of the library. Older versions might not be compatible with recent updates, leading to potential conflicts or issues. + + - **Check Dependencies**: Verify that all required dependencies are correctly installed and are of the compatible versions. + + - **Review Changes**: If you initially cloned or installed an older version, be aware that significant updates might affect the library's structure or functionalities. Always refer to the official documentation or changelogs to understand any major changes. + + - Remember, keeping your libraries and dependencies up-to-date is crucial for a smooth and error-free experience. + +- Running YOLOv8 on GPU - If you're having trouble running YOLOv8 on GPU, consider the following troubleshooting steps: + + - **Verify CUDA Compatibility and Installation**: Ensure your GPU is CUDA compatible and that CUDA is correctly installed. Use the `nvidia-smi` command to check the status of your NVIDIA GPU and CUDA version. + + - **Check PyTorch and CUDA Integration**: Ensure PyTorch can utilize CUDA by running `import torch; print(torch.cuda.is_available())` in a Python terminal. If it returns 'True', PyTorch is set up to use CUDA. + + - **Environment Activation**: Ensure you're in the correct environment where all necessary packages are installed. + + - **Update Your Packages**: Outdated packages might not be compatible with your GPU. Keep them updated. + + - **Program Configuration**: Check if the program or code specifies GPU usage. In YOLOv8, this might be in the settings or configuration. + +### Model Training Issues + +This section will address common issues faced while training and their respective explanations and solutions. + +#### Verification of Configuration Settings + +**Issue**: You are unsure whether the configuration settings in the `.yaml` file are being applied correctly during model training. + +**Solution**: The configuration settings in the `.yaml` file should be applied when using the `model.train()` function. 
To ensure that these settings are correctly applied, follow these steps:

- Confirm that the path to your `.yaml` configuration file is correct.
- Make sure you pass the path to your `.yaml` file as the `data` argument when calling `model.train()`, as shown below:

```python
model.train(data='/path/to/your/data.yaml', batch=4)
```

#### Accelerating Training with Multiple GPUs

**Issue**: Training is slow on a single GPU, and you want to speed up the process using multiple GPUs.

**Solution**: Increasing the batch size can accelerate training, but it's essential to consider GPU memory capacity. To speed up training with multiple GPUs, follow these steps:

- Ensure that you have multiple GPUs available.

- Specify the GPUs to use with the `device` argument, e.g., `device=[0, 1, 2, 3]` to train on four GPUs.

- Increase the batch size accordingly to fully utilize the multiple GPUs without exceeding memory limits.

- Modify your training command to utilize multiple GPUs:

```python
# Adjust the batch size and other settings as needed to optimize training speed
model.train(data='/path/to/your/data.yaml', batch=32, device=[0, 1, 2, 3])
```

#### Continuous Monitoring Parameters

**Issue**: You want to know which parameters should be continuously monitored during training, apart from loss.

**Solution**: While loss is a crucial metric to monitor, it's also essential to track other metrics for model performance optimization. Some key metrics to monitor during training include:

- Precision
- Recall
- Mean Average Precision (mAP)

You can access these metrics from the training logs or by using tools like TensorBoard or wandb for visualization. Implementing early stopping based on these metrics can help you achieve better results.

#### Tools for Tracking Training Progress

**Issue**: You are looking for recommendations on tools to track training progress.

**Solution**: To track and visualize training progress, you can consider using the following tools:

- [TensorBoard](https://www.tensorflow.org/tensorboard): TensorBoard is a popular choice for visualizing training metrics, including loss, accuracy, and more. You can integrate it with your YOLOv8 training process.
- [Comet](https://bit.ly/yolov8-readme-comet): Comet provides an extensive toolkit for experiment tracking and comparison. It allows you to track metrics, hyperparameters, and even model weights. Integration with YOLO models is also straightforward, providing you with a complete overview of your experiment cycle.
- [Ultralytics HUB](https://hub.ultralytics.com): Ultralytics HUB offers a specialized environment for tracking YOLO models, giving you a one-stop platform to manage metrics, datasets, and even collaborate with your team. Given its tailored focus on YOLO, it offers more customized tracking options.

Each of these tools offers its own set of advantages, so you may want to consider the specific needs of your project when making a choice.

#### How to Check if Training is Happening on the GPU

**Issue**: The 'device' value in the training logs is 'null,' and you're unsure if training is happening on the GPU.

**Solution**: The 'device' value being 'null' typically means that the training process is set to automatically use an available GPU, which is the default behavior.
To ensure training occurs on a specific GPU, you can manually set the 'device' value to the GPU index (e.g., '0' for the first GPU) in your `.yaml` configuration file:

```yaml
device: 0
```

This will explicitly assign the training process to the specified GPU. If you wish to train on the CPU, set 'device' to 'cpu'.

Keep an eye on the 'runs' folder for logs and metrics to monitor training progress effectively.

#### Key Considerations for Effective Model Training

Here are some things to keep in mind if you are facing issues related to model training.

**Dataset Format and Labels**

- Importance: The foundation of any machine learning model lies in the quality and format of the data it is trained on.

- Recommendation: Ensure that your custom dataset and its associated labels adhere to the expected format. It's crucial to verify that annotations are accurate and of high quality. Incorrect or subpar annotations can derail the model's learning process, leading to unpredictable outcomes.

**Model Convergence**

- Importance: Achieving model convergence ensures that the model has sufficiently learned from the training data.

- Recommendation: When training a model 'from scratch', it's vital to ensure that the model reaches a satisfactory level of convergence. This might necessitate a longer training duration, with more epochs, compared to when you're fine-tuning an existing model.

**Learning Rate and Batch Size**

- Importance: These hyperparameters play a pivotal role in determining how the model updates its weights during training.

- Recommendation: Regularly evaluate if the chosen learning rate and batch size are optimal for your specific dataset. Parameters that are not in harmony with the dataset's characteristics can hinder the model's performance.

**Class Distribution**

- Importance: The distribution of classes in your dataset can influence the model's prediction tendencies.

- Recommendation: Regularly assess the distribution of classes within your dataset. If there's a class imbalance, there's a risk that the model will develop a bias towards the more prevalent class. This bias can be evident in the confusion matrix, where the model might predominantly predict the majority class.

**Cross-Check with Pretrained Weights**

- Importance: Leveraging pretrained weights can provide a solid starting point for model training, especially when data is limited.

- Recommendation: As a diagnostic step, consider training your model using the same data but initializing it with pretrained weights. If this approach yields a well-formed confusion matrix, it could suggest that the 'from scratch' model might require further training or adjustments.

### Issues Related to Model Predictions

This section will address common issues faced during model prediction.

#### Getting Bounding Box Predictions With Your YOLOv8 Custom Model

**Issue**: When running predictions with a custom YOLOv8 model, there are challenges with the format and visualization of the bounding box coordinates.

**Solution**:

- Coordinate Format: YOLOv8 provides bounding box coordinates in absolute pixel values. To convert these to relative coordinates (ranging from 0 to 1), you need to divide by the image dimensions. For example, let's say your image size is 640x640.
Then you would do the following:

```python
# Convert absolute coordinates to relative coordinates
x1 = x1 / 640  # Divide x-coordinates by image width
x2 = x2 / 640
y1 = y1 / 640  # Divide y-coordinates by image height
y2 = y2 / 640
```

- File Name: To obtain the file name of the image you're predicting on, access the image file path directly from the result object within your prediction loop.

#### Filtering Objects in YOLOv8 Predictions

**Issue**: Facing issues with how to filter and display only specific objects in the prediction results when running YOLOv8 using the Ultralytics library.

**Solution**: To detect specific classes, use the `classes` argument to specify the classes you want to include in the output. For instance, to detect only cars (assuming 'car' has class index 2):

```shell
yolo task=segment mode=predict model=yolov8n-seg.pt source='path/to/car.mp4' show=True classes=2
```

#### Understanding Precision Metrics in YOLOv8

**Issue**: Confusion regarding the difference between box precision, mask precision, and confusion matrix precision in YOLOv8.

**Solution**: Box precision measures the accuracy of predicted bounding boxes compared to the actual ground truth boxes using IoU (Intersection over Union) as the metric. Mask precision assesses the agreement between predicted segmentation masks and ground truth masks in pixel-wise object classification. Confusion matrix precision, on the other hand, focuses on overall classification accuracy across all classes and does not consider the geometric accuracy of predictions. It's important to note that a bounding box can be geometrically accurate (true positive) even if the class prediction is wrong, leading to differences between box precision and confusion matrix precision. These metrics evaluate distinct aspects of a model's performance, reflecting the need for different evaluation metrics in various tasks.

#### Extracting Object Dimensions in YOLOv8

**Issue**: Difficulty in retrieving the length and height of detected objects in YOLOv8, especially when multiple objects are detected in an image.

**Solution**: To retrieve the bounding box dimensions, first use the Ultralytics YOLOv8 model to predict objects in an image. Then, extract the width and height information of bounding boxes from the prediction results.

```python
from ultralytics import YOLO

# Load a pre-trained YOLOv8 model
model = YOLO('yolov8n.pt')

# Specify the source image
source = 'https://ultralytics.com/images/bus.jpg'

# Make predictions
results = model.predict(source, save=True, imgsz=320, conf=0.5)

# Extract bounding box dimensions
boxes = results[0].boxes.xywh.cpu()
for box in boxes:
    x, y, w, h = box
    print(f"Width of Box: {w}, Height of Box: {h}")
```

### Deployment Challenges

#### GPU Deployment Issues

**Issue:** Deploying models in a multi-GPU environment can sometimes lead to unexpected behaviors like unexpected memory usage, inconsistent results across GPUs, etc.

**Solution:** Check for default GPU initialization. Some frameworks, like PyTorch, might initialize CUDA operations on a default GPU before transitioning to the designated GPUs. To bypass unexpected default initializations, specify the GPU directly during deployment and prediction. Then, use tools to monitor GPU utilization and memory usage to identify any anomalies in real-time. Also, ensure you're using the latest version of the framework or library.
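As a minimal sketch of that advice (the weights file and image path here are illustrative placeholders), you can pin inference to an explicit device with the standard `device` argument:

```python
from ultralytics import YOLO

# Load the model once
model = YOLO('yolov8n.pt')

# Pin prediction to GPU 0 instead of relying on default CUDA initialization;
# use device='cpu' to take the GPU out of the equation while debugging
results = model.predict('path/to/image.jpg', device=0)
```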
#### Model Conversion/Exporting Issues

**Issue:** During the process of converting or exporting machine learning models to different formats or platforms, users might encounter errors or unexpected behaviors.

**Solution:**

- Compatibility Check: Ensure that you are using versions of libraries and frameworks that are compatible with each other. Mismatched versions can lead to unexpected errors during conversion.

- Environment Reset: If you're using an interactive environment like Jupyter or Colab, consider restarting your environment after making significant changes or installations. A fresh start can sometimes resolve underlying issues.

- Official Documentation: Always refer to the official documentation of the tool or library you are using for conversion. It often contains specific guidelines and best practices for model exporting.

- Community Support: Check the library or framework's official repository for similar issues reported by other users. The maintainers or community might have provided solutions or workarounds in discussion threads.

- Update Regularly: Ensure that you are using the latest version of the tool or library. Developers frequently release updates that fix known bugs or improve functionality.

- Test Incrementally: Before performing a full conversion, test the process with a smaller model or dataset to identify potential issues early on.

## Community and Support

Engaging with a community of like-minded individuals can significantly enhance your experience and success in working with YOLOv8. Below are some channels and resources you may find helpful.

### Forums and Channels for Getting Help

**GitHub Issues:** The YOLOv8 repository on GitHub has an [Issues tab](https://github.com/ultralytics/ultralytics/issues) where you can ask questions, report bugs, and suggest new features. The community and maintainers are active here, and it's a great place to get help with specific problems.

**Ultralytics Discord Server:** Ultralytics has a [Discord server](https://ultralytics.com/discord/) where you can interact with other users and the developers.

### Official Documentation and Resources

**Ultralytics YOLOv8 Docs**: The [official documentation](../index.md) provides a comprehensive overview of YOLOv8, along with guides on installation, usage, and troubleshooting.

These resources should provide a solid foundation for troubleshooting and improving your YOLOv8 projects, as well as connecting with others in the YOLOv8 community.

## Conclusion

Troubleshooting is an integral part of any development process, and being equipped with the right knowledge can significantly reduce the time and effort spent in resolving issues. This guide aimed to address the most common challenges faced by users of the YOLOv8 model within the Ultralytics ecosystem. By understanding and addressing these common issues, you can ensure smoother project progress and achieve better results with your computer vision tasks.

Remember, the Ultralytics community is a valuable resource. Engaging with fellow developers and experts can provide additional insights and solutions that might not be covered in standard documentation. Always keep learning, experimenting, and sharing your experiences to contribute to the collective knowledge of the community.

Happy troubleshooting!
diff --git a/ultralytics/docs/en/guides/yolo-common-issues.md:Zone.Identifier b/ultralytics/docs/en/guides/yolo-common-issues.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/guides/yolo-common-issues.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/guides/yolo-performance-metrics.md b/ultralytics/docs/en/guides/yolo-performance-metrics.md
new file mode 100755
index 0000000..6a7a5e6
--- /dev/null
+++ b/ultralytics/docs/en/guides/yolo-performance-metrics.md
@@ -0,0 +1,165 @@
---
comments: true
description: A comprehensive guide on various performance metrics related to YOLOv8, their significance, and how to interpret them.
keywords: YOLOv8, Performance metrics, Object detection, Intersection over Union (IoU), Average Precision (AP), Mean Average Precision (mAP), Precision, Recall, Validation mode, Ultralytics
---

# Performance Metrics Deep Dive

## Introduction

Performance metrics are key tools to evaluate the accuracy and efficiency of object detection models. They shed light on how effectively a model can identify and localize objects within images. Additionally, they help in understanding the model's handling of false positives and false negatives. These insights are crucial for evaluating and enhancing the model's performance. In this guide, we will explore various performance metrics associated with YOLOv8, their significance, and how to interpret them.

## Object Detection Metrics

Let's start by discussing some metrics that are not only important to YOLOv8 but are broadly applicable across different object detection models.

- **Intersection over Union (IoU):** IoU is a measure that quantifies the overlap between a predicted bounding box and a ground truth bounding box. It plays a fundamental role in evaluating the accuracy of object localization.

- **Average Precision (AP):** AP computes the area under the precision-recall curve, providing a single value that encapsulates the model's precision and recall performance.

- **Mean Average Precision (mAP):** mAP extends the concept of AP by calculating the average AP values across multiple object classes. This is useful in multi-class object detection scenarios to provide a comprehensive evaluation of the model's performance.

- **Precision and Recall:** Precision quantifies the proportion of true positives among all positive predictions, assessing the model's capability to avoid false positives. On the other hand, Recall calculates the proportion of true positives among all actual positives, measuring the model's ability to detect all instances of a class.

- **F1 Score:** The F1 Score is the harmonic mean of precision and recall, providing a balanced assessment of a model's performance while considering both false positives and false negatives.

## How to Calculate Metrics for YOLOv8 Model

Now, we can explore [YOLOv8's Validation mode](../modes/val.md) that can be used to compute the above-discussed evaluation metrics.

Using the validation mode is simple. Once you have a trained model, you can invoke the `model.val()` function. This function will then process the validation dataset and return a variety of performance metrics. But what do these metrics mean? And how should you interpret them?

### Interpreting the Output

Let's break down the output of the `model.val()` function and understand each segment of the output.
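As a quick reference, a minimal sketch of producing this output looks like the following (the `coco128.yaml` dataset passed via `data` is an illustrative assumption; by default, validation uses the dataset the model was trained on):

```python
from ultralytics import YOLO

# Load a trained detection model
model = YOLO('yolov8n.pt')

# Run validation and collect the returned metrics object
metrics = model.val(data='coco128.yaml')

# Aggregate box metrics exposed by the validator
print(metrics.box.map)    # mAP50-95
print(metrics.box.map50)  # mAP50
print(metrics.box.map75)  # mAP75
print(metrics.box.maps)   # per-class mAP50-95
```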
+ +#### Class-wise Metrics + +One of the sections of the output is the class-wise breakdown of performance metrics. This granular information is useful when you are trying to understand how well the model is doing for each specific class, especially in datasets with a diverse range of object categories. For each class in the dataset the following is provided: + +- **Class**: This denotes the name of the object class, such as "person", "car", or "dog". + +- **Images**: This metric tells you the number of images in the validation set that contain the object class. + +- **Instances**: This provides the count of how many times the class appears across all images in the validation set. + +- **Box(P, R, mAP50, mAP50-95)**: This metric provides insights into the model's performance in detecting objects: + + - **P (Precision)**: The accuracy of the detected objects, indicating how many detections were correct. + + - **R (Recall)**: The ability of the model to identify all instances of objects in the images. + + - **mAP50**: Mean average precision calculated at an intersection over union (IoU) threshold of 0.50. It's a measure of the model's accuracy considering only the "easy" detections. + + - **mAP50-95**: The average of the mean average precision calculated at varying IoU thresholds, ranging from 0.50 to 0.95. It gives a comprehensive view of the model's performance across different levels of detection difficulty. + +#### Speed Metrics + +The speed of inference can be as critical as accuracy, especially in real-time object detection scenarios. This section breaks down the time taken for various stages of the validation process, from preprocessing to post-processing. + +#### COCO Metrics Evaluation + +For users validating on the COCO dataset, additional metrics are calculated using the COCO evaluation script. These metrics give insights into precision and recall at different IoU thresholds and for objects of different sizes. + +#### Visual Outputs + +The model.val() function, apart from producing numeric metrics, also yields visual outputs that can provide a more intuitive understanding of the model's performance. Here's a breakdown of the visual outputs you can expect: + +- **F1 Score Curve (`F1_curve.png`)**: This curve represents the F1 score across various thresholds. Interpreting this curve can offer insights into the model's balance between false positives and false negatives over different thresholds. + +- **Precision-Recall Curve (`PR_curve.png`)**: An integral visualization for any classification problem, this curve showcases the trade-offs between precision and recall at varied thresholds. It becomes especially significant when dealing with imbalanced classes. + +- **Precision Curve (`P_curve.png`)**: A graphical representation of precision values at different thresholds. This curve helps in understanding how precision varies as the threshold changes. + +- **Recall Curve (`R_curve.png`)**: Correspondingly, this graph illustrates how the recall values change across different thresholds. + +- **Confusion Matrix (`confusion_matrix.png`)**: The confusion matrix provides a detailed view of the outcomes, showcasing the counts of true positives, true negatives, false positives, and false negatives for each class. + +- **Normalized Confusion Matrix (`confusion_matrix_normalized.png`)**: This visualization is a normalized version of the confusion matrix. It represents the data in proportions rather than raw counts. This format makes it simpler to compare the performance across classes. 
- **Validation Batch Labels (`val_batchX_labels.jpg`)**: These images depict the ground truth labels for distinct batches from the validation dataset. They provide a clear picture of what the objects are and their respective locations as per the dataset.

- **Validation Batch Predictions (`val_batchX_pred.jpg`)**: Contrasting the label images, these visuals display the predictions made by the YOLOv8 model for the respective batches. By comparing these to the label images, you can easily assess how well the model detects and classifies objects visually.

#### Results Storage

For future reference, the results are saved to a directory, typically named `runs/detect/val`.

## Choosing the Right Metrics

Choosing the right metrics to evaluate often depends on the specific application.

- **mAP:** Suitable for a broad assessment of model performance.

- **IoU:** Essential when precise object location is crucial.

- **Precision:** Important when minimizing false detections is a priority.

- **Recall:** Vital when it's important to detect every instance of an object.

- **F1 Score:** Useful when a balance between precision and recall is needed.

For real-time applications, speed metrics like FPS (Frames Per Second) and latency are crucial to ensure timely results.

## Interpretation of Results

It's important to understand the metrics. Here's what some of the commonly observed lower scores might suggest:

- **Low mAP:** Indicates the model may need general refinements.

- **Low IoU:** The model might be struggling to pinpoint objects accurately. Different bounding box methods could help.

- **Low Precision:** The model may be detecting too many non-existent objects. Adjusting confidence thresholds might reduce this.

- **Low Recall:** The model could be missing real objects. Improving feature extraction or using more data might help.

- **Imbalanced F1 Score:** There's a disparity between precision and recall.

- **Class-specific AP:** Low scores here can highlight classes the model struggles with.

## Case Studies

Real-world examples can help clarify how these metrics work in practice.

### Case 1

- **Situation:** mAP and F1 Score are suboptimal; Recall is good, but Precision isn't.

- **Interpretation & Action:** There might be too many incorrect detections. Tightening confidence thresholds could reduce these, though it might also slightly decrease recall.

### Case 2

- **Situation:** mAP and Recall are acceptable, but IoU is lacking.

- **Interpretation & Action:** The model detects objects well but might not be localizing them precisely. Refining bounding box predictions might help.

### Case 3

- **Situation:** Some classes have a much lower AP than others, even with a decent overall mAP.

- **Interpretation & Action:** These classes might be more challenging for the model. Using more data for these classes or adjusting class weights during training could be beneficial.

## Connect and Collaborate

Tapping into a community of enthusiasts and experts can amplify your journey with YOLOv8. Here are some avenues that can facilitate learning, troubleshooting, and networking.

### Engage with the Broader Community

- **GitHub Issues:** The YOLOv8 repository on GitHub has an [Issues tab](https://github.com/ultralytics/ultralytics/issues) where you can ask questions, report bugs, and suggest new features. The community and maintainers are active here, and it's a great place to get help with specific problems.
+ +- **Ultralytics Discord Server:** Ultralytics has a [Discord server](https://ultralytics.com/discord/) where you can interact with other users and the developers. + +### Official Documentation and Resources: + +- **Ultralytics YOLOv8 Docs:** The [official documentation](../index.md) provides a comprehensive overview of YOLOv8, along with guides on installation, usage, and troubleshooting. + +Using these resources will not only guide you through any challenges but also keep you updated with the latest trends and best practices in the YOLOv8 community. + +## Conclusion + +In this guide, we've taken a close look at the essential performance metrics for YOLOv8. These metrics are key to understanding how well a model is performing and are vital for anyone aiming to fine-tune their models. They offer the necessary insights for improvements and to make sure the model works effectively in real-life situations. + +Remember, the YOLOv8 and Ultralytics community is an invaluable asset. Engaging with fellow developers and experts can open doors to insights and solutions not found in standard documentation. As you journey through object detection, keep the spirit of learning alive, experiment with new strategies, and share your findings. By doing so, you contribute to the community's collective wisdom and ensure its growth. + +Happy object detecting! diff --git a/ultralytics/docs/en/guides/yolo-performance-metrics.md:Zone.Identifier b/ultralytics/docs/en/guides/yolo-performance-metrics.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/yolo-performance-metrics.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/guides/yolo-thread-safe-inference.md b/ultralytics/docs/en/guides/yolo-thread-safe-inference.md new file mode 100755 index 0000000..abf7a36 --- /dev/null +++ b/ultralytics/docs/en/guides/yolo-thread-safe-inference.md @@ -0,0 +1,108 @@ +--- +comments: true +description: This guide provides best practices for performing thread-safe inference with YOLO models, ensuring reliable and concurrent predictions in multi-threaded applications. +keywords: thread-safe, YOLO inference, multi-threading, concurrent predictions, YOLO models, Ultralytics, Python threading, safe YOLO usage, AI concurrency +--- + +# Thread-Safe Inference with YOLO Models + +Running YOLO models in a multi-threaded environment requires careful consideration to ensure thread safety. Python's `threading` module allows you to run several threads concurrently, but when it comes to using YOLO models across these threads, there are important safety issues to be aware of. This page will guide you through creating thread-safe YOLO model inference. + +## Understanding Python Threading + +Python threads are a form of parallelism that allow your program to run multiple operations at once. However, Python's Global Interpreter Lock (GIL) means that only one thread can execute Python bytecode at a time. + +

Single vs Multi-Thread Examples
+ +While this sounds like a limitation, threads can still provide concurrency, especially for I/O-bound operations or when using operations that release the GIL, like those performed by YOLO's underlying C libraries. + +## The Danger of Shared Model Instances + +Instantiating a YOLO model outside your threads and sharing this instance across multiple threads can lead to race conditions, where the internal state of the model is inconsistently modified due to concurrent accesses. This is particularly problematic when the model or its components hold state that is not designed to be thread-safe. + +### Non-Thread-Safe Example: Single Model Instance + +When using threads in Python, it's important to recognize patterns that can lead to concurrency issues. Here is what you should avoid: sharing a single YOLO model instance across multiple threads. + +```python +# Unsafe: Sharing a single model instance across threads +from ultralytics import YOLO +from threading import Thread + +# Instantiate the model outside the thread +shared_model = YOLO("yolov8n.pt") + + +def predict(image_path): + results = shared_model.predict(image_path) + # Process results + + +# Starting threads that share the same model instance +Thread(target=predict, args=("image1.jpg",)).start() +Thread(target=predict, args=("image2.jpg",)).start() +``` + +In the example above, the `shared_model` is used by multiple threads, which can lead to unpredictable results because `predict` could be executed simultaneously by multiple threads. + +### Non-Thread-Safe Example: Multiple Model Instances + +Similarly, here is an unsafe pattern with multiple YOLO model instances: + +```python +# Unsafe: Sharing multiple model instances across threads can still lead to issues +from ultralytics import YOLO +from threading import Thread + +# Instantiate multiple models outside the thread +shared_model_1 = YOLO("yolov8n_1.pt") +shared_model_2 = YOLO("yolov8n_2.pt") + + +def predict(model, image_path): + results = model.predict(image_path) + # Process results + + +# Starting threads with individual model instances +Thread(target=predict, args=(shared_model_1, "image1.jpg")).start() +Thread(target=predict, args=(shared_model_2, "image2.jpg")).start() +``` + +Even though there are two separate model instances, the risk of concurrency issues still exists. If the internal implementation of `YOLO` is not thread-safe, using separate instances might not prevent race conditions, especially if these instances share any underlying resources or states that are not thread-local. + +## Thread-Safe Inference + +To perform thread-safe inference, you should instantiate a separate YOLO model within each thread. This ensures that each thread has its own isolated model instance, eliminating the risk of race conditions. + +### Thread-Safe Example + +Here's how to instantiate a YOLO model inside each thread for safe parallel inference: + +```python +# Safe: Instantiating a single model inside each thread +from ultralytics import YOLO +from threading import Thread + + +def thread_safe_predict(image_path): + # Instantiate a new model inside the thread + local_model = YOLO("yolov8n.pt") + results = local_model.predict(image_path) + # Process results + + +# Starting threads that each have their own model instance +Thread(target=thread_safe_predict, args=("image1.jpg",)).start() +Thread(target=thread_safe_predict, args=("image2.jpg",)).start() +``` + +In this example, each thread creates its own `YOLO` instance. 
This prevents any thread from interfering with the model state of another, thus ensuring that each thread performs inference safely and without unexpected interactions with the other threads. + +## Conclusion + +When using YOLO models with Python's `threading`, always instantiate your models within the thread that will use them to ensure thread safety. This practice avoids race conditions and makes sure that your inference tasks run reliably. + +For more advanced scenarios and to further optimize your multi-threaded inference performance, consider using process-based parallelism with `multiprocessing` or leveraging a task queue with dedicated worker processes. diff --git a/ultralytics/docs/en/guides/yolo-thread-safe-inference.md:Zone.Identifier b/ultralytics/docs/en/guides/yolo-thread-safe-inference.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/guides/yolo-thread-safe-inference.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/CI.md b/ultralytics/docs/en/help/CI.md new file mode 100755 index 0000000..b01549c --- /dev/null +++ b/ultralytics/docs/en/help/CI.md @@ -0,0 +1,61 @@ +--- +comments: true +description: Learn how Ultralytics leverages Continuous Integration (CI) for maintaining high-quality code. Explore our CI tests and the status of these tests for our repositories. +keywords: continuous integration, software development, CI tests, Ultralytics repositories, high-quality code, Docker Deployment, Broken Links, CodeQL, PyPi Publishing +--- + +# Continuous Integration (CI) + +Continuous Integration (CI) is an essential aspect of software development which involves integrating changes and testing them automatically. CI allows us to maintain high-quality code by catching issues early and often in the development process. At Ultralytics, we use various CI tests to ensure the quality and integrity of our codebase. + +## CI Actions + +Here's a brief description of our CI actions: + +- **CI:** This is our primary CI test that involves running unit tests, linting checks, and sometimes more comprehensive tests depending on the repository. +- **Docker Deployment:** This test checks the deployment of the project using Docker to ensure the Dockerfile and related scripts are working correctly. +- **Broken Links:** This test scans the codebase for any broken or dead links in our markdown or HTML files. +- **CodeQL:** CodeQL is a tool from GitHub that performs semantic analysis on our code, helping to find potential security vulnerabilities and maintain high-quality code. +- **PyPi Publishing:** This test checks if the project can be packaged and published to PyPi without any errors. 
+ +### CI Results + +Below is the table showing the status of these CI tests for our main repositories: + +| Repository | CI | Docker Deployment | Broken Links | CodeQL | PyPi and Docs Publishing | +|-----------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [yolov3](https://github.com/ultralytics/yolov3) | [![YOLOv3 CI](https://github.com/ultralytics/yolov3/actions/workflows/ci-testing.yml/badge.svg)](https://github.com/ultralytics/yolov3/actions/workflows/ci-testing.yml) | [![Publish Docker Images](https://github.com/ultralytics/yolov3/actions/workflows/docker.yml/badge.svg)](https://github.com/ultralytics/yolov3/actions/workflows/docker.yml) | [![Check Broken links](https://github.com/ultralytics/yolov3/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/yolov3/actions/workflows/links.yml) | [![CodeQL](https://github.com/ultralytics/yolov3/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/ultralytics/yolov3/actions/workflows/codeql-analysis.yml) | | +| [yolov5](https://github.com/ultralytics/yolov5) | [![YOLOv5 CI](https://github.com/ultralytics/yolov5/actions/workflows/ci-testing.yml/badge.svg)](https://github.com/ultralytics/yolov5/actions/workflows/ci-testing.yml) | [![Publish Docker Images](https://github.com/ultralytics/yolov5/actions/workflows/docker.yml/badge.svg)](https://github.com/ultralytics/yolov5/actions/workflows/docker.yml) | [![Check Broken links](https://github.com/ultralytics/yolov5/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/yolov5/actions/workflows/links.yml) | [![CodeQL](https://github.com/ultralytics/yolov5/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/ultralytics/yolov5/actions/workflows/codeql-analysis.yml) | | +| [ultralytics](https://github.com/ultralytics/ultralytics) | [![ultralytics CI](https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml) | [![Publish Docker Images](https://github.com/ultralytics/ultralytics/actions/workflows/docker.yaml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/docker.yaml) | [![Check Broken links](https://github.com/ultralytics/ultralytics/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/links.yml) | [![CodeQL](https://github.com/ultralytics/ultralytics/actions/workflows/codeql.yaml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/codeql.yaml) | [![Publish to PyPI and Deploy 
Docs](https://github.com/ultralytics/ultralytics/actions/workflows/publish.yml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/publish.yml) | +| [hub](https://github.com/ultralytics/hub) | [![HUB CI](https://github.com/ultralytics/hub/actions/workflows/ci.yaml/badge.svg)](https://github.com/ultralytics/hub/actions/workflows/ci.yaml) | | [![Check Broken links](https://github.com/ultralytics/hub/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/hub/actions/workflows/links.yml) | | | +| [docs](https://github.com/ultralytics/docs) | | | [![Check Broken links](https://github.com/ultralytics/docs/actions/workflows/links.yml/badge.svg)](https://github.com/ultralytics/docs/actions/workflows/links.yml) | | [![pages-build-deployment](https://github.com/ultralytics/docs/actions/workflows/pages/pages-build-deployment/badge.svg)](https://github.com/ultralytics/docs/actions/workflows/pages/pages-build-deployment) | + +Each badge shows the status of the last run of the corresponding CI test on the `main` branch of the respective repository. If a test fails, the badge will display a "failing" status, and if it passes, it will display a "passing" status. + +If you notice a test failing, it would be a great help if you could report it through a GitHub issue in the respective repository. + +Remember, a successful CI test does not mean that everything is perfect. It is always recommended to manually review the code before deployment or merging changes. + +## Code Coverage + +Code coverage is a metric that represents the percentage of your codebase that is executed when your tests run. It provides insight into how well your tests exercise your code and can be crucial in identifying untested parts of your application. A high code coverage percentage is often associated with a lower likelihood of bugs. However, it's essential to understand that code coverage does not guarantee the absence of defects. It merely indicates which parts of the code have been executed by the tests. + +### Integration with [codecov.io](https://codecov.io/) + +At Ultralytics, we have integrated our repositories with [codecov.io](https://codecov.io/), a popular online platform for measuring and visualizing code coverage. Codecov provides detailed insights, coverage comparisons between commits, and visual overlays directly on your code, indicating which lines were covered. + +By integrating with Codecov, we aim to maintain and improve the quality of our code by focusing on areas that might be prone to errors or need further testing. + +### Coverage Results + +To quickly get a glimpse of the code coverage status of the `ultralytics` python package, we have included a badge and sunburst visual of the `ultralytics` coverage results. These images show the percentage of code covered by our tests, offering an at-a-glance metric of our testing efforts. For full details please see https://codecov.io/github/ultralytics/ultralytics. 
+ +| Repository | Code Coverage | +|-----------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------| +| [ultralytics](https://github.com/ultralytics/ultralytics) | [![codecov](https://codecov.io/gh/ultralytics/ultralytics/branch/main/graph/badge.svg?token=HHW7IIVFVY)](https://codecov.io/gh/ultralytics/ultralytics) | + +In the sunburst graphic below, the innermost circle is the entire project, moving away from the center are folders then, finally, a single file. The size and color of each slice is representing the number of statements and the coverage, respectively. + + + Ultralytics Codecov Image + diff --git a/ultralytics/docs/en/help/CI.md:Zone.Identifier b/ultralytics/docs/en/help/CI.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/CI.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/CLA.md b/ultralytics/docs/en/help/CLA.md new file mode 100755 index 0000000..374914a --- /dev/null +++ b/ultralytics/docs/en/help/CLA.md @@ -0,0 +1,30 @@ +--- +description: Understand terms governing contributions to Ultralytics projects including source code, bug fixes, documentation and more. Read our Contributor License Agreement. +keywords: Ultralytics, Contributor License Agreement, Open Source Software, Contributions, Copyright License, Patent License, Moral Rights +--- + +# Ultralytics Individual Contributor License Agreement + +Thank you for your interest in contributing to open source software projects (โ€œProjectsโ€) made available by Ultralytics Inc. (โ€œUltralyticsโ€). This Individual Contributor License Agreement (โ€œAgreementโ€) sets out the terms governing any source code, object code, bug fixes, configuration changes, tools, specifications, documentation, data, materials, feedback, information or other works of authorship that you submit or have submitted, in any form and in any manner, to Ultralytics in respect of any Projects (collectively โ€œContributionsโ€). If you have any questions respecting this Agreement, please contact hello@ultralytics.com. + +You agree that the following terms apply to all of your past, present and future Contributions. Except for the licenses granted in this Agreement, you retain all of your right, title and interest in and to your Contributions. + +**Copyright License.** You hereby grant, and agree to grant, to Ultralytics a non-exclusive, perpetual, irrevocable, worldwide, fully-paid, royalty-free, transferable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, and distribute your Contributions and such derivative works, with the right to sublicense the foregoing rights through multiple tiers of sublicensees. + +**Patent License.** You hereby grant, and agree to grant, to Ultralytics a non-exclusive, perpetual, irrevocable, worldwide, fully-paid, royalty-free, transferable patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer your Contributions, where such license applies only to those patent claims licensable by you that are necessarily infringed by your Contributions alone or by combination of your Contributions with the Project to which such Contributions were submitted, with the right to sublicense the foregoing rights through multiple tiers of sublicensees. 
+
+**Moral Rights.** To the fullest extent permitted under applicable law, you hereby waive, and agree not to assert, all of your “moral rights” in or relating to your Contributions for the benefit of Ultralytics, its assigns, and their respective direct and indirect sublicensees.
+
+**Third Party Content/Rights.** If your Contribution includes or is based on any source code, object code, bug fixes, configuration changes, tools, specifications, documentation, data, materials, feedback, information or other works of authorship that were not authored by you (“Third Party Content”) or if you are aware of any third party intellectual property or proprietary rights associated with your Contribution (“Third Party Rights”), then you agree to include with the submission of your Contribution full details respecting such Third Party Content and Third Party Rights, including, without limitation, identification of which aspects of your Contribution contain Third Party Content or are associated with Third Party Rights, the owner/author of the Third Party Content and Third Party Rights, where you obtained the Third Party Content, and any applicable third party license terms or restrictions respecting the Third Party Content and Third Party Rights. For greater certainty, the foregoing obligations respecting the identification of Third Party Content and Third Party Rights do not apply to any portion of a Project that is incorporated into your Contribution to that same Project.
+
+**Representations.** You represent that, other than the Third Party Content and Third Party Rights identified by you in accordance with this Agreement, you are the sole author of your Contributions and are legally entitled to grant the foregoing licenses and waivers in respect of your Contributions. If your Contributions were created in the course of your employment with your past or present employer(s), you represent that such employer(s) has authorized you to make your Contributions on behalf of such employer(s) or such employer(s) has waived all of their right, title or interest in or to your Contributions.
+
+**Disclaimer.** To the fullest extent permitted under applicable law, your Contributions are provided on an "as is" basis, without any warranties or conditions, express or implied, including, without limitation, any implied warranties or conditions of non-infringement, merchantability or fitness for a particular purpose. You are not required to provide support for your Contributions, except to the extent you desire to provide support.
+
+**No Obligation.** You acknowledge that Ultralytics is under no obligation to use or incorporate your Contributions into any of the Projects. The decision to use or incorporate your Contributions into any of the Projects will be made at the sole discretion of Ultralytics or its authorized delegates.
+
+**Disputes.** This Agreement shall be governed by and construed in accordance with the laws of the State of New York, United States of America, without giving effect to its principles or rules regarding conflicts of laws, other than such principles directing application of New York law. The parties hereby submit to venue in, and jurisdiction of the courts located in New York, New York for purposes relating to this Agreement. In the event that any of the provisions of this Agreement shall be held by a court or other tribunal of competent jurisdiction to be unenforceable, the remaining portions hereof shall remain in full force and effect.
+ +**Assignment.** You agree that Ultralytics may assign this Agreement, and all of its rights, obligations and licenses hereunder. diff --git a/ultralytics/docs/en/help/CLA.md:Zone.Identifier b/ultralytics/docs/en/help/CLA.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/CLA.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/FAQ.md b/ultralytics/docs/en/help/FAQ.md new file mode 100755 index 0000000..8e4430a --- /dev/null +++ b/ultralytics/docs/en/help/FAQ.md @@ -0,0 +1,39 @@ +--- +comments: true +description: Find solutions to your common Ultralytics YOLO related queries. Learn about hardware requirements, fine-tuning YOLO models, conversion to ONNX/TensorFlow, and more. +keywords: Ultralytics, YOLO, FAQ, hardware requirements, ONNX, TensorFlow, real-time detection, YOLO accuracy +--- + +# Ultralytics YOLO Frequently Asked Questions (FAQ) + +This FAQ section addresses some common questions and issues users might encounter while working with Ultralytics YOLO repositories. + +## 1. What are the hardware requirements for running Ultralytics YOLO? + +Ultralytics YOLO can be run on a variety of hardware configurations, including CPUs, GPUs, and even some edge devices. However, for optimal performance and faster training and inference, we recommend using a GPU with a minimum of 8GB of memory. NVIDIA GPUs with CUDA support are ideal for this purpose. + +## 2. How do I fine-tune a pre-trained YOLO model on my custom dataset? + +To fine-tune a pre-trained YOLO model on your custom dataset, you'll need to create a dataset configuration file (YAML) that defines the dataset's properties, such as the path to the images, the number of classes, and class names. Next, you'll need to modify the model configuration file to match the number of classes in your dataset. Finally, use the `train.py` script to start the training process with your custom dataset and the pre-trained model. You can find a detailed guide on fine-tuning YOLO in the Ultralytics documentation. + +## 3. How do I convert a YOLO model to ONNX or TensorFlow format? + +Ultralytics provides built-in support for converting YOLO models to ONNX format. You can use the `export.py` script to convert a saved model to ONNX format. If you need to convert the model to TensorFlow format, you can use the ONNX model as an intermediary and then use the ONNX-TensorFlow converter to convert the ONNX model to TensorFlow format. + +## 4. Can I use Ultralytics YOLO for real-time object detection? + +Yes, Ultralytics YOLO is designed to be efficient and fast, making it suitable for real-time object detection tasks. The actual performance will depend on your hardware configuration and the complexity of the model. Using a GPU and optimizing the model for your specific use case can help achieve real-time performance. + +## 5. How can I improve the accuracy of my YOLO model? + +Improving the accuracy of a YOLO model may involve several strategies, such as: + +- Fine-tuning the model on more annotated data +- Data augmentation to increase the variety of training samples +- Using a larger or more complex model architecture +- Adjusting the learning rate, batch size, and other hyperparameters +- Using techniques like transfer learning or knowledge distillation + +Remember that there's often a trade-off between accuracy and inference speed, so finding the right balance is crucial for your specific application. 
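+
+As a concrete companion to questions 2 and 3 above, here is a minimal sketch of fine-tuning and exporting with the `ultralytics` Python API (an alternative to the `train.py`/`export.py` scripts mentioned above; the model, dataset, and hyperparameter values are illustrative):
+
+```python
+from ultralytics import YOLO
+
+# Load a pre-trained model to fine-tune
+model = YOLO("yolov8n.pt")
+
+# Fine-tune on a custom dataset described by a YAML config file
+model.train(data="coco8.yaml", epochs=100, imgsz=640)
+
+# Export the fine-tuned model to ONNX format
+model.export(format="onnx")
+```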
+
+If you have any more questions or need assistance, don't hesitate to consult the Ultralytics documentation or reach out to the community through GitHub Issues or the official discussion forum.
diff --git a/ultralytics/docs/en/help/FAQ.md:Zone.Identifier b/ultralytics/docs/en/help/FAQ.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/help/FAQ.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/help/code_of_conduct.md b/ultralytics/docs/en/help/code_of_conduct.md
new file mode 100755
index 0000000..f16bc16
--- /dev/null
+++ b/ultralytics/docs/en/help/code_of_conduct.md
@@ -0,0 +1,88 @@
+---
+comments: true
+description: Explore Ultralytics community’s Code of Conduct, ensuring a supportive, inclusive environment for contributors & members at all levels. Find our guidelines on acceptable behavior & enforcement.
+keywords: Ultralytics, code of conduct, community, contribution, behavior guidelines, enforcement, open source contributions
+---
+
+# Ultralytics Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socioeconomic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our community include:
+
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
+- Focusing on what is best not just for us as individuals, but for the overall community
+
+Examples of unacceptable behavior include:
+
+- The use of sexualized language or imagery, and sexual attention or advances of any kind
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email address, without their explicit permission
+- Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at hello@ultralytics.com. All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.
+
+[homepage]: https://www.contributor-covenant.org
diff --git a/ultralytics/docs/en/help/code_of_conduct.md:Zone.Identifier b/ultralytics/docs/en/help/code_of_conduct.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/help/code_of_conduct.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/help/contributing.md b/ultralytics/docs/en/help/contributing.md
new file mode 100755
index 0000000..18a157a
--- /dev/null
+++ b/ultralytics/docs/en/help/contributing.md
@@ -0,0 +1,96 @@
+---
+comments: true
+description: Learn how to contribute to Ultralytics YOLO projects – guidelines for pull requests, reporting bugs, code of conduct and CLA signing.
+keywords: Ultralytics, YOLO, open-source, contribute, pull request, bug report, coding guidelines, CLA, code of conduct, GitHub +--- + +# Contributing to Ultralytics Open-Source YOLO Repositories + +First of all, thank you for your interest in contributing to Ultralytics open-source YOLO repositories! Your contributions will help improve the project and benefit the community. This document provides guidelines and best practices to get you started. + +## Table of Contents + +1. [Code of Conduct](#code-of-conduct) +2. [Contributing via Pull Requests](#contributing-via-pull-requests) + - [CLA Signing](#cla-signing) + - [Google-Style Docstrings](#google-style-docstrings) + - [GitHub Actions CI Tests](#github-actions-ci-tests) +3. [Reporting Bugs](#reporting-bugs) +4. [License](#license) +5. [Conclusion](#conclusion) + +## Code of Conduct + +All contributors are expected to adhere to the [Code of Conduct](code_of_conduct.md) to ensure a welcoming and inclusive environment for everyone. + +## Contributing via Pull Requests + +We welcome contributions in the form of pull requests. To make the review process smoother, please follow these guidelines: + +1. **Fork the repository**: Fork the Ultralytics YOLO repository to your own GitHub account. + +2. **Create a branch**: Create a new branch in your forked repository with a descriptive name for your changes. + +3. **Make your changes**: Make the changes you want to contribute. Ensure that your changes follow the coding style of the project and do not introduce new errors or warnings. + +4. **Test your changes**: Test your changes locally to ensure that they work as expected and do not introduce new issues. + +5. **Commit your changes**: Commit your changes with a descriptive commit message. Make sure to include any relevant issue numbers in your commit message. + +6. **Create a pull request**: Create a pull request from your forked repository to the main Ultralytics YOLO repository. In the pull request description, provide a clear explanation of your changes and how they improve the project. + +### CLA Signing + +Before we can accept your pull request, you need to sign a [Contributor License Agreement (CLA)](CLA.md). This is a legal document stating that you agree to the terms of contributing to the Ultralytics YOLO repositories. The CLA ensures that your contributions are properly licensed and that the project can continue to be distributed under the AGPL-3.0 license. + +To sign the CLA, follow the instructions provided by the CLA bot after you submit your PR and add a comment in your PR saying: + +``` +I have read the CLA Document and I sign the CLA +``` + +### Google-Style Docstrings + +When adding new functions or classes, please include a [Google-style docstring](https://google.github.io/styleguide/pyguide.html) to provide clear and concise documentation for other developers. This will help ensure that your contributions are easy to understand and maintain. + +Example Google-style docstring: + +```python +def example_function(arg1: int, arg2: int) -> bool: + """ + Example function that demonstrates Google-style docstrings. + + Args: + arg1 (int): The first argument. + arg2 (int): The second argument. + + Returns: + (bool): True if successful, False otherwise. + + Examples: + >>> result = example_function(1, 2) # returns False + """ + if arg1 == arg2: + return True + return False +``` + +### GitHub Actions CI Tests + +Before your pull request can be merged, all GitHub Actions [Continuous Integration](CI.md) (CI) tests must pass. 
These tests include linting, unit tests, and other checks to ensure that your changes meet the quality standards of the project. Make sure to review the output of the GitHub Actions and fix any issues.
+
+## Reporting Bugs
+
+We appreciate bug reports as they play a crucial role in maintaining the project's quality. When reporting bugs, it is important to provide a [Minimum Reproducible Example](minimum_reproducible_example.md): a clear, concise code example that replicates the issue. This helps in quick identification and resolution of the bug.
+
+## License
+
+Ultralytics embraces the GNU Affero General Public License v3.0 (AGPL-3.0) for its repositories, promoting openness, transparency, and collaborative enhancement in software development. This strong copyleft license ensures that all users and developers retain the freedom to use, modify, and share the software. It fosters community collaboration, ensuring that any improvements remain accessible to all.
+
+Users and developers are encouraged to familiarize themselves with the terms of AGPL-3.0 to contribute effectively and ethically to the Ultralytics open-source community.
+
+## Conclusion
+
+Thank you for your interest in contributing to Ultralytics' open-source YOLO projects. Your participation is crucial in shaping the future of our software and fostering a community of innovation and collaboration. Whether you're improving code, reporting bugs, or suggesting features, your contributions make a significant impact.
+
+We're eager to see your ideas in action and appreciate your commitment to advancing object detection technology. Let's continue to grow and innovate together in this exciting open-source journey. Happy coding! 🚀🌟
diff --git a/ultralytics/docs/en/help/contributing.md:Zone.Identifier b/ultralytics/docs/en/help/contributing.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/help/contributing.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/help/environmental-health-safety.md b/ultralytics/docs/en/help/environmental-health-safety.md
new file mode 100755
index 0000000..9fee240
--- /dev/null
+++ b/ultralytics/docs/en/help/environmental-health-safety.md
@@ -0,0 +1,37 @@
+---
+comments: false
+description: Discover Ultralytics’ EHS policy principles and implementation measures. Committed to safety, environment, and continuous improvement for a sustainable future.
+keywords: Ultralytics policy, EHS, environment, health and safety, compliance, prevention, continuous improvement, risk management, emergency preparedness, resource allocation, communication
+---
+
+# Ultralytics Environmental, Health and Safety (EHS) Policy
+
+At Ultralytics, we recognize that the long-term success of our company relies not only on the products and services we offer, but also on the manner in which we conduct our business. We are committed to ensuring the safety and well-being of our employees, stakeholders, and the environment, and we will continuously strive to mitigate our impact on the environment while promoting health and safety.
+
+## Policy Principles
+
+1. **Compliance**: We will comply with all applicable laws, regulations, and standards related to EHS, and we will strive to exceed these standards where possible.
+
+2. **Prevention**: We will work to prevent accidents, injuries, and environmental harm by implementing risk management measures and ensuring all our operations and procedures are safe.
+
+3. 
**Continuous Improvement**: We will continuously improve our EHS performance by setting measurable objectives, monitoring our performance, auditing our operations, and revising our policies and procedures as needed. + +4. **Communication**: We will communicate openly about our EHS performance and will engage with stakeholders to understand and address their concerns and expectations. + +5. **Education and Training**: We will educate and train our employees and contractors in appropriate EHS procedures and practices. + +## Implementation Measures + +1. **Responsibility and Accountability**: Every employee and contractor working at or with Ultralytics is responsible for adhering to this policy. Managers and supervisors are accountable for ensuring this policy is implemented within their areas of control. + +2. **Risk Management**: We will identify, assess, and manage EHS risks associated with our operations and activities to prevent accidents, injuries, and environmental harm. + +3. **Resource Allocation**: We will allocate the necessary resources to ensure the effective implementation of our EHS policy, including the necessary equipment, personnel, and training. + +4. **Emergency Preparedness and Response**: We will develop, maintain, and test emergency preparedness and response plans to ensure we can respond effectively to EHS incidents. + +5. **Monitoring and Review**: We will monitor and review our EHS performance regularly to identify opportunities for improvement and ensure we are meeting our objectives. + +This policy reflects our commitment to minimizing our environmental footprint, ensuring the safety and well-being of our employees, and continuously improving our performance. + +Please remember that the implementation of an effective EHS policy requires the involvement and commitment of everyone working at or with Ultralytics. We encourage you to take personal responsibility for your safety and the safety of others, and to take care of the environment in which we live and work. diff --git a/ultralytics/docs/en/help/environmental-health-safety.md:Zone.Identifier b/ultralytics/docs/en/help/environmental-health-safety.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/environmental-health-safety.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/index.md b/ultralytics/docs/en/help/index.md new file mode 100755 index 0000000..e8f7376 --- /dev/null +++ b/ultralytics/docs/en/help/index.md @@ -0,0 +1,19 @@ +--- +comments: true +description: Find comprehensive guides and documents on Ultralytics YOLO tasks. Includes FAQs, contributing guides, CI guide, CLA, MRE guide, code of conduct & more. +keywords: Ultralytics, YOLO, guides, documents, FAQ, contributing, CI guide, CLA, MRE guide, code of conduct, EHS policy, security policy, privacy policy +--- + +Welcome to the Ultralytics Help page! We are dedicated to providing you with detailed resources to enhance your experience with the Ultralytics YOLO models and repositories. This page serves as your portal to guides and documentation designed to assist you with various tasks and answer questions you may encounter while engaging with our repositories. + +- [Frequently Asked Questions (FAQ)](FAQ.md): Find answers to common questions and issues encountered by the community of Ultralytics YOLO users and contributors. 
+- [Contributing Guide](contributing.md): Discover the protocols for making contributions, including how to submit pull requests, report bugs, and more. +- [Continuous Integration (CI) Guide](CI.md): Gain insights into the CI processes we employ, complete with status reports for each Ultralytics repository. +- [Contributor License Agreement (CLA)](CLA.md): Review the CLA to understand the rights and responsibilities associated with contributing to Ultralytics projects. +- [Minimum Reproducible Example (MRE) Guide](minimum_reproducible_example.md): Learn the process for creating an MRE, which is crucial for the timely and effective resolution of bug reports. +- [Code of Conduct](code_of_conduct.md): Our community guidelines support a respectful and open atmosphere for all collaborators. +- [Environmental, Health and Safety (EHS) Policy](environmental-health-safety.md): Delve into our commitment to sustainability and the well-being of all our stakeholders. +- [Security Policy](security.md): Familiarize yourself with our security protocols and the procedure for reporting vulnerabilities. +- [Privacy Policy](privacy.md): Read our privacy policy to understand how we protect your data and respect your privacy in all our services and operations. + +We encourage you to review these resources for a seamless and productive experience. Our aim is to foster a helpful and friendly environment for everyone in the Ultralytics community. Should you require additional support, please feel free to reach out via GitHub Issues or our official discussion forums. Happy coding! diff --git a/ultralytics/docs/en/help/index.md:Zone.Identifier b/ultralytics/docs/en/help/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/minimum_reproducible_example.md b/ultralytics/docs/en/help/minimum_reproducible_example.md new file mode 100755 index 0000000..47a0cdf --- /dev/null +++ b/ultralytics/docs/en/help/minimum_reproducible_example.md @@ -0,0 +1,78 @@ +--- +comments: true +description: Learn how to create minimum reproducible examples (MRE) for efficient bug reporting in Ultralytics YOLO repositories with this step-by-step guide. +keywords: Ultralytics, YOLO, minimum reproducible example, MRE, bug reports, guide, dependencies, code, troubleshooting +--- + +# Creating a Minimum Reproducible Example for Bug Reports in Ultralytics YOLO Repositories + +When submitting a bug report for Ultralytics YOLO repositories, it's essential to provide a [minimum reproducible example](https://docs.ultralytics.com/help/minimum_reproducible_example/) (MRE). An MRE is a small, self-contained piece of code that demonstrates the problem you're experiencing. Providing an MRE helps maintainers and contributors understand the issue and work on a fix more efficiently. This guide explains how to create an MRE when submitting bug reports to Ultralytics YOLO repositories. + +## 1. Isolate the Problem + +The first step in creating an MRE is to isolate the problem. This means removing any unnecessary code or dependencies that are not directly related to the issue. Focus on the specific part of the code that is causing the problem and remove any irrelevant code. + +## 2. Use Public Models and Datasets + +When creating an MRE, use publicly available models and datasets to reproduce the issue. For example, use the 'yolov8n.pt' model and the 'coco8.yaml' dataset. 
This ensures that the maintainers and contributors can easily run your example and investigate the problem without needing access to proprietary data or custom models. + +## 3. Include All Necessary Dependencies + +Make sure to include all the necessary dependencies in your MRE. If your code relies on external libraries, specify the required packages and their versions. Ideally, provide a `requirements.txt` file or list the dependencies in your bug report. + +## 4. Write a Clear Description of the Issue + +Provide a clear and concise description of the issue you're experiencing. Explain the expected behavior and the actual behavior you're encountering. If applicable, include any relevant error messages or logs. + +## 5. Format Your Code Properly + +When submitting an MRE, format your code properly using code blocks in the issue description. This makes it easier for others to read and understand your code. In GitHub, you can create a code block by wrapping your code with triple backticks (\```) and specifying the language: + +
+```python
+# Your Python code goes here
+```
+
+ +## 6. Test Your MRE + +Before submitting your MRE, test it to ensure that it accurately reproduces the issue. Make sure that others can run your example without any issues or modifications. + +## Example of an MRE + +Here's an example of an MRE for a hypothetical bug report: + +**Bug description:** + +When running the `detect.py` script on the sample image from the 'coco8.yaml' dataset, I get an error related to the dimensions of the input tensor. + +**MRE:** + +```python +import torch +from ultralytics import YOLO + +# Load the model +model = YOLO("yolov8n.pt") + +# Load a 0-channel image +image = torch.rand(1, 0, 640, 640) + +# Run the model +results = model(image) +``` + +**Error message:** + +``` +RuntimeError: Expected input[1, 0, 640, 640] to have 3 channels, but got 0 channels instead +``` + +**Dependencies:** + +- torch==2.0.0 +- ultralytics==8.0.90 + +In this example, the MRE demonstrates the issue with a minimal amount of code, uses a public model ('yolov8n.pt'), includes all necessary dependencies, and provides a clear description of the problem along with the error message. + +By following these guidelines, you'll help the maintainers and contributors of Ultralytics YOLO repositories to understand and resolve your issue more efficiently. diff --git a/ultralytics/docs/en/help/minimum_reproducible_example.md:Zone.Identifier b/ultralytics/docs/en/help/minimum_reproducible_example.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/minimum_reproducible_example.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/privacy.md b/ultralytics/docs/en/help/privacy.md new file mode 100755 index 0000000..c9bc3a5 --- /dev/null +++ b/ultralytics/docs/en/help/privacy.md @@ -0,0 +1,137 @@ +--- +description: Learn about how Ultralytics collects and uses data to improve user experience, ensure software stability, and address privacy concerns, with options to opt-out. +keywords: Ultralytics, Data Collection, User Privacy, Google Analytics, Sentry, Crash Reporting, Anonymized Data, Privacy Settings, Opt-Out +--- + +# Data Collection for Ultralytics Python Package + +## Overview + +[Ultralytics](https://ultralytics.com) is dedicated to the continuous enhancement of the user experience and the capabilities of our Python package, including the advanced YOLO models we develop. Our approach involves the gathering of anonymized usage statistics and crash reports, helping us identify opportunities for improvement and ensuring the reliability of our software. This transparency document outlines what data we collect, its purpose, and the choice you have regarding this data collection. + +## Anonymized Google Analytics + +[Google Analytics](https://developers.google.com/analytics) is a web analytics service offered by Google that tracks and reports website traffic. It allows us to collect data about how our Python package is used, which is crucial for making informed decisions about design and functionality. + +### What We Collect + +- **Usage Metrics**: These metrics help us understand how frequently and in what ways the package is utilized, what features are favored, and the typical command-line arguments that are used. +- **System Information**: We collect general non-identifiable information about your computing environment to ensure our package performs well across various systems. 
+- **Performance Data**: Understanding the performance of our models during training, validation, and inference helps us identify optimization opportunities.
+
+For more information about Google Analytics and data privacy, visit [Google Analytics Privacy](https://support.google.com/analytics/answer/6004245).
+
+### How We Use This Data
+
+- **Feature Improvement**: Insights from usage metrics guide us in enhancing user satisfaction and interface design.
+- **Optimization**: Performance data assists us in fine-tuning our models for better efficiency and speed across diverse hardware and software configurations.
+- **Trend Analysis**: By studying usage trends, we can predict and respond to the evolving needs of our community.
+
+### Privacy Considerations
+
+We take several measures to ensure the privacy and security of the data you entrust to us:
+
+- **Anonymization**: We configure Google Analytics to anonymize the data collected, which means no personally identifiable information (PII) is gathered. You can use our services with the assurance that your personal details remain private.
+- **Aggregation**: Data is analyzed only in aggregate form. This practice ensures that patterns can be observed without revealing any individual user's activity.
+- **No Image Data Collection**: Ultralytics does not collect, process, or view any training or inference images.
+
+## Sentry Crash Reporting
+
+[Sentry](https://sentry.io/) is a developer-centric error tracking software that aids in identifying, diagnosing, and resolving issues in real-time, ensuring the robustness and reliability of applications. Within our package, it plays a crucial role by providing insights through crash reporting, significantly contributing to the stability and ongoing refinement of our software.
+
+!!! Note
+
+    Crash reporting via Sentry is activated only if the `sentry-sdk` Python package is pre-installed on your system. This package isn't included in the `ultralytics` prerequisites and won't be installed automatically by Ultralytics.
+
+### What We Collect
+
+If the `sentry-sdk` Python package is pre-installed on your system, a crash event may send the following information:
+
+- **Crash Logs**: Detailed reports on the application's condition at the time of a crash, which are vital for our debugging efforts.
+- **Error Messages**: We record error messages generated during the operation of our package to understand and resolve potential issues quickly.
+
+To learn more about how Sentry handles data, please visit [Sentry's Privacy Policy](https://sentry.io/privacy/).
+
+### How We Use This Data
+
+- **Debugging**: Analyzing crash logs and error messages enables us to swiftly identify and correct software bugs.
+- **Stability Metrics**: By constantly monitoring for crashes, we aim to improve the stability and reliability of our package.
+
+### Privacy Considerations
+
+- **Sensitive Information**: We ensure that crash logs are scrubbed of any personally identifiable or sensitive user data, safeguarding the confidentiality of your information.
+- **Controlled Collection**: Our crash reporting mechanism is meticulously calibrated to gather only what is essential for troubleshooting while respecting user privacy.
+
+By detailing the tools we use for data collection and linking to their respective privacy pages, we aim to give you a comprehensive view of our practices, emphasizing transparency and respect for user privacy.
+ +## Disabling Data Collection + +We believe in providing our users with full control over their data. By default, our package is configured to collect analytics and crash reports to help improve the experience for all users. However, we respect that some users may prefer to opt out of this data collection. + +To opt out of sending analytics and crash reports, you can simply set `sync=False` in your YOLO settings. This ensures that no data is transmitted from your machine to our analytics tools. + +### Inspecting Settings + +To gain insight into the current configuration of your settings, you can view them directly: + +!!! Example "View settings" + + === "Python" + You can use Python to view your settings. Start by importing the `settings` object from the `ultralytics` module. Print and return settings using the following commands: + ```python + from ultralytics import settings + + # View all settings + print(settings) + + # Return analytics and crash reporting setting + value = settings['sync'] + ``` + + === "CLI" + Alternatively, the command-line interface allows you to check your settings with a simple command: + ```bash + yolo settings + ``` + +### Modifying Settings + +Ultralytics allows users to easily modify their settings. Changes can be performed in the following ways: + +!!! Example "Update settings" + + === "Python" + Within the Python environment, call the `update` method on the `settings` object to change your settings: + ```python + from ultralytics import settings + + # Disable analytics and crash reporting + settings.update({'sync': False}) + + # Reset settings to default values + settings.reset() + ``` + + === "CLI" + If you prefer using the command-line interface, the following commands will allow you to modify your settings: + ```bash + # Disable analytics and crash reporting + yolo settings sync=False + + # Reset settings to default values + yolo settings reset + ``` + +The `sync=False` setting will prevent any data from being sent to Google Analytics or Sentry. Your settings will be respected across all sessions using the Ultralytics package and saved to disk for future sessions. + +## Commitment to Privacy + +Ultralytics takes user privacy seriously. We design our data collection practices with the following principles: + +- **Transparency**: We are open about the data we collect and how it is used. +- **Control**: We give users full control over their data. +- **Security**: We employ industry-standard security measures to protect the data we collect. + +## Questions or Concerns + +If you have any questions or concerns about our data collection practices, please reach out to us via our [contact form](https://ultralytics.com/contact) or via [support@ultralytics.com](mailto:support@ultralytics.com). We are dedicated to ensuring our users feel informed and confident in their privacy when using our package. diff --git a/ultralytics/docs/en/help/privacy.md:Zone.Identifier b/ultralytics/docs/en/help/privacy.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/privacy.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/help/security.md b/ultralytics/docs/en/help/security.md new file mode 100755 index 0000000..01ea1f8 --- /dev/null +++ b/ultralytics/docs/en/help/security.md @@ -0,0 +1,36 @@ +--- +description: Explore Ultralytics' comprehensive security strategies safeguarding user data and systems. 
Learn about our diverse security tools, including Snyk, GitHub CodeQL, and Dependabot Alerts.
+keywords: Ultralytics, Comprehensive Security, user data protection, Snyk, GitHub CodeQL, Dependabot, vulnerability management, coding security practices
+---
+
+# Ultralytics Security Policy
+
+At [Ultralytics](https://ultralytics.com), the security of our users' data and systems is of utmost importance. To ensure the safety and security of our [open-source projects](https://github.com/ultralytics), we have implemented several measures to detect and prevent security vulnerabilities.
+
+## Snyk Scanning
+
+We utilize [Snyk](https://snyk.io/advisor/python/ultralytics) to conduct comprehensive security scans on Ultralytics repositories. Snyk's robust scanning capabilities extend beyond dependency checks; it also examines our code and Dockerfiles for various vulnerabilities. By identifying and addressing these issues proactively, we ensure a higher level of security and reliability for our users.
+
+[![ultralytics](https://snyk.io/advisor/python/ultralytics/badge.svg)](https://snyk.io/advisor/python/ultralytics)
+
+## GitHub CodeQL Scanning
+
+Our security strategy includes GitHub's [CodeQL](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/about-code-scanning-with-codeql) scanning. CodeQL delves deep into our codebase, identifying complex vulnerabilities like SQL injection and XSS by analyzing the code's semantic structure. This advanced level of analysis ensures early detection and resolution of potential security risks.
+
+[![CodeQL](https://github.com/ultralytics/ultralytics/actions/workflows/codeql.yaml/badge.svg)](https://github.com/ultralytics/ultralytics/actions/workflows/codeql.yaml)
+
+## GitHub Dependabot Alerts
+
+[Dependabot](https://docs.github.com/en/code-security/dependabot) is integrated into our workflow to monitor dependencies for known vulnerabilities. When a vulnerability is identified in one of our dependencies, Dependabot alerts us, allowing for swift and informed remediation actions.
+
+## GitHub Secret Scanning Alerts
+
+We employ GitHub [secret scanning](https://docs.github.com/en/code-security/secret-scanning/managing-alerts-from-secret-scanning) alerts to detect sensitive data, such as credentials and private keys, accidentally pushed to our repositories. This early detection mechanism helps prevent potential security breaches and data exposures.
+
+## Private Vulnerability Reporting
+
+We enable private vulnerability reporting, allowing users to discreetly report potential security issues. This approach facilitates responsible disclosure, ensuring vulnerabilities are handled securely and efficiently.
+
+If you suspect or discover a security vulnerability in any of our repositories, please let us know immediately. You can reach out to us directly via our [contact form](https://ultralytics.com/contact) or via [security@ultralytics.com](mailto:security@ultralytics.com). Our security team will investigate and respond as soon as possible.
+
+We appreciate your help in keeping all Ultralytics open-source projects secure and safe for everyone 🙏.
diff --git a/ultralytics/docs/en/help/security.md:Zone.Identifier b/ultralytics/docs/en/help/security.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/help/security.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/app/android.md b/ultralytics/docs/en/hub/app/android.md new file mode 100755 index 0000000..0bff31c --- /dev/null +++ b/ultralytics/docs/en/hub/app/android.md @@ -0,0 +1,89 @@ +--- +comments: true +description: Learn about the Ultralytics Android App, enabling real-time object detection using YOLO models. Discover in-app features, quantization methods, and delegate options for optimal performance. +keywords: Ultralytics, Android App, real-time object detection, YOLO models, TensorFlow Lite, FP16 quantization, INT8 quantization, CPU, GPU, Hexagon, NNAPI +--- + +# Ultralytics Android App: Real-time Object Detection with YOLO Models + + + Ultralytics HUB preview image +
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+ + Google Play store  +
+ +The Ultralytics Android App is a powerful tool that allows you to run YOLO models directly on your Android device for real-time object detection. This app utilizes TensorFlow Lite for model optimization and various hardware delegates for acceleration, enabling fast and efficient object detection. + +## Quantization and Acceleration + +To achieve real-time performance on your Android device, YOLO models are quantized to either FP16 or INT8 precision. Quantization is a process that reduces the numerical precision of the model's weights and biases, thus reducing the model's size and the amount of computation required. This results in faster inference times without significantly affecting the model's accuracy. + +### FP16 Quantization + +FP16 (or half-precision) quantization converts the model's 32-bit floating-point numbers to 16-bit floating-point numbers. This reduces the model's size by half and speeds up the inference process, while maintaining a good balance between accuracy and performance. + +### INT8 Quantization + +INT8 (or 8-bit integer) quantization further reduces the model's size and computation requirements by converting its 32-bit floating-point numbers to 8-bit integers. This quantization method can result in a significant speedup, but it may lead to a slight reduction in mean average precision (mAP) due to the lower numerical precision. + +!!! Tip "mAP Reduction in INT8 Models" + + The reduced numerical precision in INT8 models can lead to some loss of information during the quantization process, which may result in a slight decrease in mAP. However, this trade-off is often acceptable considering the substantial performance gains offered by INT8 quantization. + +## Delegates and Performance Variability + +Different delegates are available on Android devices to accelerate model inference. These delegates include CPU, [GPU](https://www.tensorflow.org/lite/android/delegates/gpu), [Hexagon](https://www.tensorflow.org/lite/android/delegates/hexagon) and [NNAPI](https://www.tensorflow.org/lite/android/delegates/nnapi). The performance of these delegates varies depending on the device's hardware vendor, product line, and specific chipsets used in the device. + +1. **CPU**: The default option, with reasonable performance on most devices. +2. **GPU**: Utilizes the device's GPU for faster inference. It can provide a significant performance boost on devices with powerful GPUs. +3. **Hexagon**: Leverages Qualcomm's Hexagon DSP for faster and more efficient processing. This option is available on devices with Qualcomm Snapdragon processors. +4. **NNAPI**: The Android Neural Networks API (NNAPI) serves as an abstraction layer for running ML models on Android devices. NNAPI can utilize various hardware accelerators, such as CPU, GPU, and dedicated AI chips (e.g., Google's Edge TPU, or the Pixel Neural Core). 
+ +Here's a table showing the primary vendors, their product lines, popular devices, and supported delegates: + +| Vendor | Product Lines | Popular Devices | Delegates Supported | +|-----------------------------------------|--------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------| +| [Qualcomm](https://www.qualcomm.com/) | [Snapdragon (e.g., 800 series)](https://www.qualcomm.com/snapdragon) | [Samsung Galaxy S21](https://www.samsung.com/global/galaxy/galaxy-s21-5g/), [OnePlus 9](https://www.oneplus.com/9), [Google Pixel 6](https://store.google.com/product/pixel_6) | CPU, GPU, Hexagon, NNAPI | +| [Samsung](https://www.samsung.com/) | [Exynos (e.g., Exynos 2100)](https://www.samsung.com/semiconductor/minisite/exynos/) | [Samsung Galaxy S21 (Global version)](https://www.samsung.com/global/galaxy/galaxy-s21-5g/) | CPU, GPU, NNAPI | +| [MediaTek](https://i.mediatek.com/) | [Dimensity (e.g., Dimensity 1200)](https://i.mediatek.com/dimensity-1200) | [Realme GT](https://www.realme.com/global/realme-gt), [Xiaomi Redmi Note](https://www.mi.com/en/phone/redmi/note-list) | CPU, GPU, NNAPI | +| [HiSilicon](https://www.hisilicon.com/) | [Kirin (e.g., Kirin 990)](https://www.hisilicon.com/en/products/Kirin) | [Huawei P40 Pro](https://consumer.huawei.com/en/phones/p40-pro/), [Huawei Mate 30 Pro](https://consumer.huawei.com/en/phones/mate30-pro/) | CPU, GPU, NNAPI | +| [NVIDIA](https://www.nvidia.com/) | [Tegra (e.g., Tegra X1)](https://developer.nvidia.com/content/tegra-x1) | [NVIDIA Shield TV](https://www.nvidia.com/en-us/shield/shield-tv/), [Nintendo Switch](https://www.nintendo.com/switch/) | CPU, GPU, NNAPI | + +Please note that the list of devices mentioned is not exhaustive and may vary depending on the specific chipsets and device models. Always test your models on your target devices to ensure compatibility and optimal performance. + +Keep in mind that the choice of delegate can affect performance and model compatibility. For example, some models may not work with certain delegates, or a delegate may not be available on a specific device. As such, it's essential to test your model and the chosen delegate on your target devices for the best results. + +## Getting Started with the Ultralytics Android App + +To get started with the Ultralytics Android App, follow these steps: + +1. Download the Ultralytics App from the [Google Play Store](https://play.google.com/store/apps/details?id=com.ultralytics.ultralytics_app). + +2. Launch the app on your Android device and sign in with your Ultralytics account. If you don't have an account yet, create one [here](https://hub.ultralytics.com/). + +3. Once signed in, you will see a list of your trained YOLO models. Select a model to use for object detection. + +4. Grant the app permission to access your device's camera. + +5. Point your device's camera at objects you want to detect. The app will display bounding boxes and class labels in real-time as it detects objects. + +6. Explore the app's settings to adjust the detection threshold, enable or disable specific object classes, and more. + +With the Ultralytics Android App, you now have the power of real-time object detection using YOLO models right at your fingertips. Enjoy exploring the app's features and optimizing its settings to suit your specific use cases. 
diff --git a/ultralytics/docs/en/hub/app/android.md:Zone.Identifier b/ultralytics/docs/en/hub/app/android.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/app/android.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/app/index.md b/ultralytics/docs/en/hub/app/index.md new file mode 100755 index 0000000..ef962e8 --- /dev/null +++ b/ultralytics/docs/en/hub/app/index.md @@ -0,0 +1,48 @@ +--- +comments: true +description: Explore the Ultralytics HUB App, offering the ability to run YOLOv5 and YOLOv8 models on your iOS and Android devices with optimized performance. +keywords: Ultralytics, HUB App, YOLOv5, YOLOv8, mobile AI, real-time object detection, image recognition, mobile device, hardware acceleration, Apple Neural Engine, Android GPU, NNAPI, custom model training +--- + +# Ultralytics HUB App + + + Ultralytics HUB preview image +
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+ + Apple App store + + Google Play store  +
+ +Welcome to the Ultralytics HUB App! We are excited to introduce this powerful mobile app that allows you to run YOLOv5 and YOLOv8 models directly on your [iOS](https://apps.apple.com/xk/app/ultralytics/id1583935240) and [Android](https://play.google.com/store/apps/details?id=com.ultralytics.ultralytics_app) devices. With the HUB App, you can utilize hardware acceleration features like Apple's Neural Engine (ANE) or Android GPU and Neural Network API (NNAPI) delegates to achieve impressive performance on your mobile device. + +## Features + +- **Run YOLOv5 and YOLOv8 models**: Experience the power of YOLO models on your mobile device for real-time object detection and image recognition tasks. +- **Hardware Acceleration**: Benefit from Apple ANE on iOS devices or Android GPU and NNAPI delegates for optimized performance. +- **Custom Model Training**: Train custom models with the Ultralytics HUB platform and preview them live using the HUB App. +- **Mobile Compatibility**: The HUB App supports both iOS and Android devices, bringing the power of YOLO models to a wide range of users. + +## App Documentation + +- [**iOS**](ios.md): Learn about YOLO CoreML models accelerated on Apple's Neural Engine for iPhones and iPads. +- [**Android**](android.md): Explore TFLite acceleration on Android mobile devices. + +Get started today by downloading the Ultralytics HUB App on your mobile device and unlock the potential of YOLOv5 and YOLOv8 models on-the-go. Don't forget to check out our comprehensive [HUB Docs](../index.md) for more information on training, deploying, and using your custom models with the Ultralytics HUB platform. diff --git a/ultralytics/docs/en/hub/app/index.md:Zone.Identifier b/ultralytics/docs/en/hub/app/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/app/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/app/ios.md b/ultralytics/docs/en/hub/app/ios.md new file mode 100755 index 0000000..ac939c9 --- /dev/null +++ b/ultralytics/docs/en/hub/app/ios.md @@ -0,0 +1,79 @@ +--- +comments: true +description: Execute object detection in real-time on your iOS devices utilizing YOLO models. Leverage the power of the Apple Neural Engine and Core ML for fast and efficient object detection. +keywords: Ultralytics, iOS app, object detection, YOLO models, real time, Apple Neural Engine, Core ML, FP16, INT8, quantization +--- + +# Ultralytics iOS App: Real-time Object Detection with YOLO Models + + + Ultralytics HUB preview image +
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+ + Apple App store +
+ +The Ultralytics iOS App is a powerful tool that allows you to run YOLO models directly on your iPhone or iPad for real-time object detection. This app utilizes the Apple Neural Engine and Core ML for model optimization and acceleration, enabling fast and efficient object detection. + +## Quantization and Acceleration + +To achieve real-time performance on your iOS device, YOLO models are quantized to either FP16 or INT8 precision. Quantization is a process that reduces the numerical precision of the model's weights and biases, thus reducing the model's size and the amount of computation required. This results in faster inference times without significantly affecting the model's accuracy. + +### FP16 Quantization + +FP16 (or half-precision) quantization converts the model's 32-bit floating-point numbers to 16-bit floating-point numbers. This reduces the model's size by half and speeds up the inference process, while maintaining a good balance between accuracy and performance. + +### INT8 Quantization + +INT8 (or 8-bit integer) quantization further reduces the model's size and computation requirements by converting its 32-bit floating-point numbers to 8-bit integers. This quantization method can result in a significant speedup, but it may lead to a slight reduction in accuracy. + +## Apple Neural Engine + +The Apple Neural Engine (ANE) is a dedicated hardware component integrated into Apple's A-series and M-series chips. It's designed to accelerate machine learning tasks, particularly for neural networks, allowing for faster and more efficient execution of your YOLO models. + +By combining quantized YOLO models with the Apple Neural Engine, the Ultralytics iOS App achieves real-time object detection on your iOS device without compromising on accuracy or performance. + +| Release Year | iPhone Name | Chipset Name | Node Size | ANE TOPs | +|--------------|------------------------------------------------------|-------------------------------------------------------|-----------|----------| +| 2017 | [iPhone X](https://en.wikipedia.org/wiki/IPhone_X) | [A11 Bionic](https://en.wikipedia.org/wiki/Apple_A11) | 10 nm | 0.6 | +| 2018 | [iPhone XS](https://en.wikipedia.org/wiki/IPhone_XS) | [A12 Bionic](https://en.wikipedia.org/wiki/Apple_A12) | 7 nm | 5 | +| 2019 | [iPhone 11](https://en.wikipedia.org/wiki/IPhone_11) | [A13 Bionic](https://en.wikipedia.org/wiki/Apple_A13) | 7 nm | 6 | +| 2020 | [iPhone 12](https://en.wikipedia.org/wiki/IPhone_12) | [A14 Bionic](https://en.wikipedia.org/wiki/Apple_A14) | 5 nm | 11 | +| 2021 | [iPhone 13](https://en.wikipedia.org/wiki/IPhone_13) | [A15 Bionic](https://en.wikipedia.org/wiki/Apple_A15) | 5 nm | 15.8 | +| 2022 | [iPhone 14](https://en.wikipedia.org/wiki/IPhone_14) | [A16 Bionic](https://en.wikipedia.org/wiki/Apple_A16) | 4 nm | 17.0 | + +Please note that this list only includes iPhone models from 2017 onwards, and the ANE TOPs values are approximate. + +## Getting Started with the Ultralytics iOS App + +To get started with the Ultralytics iOS App, follow these steps: + +1. Download the Ultralytics App from the [App Store](https://apps.apple.com/xk/app/ultralytics/id1583935240). + +2. Launch the app on your iOS device and sign in with your Ultralytics account. If you don't have an account yet, create one [here](https://hub.ultralytics.com/). + +3. Once signed in, you will see a list of your trained YOLO models. Select a model to use for object detection. + +4. Grant the app permission to access your device's camera. + +5. 
Point your device's camera at objects you want to detect. The app will display bounding boxes and class labels in real-time as it detects objects. + +6. Explore the app's settings to adjust the detection threshold, enable or disable specific object classes, and more. + +With the Ultralytics iOS App, you can now leverage the power of YOLO models for real-time object detection on your iPhone or iPad, powered by the Apple Neural Engine and optimized with FP16 or INT8 quantization. diff --git a/ultralytics/docs/en/hub/app/ios.md:Zone.Identifier b/ultralytics/docs/en/hub/app/ios.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/app/ios.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/datasets.md b/ultralytics/docs/en/hub/datasets.md new file mode 100755 index 0000000..1ab7c45 --- /dev/null +++ b/ultralytics/docs/en/hub/datasets.md @@ -0,0 +1,159 @@ +--- +comments: true +description: Learn how Ultralytics HUB datasets streamline your ML workflow. Upload, format, validate, access, share, edit or delete datasets for Ultralytics YOLO model training. +keywords: Ultralytics, HUB datasets, YOLO model training, upload datasets, dataset validation, ML workflow, share datasets +--- + +# HUB Datasets + +[Ultralytics HUB](https://hub.ultralytics.com/) datasets are a practical solution for managing and leveraging your custom datasets. + +Once uploaded, datasets can be immediately utilized for model training. This integrated approach facilitates a seamless transition from dataset management to model training, significantly simplifying the entire process. + +## Upload Dataset + +Ultralytics HUB datasets are just like YOLOv5 and YOLOv8 ๐Ÿš€ datasets. They use the same structure and the same label formats to keep everything simple. + +Before you upload a dataset to Ultralytics HUB, make sure to **place your dataset YAML file inside the dataset root directory** and that **your dataset YAML, directory and ZIP have the same name**, as shown in the example below, and then zip the dataset directory. + +For example, if your dataset is called "coco8", as our [COCO8](https://docs.ultralytics.com/datasets/detect/coco8) example dataset, then you should have a `coco8.yaml` inside your `coco8/` directory, which will create a `coco8.zip` when zipped: + +```bash +zip -r coco8.zip coco8 +``` + +You can download our [COCO8](https://github.com/ultralytics/hub/blob/main/example_datasets/coco8.zip) example dataset and unzip it to see exactly how to structure your dataset. + +

+ COCO8 Dataset Structure +

+
+The dataset YAML is the same standard YOLOv5 and YOLOv8 YAML format.
+
+!!! Example "coco8.yaml"
+
+    ```yaml
+    --8<-- "ultralytics/cfg/datasets/coco8.yaml"
+    ```
+
+After zipping your dataset, you should validate it before uploading it to Ultralytics HUB. Ultralytics HUB conducts the dataset validation check post-upload, so by ensuring your dataset is correctly formatted and error-free ahead of time, you can forestall any setbacks due to dataset rejection.
+
+```python
+from ultralytics.hub import check_dataset
+
+check_dataset('path/to/coco8.zip')
+```
+
+Once your dataset ZIP is ready, navigate to the [Datasets](https://hub.ultralytics.com/datasets) page by clicking on the **Datasets** button in the sidebar.
+
+![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Datasets button in the sidebar](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_2.jpg)
+
+??? tip "Tip"
+
+    You can also upload a dataset directly from the [Home](https://hub.ultralytics.com/home) page.
+
+    ![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Upload Dataset card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_3.jpg)
+
+Click on the **Upload Dataset** button on the top right of the page. This action will trigger the **Upload Dataset** dialog.
+
+![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Upload Dataset button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_4.jpg)
+
+Upload your dataset in the _Dataset .zip file_ field.
+
+You have the additional option to set a custom name and description for your Ultralytics HUB dataset.
+
+When you're happy with your dataset configuration, click **Upload**.
+
+![Ultralytics HUB screenshot of the Upload Dataset dialog with an arrow pointing to the Upload button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_5.jpg)
+
+After your dataset is uploaded and processed, you will be able to access it from the Datasets page.
+
+![Ultralytics HUB screenshot of the Datasets page with an arrow pointing to one of the datasets](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_6.jpg)
+
+You can view the images in your dataset grouped by splits (Train, Validation, Test).
+
+![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Images tab](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_7.jpg)
+
+??? tip "Tip"
+
+    Each image can be enlarged for better visualization.
+
+    ![Ultralytics HUB screenshot of the Images tab inside the Dataset page with an arrow pointing to the expand icon](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_8.jpg)
+
+    ![Ultralytics HUB screenshot of the Images tab inside the Dataset page with one of the images expanded](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_9.jpg)
+
+Also, you can analyze your dataset by clicking on the **Overview** tab.
+
+![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Overview tab](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_10.jpg)
+
+Next, [train a model](https://docs.ultralytics.com/hub/models/#train-model) on your dataset.
+ +![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Train Model button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_upload_dataset_11.jpg) + +## Share Dataset + +!!! Info "Info" + + Ultralytics HUB's sharing functionality provides a convenient way to share datasets with others. This feature is designed to accommodate both existing Ultralytics HUB users and those who have yet to create an account. + +??? note "Note" + + You have control over the general access of your datasets. + + You can choose to set the general access to "Private", in which case, only you will have access to it. Alternatively, you can set the general access to "Unlisted" which grants viewing access to anyone who has the direct link to the dataset, regardless of whether they have an Ultralytics HUB account or not. + +Navigate to the Dataset page of the dataset you want to share, open the dataset actions dropdown and click on the **Share** option. This action will trigger the **Share Dataset** dialog. + +![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Share option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_share_dataset_1.jpg) + +??? tip "Tip" + + You can also share a dataset directly from the [Datasets](https://hub.ultralytics.com/datasets) page. + + ![Ultralytics HUB screenshot of the Datasets page with an arrow pointing to the Share option of one of the datasets](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_share_dataset_2.jpg) + +Set the general access to "Unlisted" and click **Save**. + +![Ultralytics HUB screenshot of the Share Dataset dialog with an arrow pointing to the dropdown and one to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_share_dataset_3.jpg) + +Now, anyone who has the direct link to your dataset can view it. + +??? tip "Tip" + + You can easily click on the dataset's link shown in the **Share Dataset** dialog to copy it. + + ![Ultralytics HUB screenshot of the Share Dataset dialog with an arrow pointing to the dataset's link](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_share_dataset_4.jpg) + +## Edit Dataset + +Navigate to the Dataset page of the dataset you want to edit, open the dataset actions dropdown and click on the **Edit** option. This action will trigger the **Update Dataset** dialog. + +![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Edit option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_edit_dataset_1.jpg) + +??? tip "Tip" + + You can also edit a dataset directly from the [Datasets](https://hub.ultralytics.com/datasets) page. + + ![Ultralytics HUB screenshot of the Datasets page with an arrow pointing to the Edit option of one of the datasets](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_edit_dataset_2.jpg) + +Apply the desired modifications to your dataset and then confirm the changes by clicking **Save**. + +![Ultralytics HUB screenshot of the Update Dataset dialog with an arrow pointing to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_edit_dataset_3.jpg) + +## Delete Dataset + +Navigate to the Dataset page of the dataset you want to delete, open the dataset actions dropdown and click on the **Delete** option. This action will delete the dataset. 
+ +![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Delete option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_delete_dataset_1.jpg) + +??? tip "Tip" + + You can also delete a dataset directly from the [Datasets](https://hub.ultralytics.com/datasets) page. + + ![Ultralytics HUB screenshot of the Datasets page with an arrow pointing to the Delete option of one of the datasets](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_delete_dataset_2.jpg) + +??? note "Note" + + If you change your mind, you can restore the dataset from the [Trash](https://hub.ultralytics.com/trash) page. + + ![Ultralytics HUB screenshot of the Trash page with an arrow pointing to the Restore option of one of the datasets](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/datasets/hub_delete_dataset_3.jpg) diff --git a/ultralytics/docs/en/hub/datasets.md:Zone.Identifier b/ultralytics/docs/en/hub/datasets.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/datasets.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/index.md b/ultralytics/docs/en/hub/index.md new file mode 100755 index 0000000..6920953 --- /dev/null +++ b/ultralytics/docs/en/hub/index.md @@ -0,0 +1,61 @@ +--- +comments: true +description: Gain insights into training and deploying your YOLOv5 and YOLOv8 models with Ultralytics HUB. Explore pre-trained models, templates and various integrations. +keywords: Ultralytics HUB, YOLOv5, YOLOv8, model training, model deployment, pretrained models, model integrations +--- + +# Ultralytics HUB + + + Ultralytics HUB preview image +
+
+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+ + CI CPU + + Open In Colab +
+ +๐Ÿ‘‹ Hello from the [Ultralytics](https://ultralytics.com/) Team! We've been working hard these last few months to launch [Ultralytics HUB](https://bit.ly/ultralytics_hub), a new web tool for training and deploying all your YOLOv5 and YOLOv8 ๐Ÿš€ models from one spot! + +## Introduction + +HUB is designed to be user-friendly and intuitive, with a drag-and-drop interface that allows users to easily upload their data and train new models quickly. It offers a range of pre-trained models and templates to choose from, making it easy for users to get started with training their own models. Once a model is trained, it can be easily deployed and used for real-time object detection, instance segmentation and classification tasks. + +

+
+ +
+ Watch: Train Your Custom YOLO Models In A Few Clicks with Ultralytics HUB. +

+ +We hope that the resources here will help you get the most out of HUB. Please browse the HUB Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions! + +- [**Quickstart**](quickstart.md). Start training and deploying YOLO models with HUB in seconds. +- [**Datasets: Preparing and Uploading**](datasets.md). Learn how to prepare and upload your datasets to HUB in YOLO format. +- [**Projects: Creating and Managing**](projects.md). Group your models into projects for improved organization. +- [**Models: Training and Exporting**](models.md). Train YOLOv5 and YOLOv8 models on your custom datasets and export them to various formats for deployment. +- [**Integrations: Options**](integrations.md). Explore different integration options for your trained models, such as TensorFlow, ONNX, OpenVINO, CoreML, and PaddlePaddle. +- [**Ultralytics HUB App**](app/index.md). Learn about the Ultralytics App for iOS and Android, which allows you to run models directly on your mobile device. + * [**iOS**](app/ios.md). Learn about YOLO CoreML models accelerated on Apple's Neural Engine on iPhones and iPads. + * [**Android**](app/android.md). Explore TFLite acceleration on mobile devices. +- [**Inference API**](inference_api.md). Understand how to use the Inference API for running your trained models in the cloud to generate predictions. diff --git a/ultralytics/docs/en/hub/index.md:Zone.Identifier b/ultralytics/docs/en/hub/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/inference_api.md b/ultralytics/docs/en/hub/inference_api.md new file mode 100755 index 0000000..7ca6822 --- /dev/null +++ b/ultralytics/docs/en/hub/inference_api.md @@ -0,0 +1,458 @@ +--- +comments: true +description: Access object detection capabilities of YOLOv8 via our RESTful API. Learn how to use the YOLO Inference API with Python or CLI for swift object detection. +keywords: Ultralytics, YOLOv8, Inference API, object detection, RESTful API, Python, CLI, Quickstart +--- + +# YOLO Inference API + +The YOLO Inference API allows you to access the YOLOv8 object detection capabilities via a RESTful API. This enables you to run object detection on images without the need to install and set up the YOLOv8 environment locally. + +![Inference API Screenshot](https://github.com/ultralytics/ultralytics/assets/26833433/c0109ec0-7bb0-46e1-b0d2-bae687960a01) +Screenshot of the Inference API section in the trained model Preview tab. + +## API URL + +The API URL is the address used to access the YOLO Inference API. 
In this case, the base URL is:
+
+```
+https://api.ultralytics.com/v1/predict
+```
+
+## Example Usage in Python
+
+To access the YOLO Inference API with the specified model and API key using Python, you can use the following code:
+
+```python
+import requests
+
+# API URL, use actual MODEL_ID
+url = f"https://api.ultralytics.com/v1/predict/MODEL_ID"
+
+# Headers, use actual API_KEY
+headers = {"x-api-key": "API_KEY"}
+
+# Inference arguments (optional)
+data = {"size": 640, "confidence": 0.25, "iou": 0.45}
+
+# Load image and send request
+with open("path/to/image.jpg", "rb") as image_file:
+    files = {"image": image_file}
+    response = requests.post(url, headers=headers, files=files, data=data)
+
+print(response.json())
+```
+
+In this example, replace `API_KEY` with your actual API key, `MODEL_ID` with the desired model ID, and `path/to/image.jpg` with the path to the image you want to analyze.
+
+## Example Usage with CLI
+
+You can use the YOLO Inference API with the command-line interface (CLI) by utilizing the `curl` command. Replace `API_KEY` with your actual API key, `MODEL_ID` with the desired model ID, and `image.jpg` with the path to the image you want to analyze:
+
+```bash
+curl -X POST "https://api.ultralytics.com/v1/predict/MODEL_ID" \
+    -H "x-api-key: API_KEY" \
+    -F "image=@/path/to/image.jpg" \
+    -F "size=640" \
+    -F "confidence=0.25" \
+    -F "iou=0.45"
+```
+
+This command sends a POST request to the YOLO Inference API with the specified `MODEL_ID` in the URL and the `API_KEY` in the request `headers`, along with the image file specified by `@path/to/image.jpg`.
+
+## Passing Arguments
+
+Here's an example of passing the `size`, `confidence`, and `iou` arguments as request data using the `requests` library in Python:
+
+```python
+import requests
+
+# API URL, use actual MODEL_ID
+url = f"https://api.ultralytics.com/v1/predict/MODEL_ID"
+
+# Headers, use actual API_KEY
+headers = {"x-api-key": "API_KEY"}
+
+# Inference arguments (optional)
+data = {"size": 640, "confidence": 0.25, "iou": 0.45}
+
+# Load image and send request
+with open("path/to/image.jpg", "rb") as image_file:
+    files = {"image": image_file}
+    response = requests.post(url, headers=headers, files=files, data=data)
+
+print(response.json())
+```
+
+In this example, the `data` dictionary contains the query arguments `size`, `confidence`, and `iou`, which tell the API to run inference at image size 640 with confidence and IoU thresholds of 0.25 and 0.45.
+
+This will send the query parameters along with the file in the POST request. See the table below for a full list of available inference arguments.
+
+| Inference Argument | Default | Type    | Notes                                         |
+|--------------------|---------|---------|-----------------------------------------------|
+| `size`             | `640`   | `int`   | valid range is `32` - `1280` pixels           |
+| `confidence`       | `0.25`  | `float` | valid range is `0.01` - `1.0`                 |
+| `iou`              | `0.45`  | `float` | valid range is `0.0` - `0.95`                 |
+| `url`              | `''`    | `str`   | optional image URL if no image file is passed |
+| `normalize`        | `False` | `bool`  |                                               |
+
+## Return JSON Format
+
+The YOLO Inference API returns a JSON list with the detection results. The format of the JSON list will be the same as the one produced locally by the `results[0].tojson()` command.
+
+The JSON list contains information about the detected objects, their coordinates, classes, and confidence scores.
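+
+As a reference for consuming these results, here is a minimal sketch that parses a detect-format response (field names follow the JSON examples in the sections below; the box coordinates in those examples are fractions of the image dimensions):
+
+```python
+import requests
+
+# API URL and headers, use actual MODEL_ID and API_KEY
+url = "https://api.ultralytics.com/v1/predict/MODEL_ID"
+headers = {"x-api-key": "API_KEY"}
+
+# Send an image for inference
+with open("path/to/image.jpg", "rb") as image_file:
+    response = requests.post(url, headers=headers, files={"image": image_file})
+response.raise_for_status()
+
+# Iterate over the detections in the "data" list
+for detection in response.json().get("data", []):
+    box = detection["box"]
+    print(f'{detection["name"]} ({detection["confidence"]:.2f}): '
+          f'({box["x1"]:.3f}, {box["y1"]:.3f}) to ({box["x2"]:.3f}, {box["y2"]:.3f})')
+```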
+ +### Detect Model Format + +YOLO detection models, such as `yolov8n.pt`, can return JSON responses from local inference, CLI API inference, and Python API inference. All of these methods produce the same JSON response format. + +!!! Example "Detect Model JSON Response" + + === "Local" + ```python + from ultralytics import YOLO + + # Load model + model = YOLO('yolov8n.pt') + + # Run inference + results = model('image.jpg') + + # Print image.jpg results in JSON format + print(results[0].tojson()) + ``` + + === "CLI API" + ```bash + curl -X POST "https://api.ultralytics.com/v1/predict/MODEL_ID" \ + -H "x-api-key: API_KEY" \ + -F "image=@/path/to/image.jpg" \ + -F "size=640" \ + -F "confidence=0.25" \ + -F "iou=0.45" + ``` + + === "Python API" + ```python + import requests + + # API URL, use actual MODEL_ID + url = f"https://api.ultralytics.com/v1/predict/MODEL_ID" + + # Headers, use actual API_KEY + headers = {"x-api-key": "API_KEY"} + + # Inference arguments (optional) + data = {"size": 640, "confidence": 0.25, "iou": 0.45} + + # Load image and send request + with open("path/to/image.jpg", "rb") as image_file: + files = {"image": image_file} + response = requests.post(url, headers=headers, files=files, data=data) + + print(response.json()) + ``` + + === "JSON Response" + ```json + { + "success": True, + "message": "Inference complete.", + "data": [ + { + "name": "person", + "class": 0, + "confidence": 0.8359682559967041, + "box": { + "x1": 0.08974208831787109, + "y1": 0.27418340047200523, + "x2": 0.8706787109375, + "y2": 0.9887352837456598 + } + }, + { + "name": "person", + "class": 0, + "confidence": 0.8189555406570435, + "box": { + "x1": 0.5847355842590332, + "y1": 0.05813225640190972, + "x2": 0.8930277824401855, + "y2": 0.9903111775716146 + } + }, + { + "name": "tie", + "class": 27, + "confidence": 0.2909725308418274, + "box": { + "x1": 0.3433395862579346, + "y1": 0.6070465511745877, + "x2": 0.40964522361755373, + "y2": 0.9849439832899306 + } + } + ] + } + ``` + +### Segment Model Format + +YOLO segmentation models, such as `yolov8n-seg.pt`, can return JSON responses from local inference, CLI API inference, and Python API inference. All of these methods produce the same JSON response format. + +!!! Example "Segment Model JSON Response" + + === "Local" + ```python + from ultralytics import YOLO + + # Load model + model = YOLO('yolov8n-seg.pt') + + # Run inference + results = model('image.jpg') + + # Print image.jpg results in JSON format + print(results[0].tojson()) + ``` + + === "CLI API" + ```bash + curl -X POST "https://api.ultralytics.com/v1/predict/MODEL_ID" \ + -H "x-api-key: API_KEY" \ + -F "image=@/path/to/image.jpg" \ + -F "size=640" \ + -F "confidence=0.25" \ + -F "iou=0.45" + ``` + + === "Python API" + ```python + import requests + + # API URL, use actual MODEL_ID + url = f"https://api.ultralytics.com/v1/predict/MODEL_ID" + + # Headers, use actual API_KEY + headers = {"x-api-key": "API_KEY"} + + # Inference arguments (optional) + data = {"size": 640, "confidence": 0.25, "iou": 0.45} + + # Load image and send request + with open("path/to/image.jpg", "rb") as image_file: + files = {"image": image_file} + response = requests.post(url, headers=headers, files=files, data=data) + + print(response.json()) + ``` + + === "JSON Response" + Note `segments` `x` and `y` lengths may vary from one object to another. Larger or more complex objects may have more segment points. 
+        ```json
+        {
+          "success": True,
+          "message": "Inference complete.",
+          "data": [
+            {
+              "name": "person",
+              "class": 0,
+              "confidence": 0.856913149356842,
+              "box": {
+                "x1": 0.1064866065979004,
+                "y1": 0.2798851860894097,
+                "x2": 0.8738358497619629,
+                "y2": 0.9894873725043403
+              },
+              "segments": {
+                "x": [
+                  0.421875,
+                  0.4203124940395355,
+                  0.41718751192092896
+                  ...
+                ],
+                "y": [
+                  0.2888889014720917,
+                  0.2916666567325592,
+                  0.2916666567325592
+                  ...
+                ]
+              }
+            },
+            {
+              "name": "person",
+              "class": 0,
+              "confidence": 0.8512625694274902,
+              "box": {
+                "x1": 0.5757311820983887,
+                "y1": 0.053943040635850696,
+                "x2": 0.8960096359252929,
+                "y2": 0.985154045952691
+              },
+              "segments": {
+                "x": [
+                  0.7515624761581421,
+                  0.75,
+                  0.7437499761581421
+                  ...
+                ],
+                "y": [
+                  0.0555555559694767,
+                  0.05833333358168602,
+                  0.05833333358168602
+                  ...
+                ]
+              }
+            },
+            {
+              "name": "tie",
+              "class": 27,
+              "confidence": 0.6485961675643921,
+              "box": {
+                "x1": 0.33911995887756347,
+                "y1": 0.6057066175672743,
+                "x2": 0.4081430912017822,
+                "y2": 0.9916408962673611
+              },
+              "segments": {
+                "x": [
+                  0.37187498807907104,
+                  0.37031251192092896,
+                  0.3687500059604645
+                  ...
+                ],
+                "y": [
+                  0.6111111044883728,
+                  0.6138888597488403,
+                  0.6138888597488403
+                  ...
+                ]
+              }
+            }
+          ]
+        }
+        ```
+
+### Pose Model Format
+
+YOLO pose models, such as `yolov8n-pose.pt`, can return JSON responses from local inference, CLI API inference, and Python API inference. All of these methods produce the same JSON response format.
+
+!!! Example "Pose Model JSON Response"
+
+    === "Local"
+        ```python
+        from ultralytics import YOLO
+
+        # Load model
+        model = YOLO('yolov8n-pose.pt')
+
+        # Run inference
+        results = model('image.jpg')
+
+        # Print image.jpg results in JSON format
+        print(results[0].tojson())
+        ```
+
+    === "CLI API"
+        ```bash
+        curl -X POST "https://api.ultralytics.com/v1/predict/MODEL_ID" \
+            -H "x-api-key: API_KEY" \
+            -F "image=@/path/to/image.jpg" \
+            -F "size=640" \
+            -F "confidence=0.25" \
+            -F "iou=0.45"
+        ```
+
+    === "Python API"
+        ```python
+        import requests
+
+        # API URL, use actual MODEL_ID
+        url = f"https://api.ultralytics.com/v1/predict/MODEL_ID"
+
+        # Headers, use actual API_KEY
+        headers = {"x-api-key": "API_KEY"}
+
+        # Inference arguments (optional)
+        data = {"size": 640, "confidence": 0.25, "iou": 0.45}
+
+        # Load image and send request
+        with open("path/to/image.jpg", "rb") as image_file:
+            files = {"image": image_file}
+            response = requests.post(url, headers=headers, files=files, data=data)
+
+        print(response.json())
+        ```
+
+    === "JSON Response"
+        Note COCO-keypoints pretrained models will have 17 human keypoints. The `visible` part of the keypoints indicates whether a keypoint is visible or obscured. Obscured keypoints may be outside the image or may not be visible, i.e. a person's eyes facing away from the camera.
+        ```json
+        {
+          "success": True,
+          "message": "Inference complete.",
+          "data": [
+            {
+              "name": "person",
+              "class": 0,
+              "confidence": 0.8439509868621826,
+              "box": {
+                "x1": 0.1125,
+                "y1": 0.28194444444444444,
+                "x2": 0.7953125,
+                "y2": 0.9902777777777778
+              },
+              "keypoints": {
+                "x": [
+                  0.5058594942092896,
+                  0.5103894472122192,
+                  0.4920862317085266
+                  ...
+                ],
+                "y": [
+                  0.48964157700538635,
+                  0.4643048942089081,
+                  0.4465252459049225
+                  ...
+                ],
+                "visible": [
+                  0.8726999163627625,
+                  0.653947651386261,
+                  0.9130823612213135
+                  ...
+ ] + } + }, + { + "name": "person", + "class": 0, + "confidence": 0.7474289536476135, + "box": { + "x1": 0.58125, + "y1": 0.0625, + "x2": 0.8859375, + "y2": 0.9888888888888889 + }, + "keypoints": { + "x": [ + 0.778544008731842, + 0.7976160049438477, + 0.7530890107154846 + ... + ], + "y": [ + 0.27595141530036926, + 0.2378823608160019, + 0.23644638061523438 + ... + ], + "visible": [ + 0.8900790810585022, + 0.789978563785553, + 0.8974530100822449 + ... + ] + } + } + ] + } + ``` diff --git a/ultralytics/docs/en/hub/inference_api.md:Zone.Identifier b/ultralytics/docs/en/hub/inference_api.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/inference_api.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/integrations.md b/ultralytics/docs/en/hub/integrations.md new file mode 100755 index 0000000..d8470cf --- /dev/null +++ b/ultralytics/docs/en/hub/integrations.md @@ -0,0 +1,62 @@ +--- +comments: true +description: Explore integration options for Ultralytics HUB. Currently featuring Roboflow for dataset integration and multiple export formats for your trained models. +keywords: Ultralytics HUB, Integrations, Roboflow, Dataset, Export, YOLOv5, YOLOv8, ONNX, CoreML, TensorRT, TensorFlow +--- + +# HUB Integrations + +๐Ÿšง **Under Construction** ๐Ÿšง + +Welcome to the Integrations guide for [Ultralytics HUB](https://hub.ultralytics.com/)! We are in the process of expanding this section to provide you with comprehensive guidance on integrating your YOLOv5 and YOLOv8 models with various platforms and formats. Currently, Roboflow is our available dataset integration, with a wide range of export integrations for your trained models. + +

+
+ +
+ Watch: Train Your Custom YOLO Models In A Few Clicks with Ultralytics HUB. +

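+
+As a quick reference while this page is under construction: every export format in the table below is produced from a trained model with a single `export` call. A minimal sketch, with `yolov8n.pt` standing in for your own trained weights:
+
+```python
+from ultralytics import YOLO
+
+# Load a trained model (placeholder weights)
+model = YOLO('yolov8n.pt')
+
+# Export using any `format` value from the table below
+model.export(format='onnx')  # e.g. creates yolov8n.onnx
+```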
+ +## Available Integrations + +### Dataset Integrations + +- **Roboflow**: Seamlessly import your datasets for training. + +### Export Integrations + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](../integrations/openvino.md) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [NCNN](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +## Coming Soon + +- Additional Dataset Integrations +- Detailed Export Integration Guides +- Step-by-Step Tutorials for Each Integration + +## Need Immediate Assistance? + +While we're in the process of creating detailed guides: + +- Browse through other [HUB Docs](https://docs.ultralytics.com/hub/) for detailed guides and tutorials. +- Raise an issue on our [GitHub](https://github.com/ultralytics/hub/) for technical support. +- Join our [Discord Community](https://ultralytics.com/discord/) for live discussions and community support. + +We appreciate your patience as we work to make this section comprehensive and user-friendly. Stay tuned for updates! diff --git a/ultralytics/docs/en/hub/integrations.md:Zone.Identifier b/ultralytics/docs/en/hub/integrations.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/integrations.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/models.md b/ultralytics/docs/en/hub/models.md new file mode 100755 index 0000000..41091ad --- /dev/null +++ b/ultralytics/docs/en/hub/models.md @@ -0,0 +1,213 @@ +--- +comments: true +description: Learn how to use Ultralytics HUB models for efficient and user-friendly AI model training. For easy model creation, training, evaluation and deployment, follow our detailed guide. +keywords: Ultralytics, HUB Models, AI model training, model creation, model training, model evaluation, model deployment +--- + +# Ultralytics HUB Models + +[Ultralytics HUB](https://hub.ultralytics.com/) models provide a streamlined solution for training vision AI models on your custom datasets. 
+
+The process is user-friendly and efficient, involving a simple three-step creation process and accelerated training powered by Ultralytics YOLOv8. During training, real-time updates on model metrics are available so that you can monitor each step of the training progress. Once training is completed, you can preview your model and easily deploy it to real-world applications. Therefore, Ultralytics HUB offers a comprehensive yet straightforward system for model creation, training, evaluation, and deployment.
+
+## Train Model
+
+Navigate to the [Models](https://hub.ultralytics.com/models) page by clicking on the **Models** button in the sidebar.
+
+![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Models button in the sidebar](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_1.jpg)
+
+??? tip "Tip"
+
+    You can also train a model directly from the [Home](https://hub.ultralytics.com/home) page.
+
+    ![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Train Model card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_2.jpg)
+
+Click on the **Train Model** button on the top right of the page. This action will trigger the **Train Model** dialog.
+
+![Ultralytics HUB screenshot of the Models page with an arrow pointing to the Train Model button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_3.jpg)
+
+The **Train Model** dialog has three simple steps, explained below.
+
+### 1. Dataset
+
+In this step, you have to select the dataset you want to train your model on. After you have selected a dataset, click **Continue**.
+
+![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to a dataset and one to the Continue button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_4.jpg)
+
+??? tip "Tip"
+
+    You can skip this step if you train a model directly from the Dataset page.
+
+    ![Ultralytics HUB screenshot of the Dataset page with an arrow pointing to the Train Model button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_5.jpg)
+
+### 2. Model
+
+In this step, you have to choose the project in which you want to create your model, the name of your model, and your model's architecture.
+
+??? note "Note"
+
+    Ultralytics HUB will try to pre-select the project.
+
+    If you opened the **Train Model** dialog as described above, Ultralytics HUB will pre-select the last project you used.
+
+    If you opened the **Train Model** dialog from the Project page, Ultralytics HUB will pre-select the project you were in.
+
+    ![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Train Model button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_6.jpg)
+
+    In case you don't have a project created yet, you can set the name of your project in this step and it will be created together with your model.
+
+    ![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to the project name](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_7.jpg)
+
+!!! Info "Info"
+
+    You can read more about the available [YOLOv8](https://docs.ultralytics.com/models/yolov8) (and [YOLOv5](https://docs.ultralytics.com/models/yolov5)) architectures in our documentation.
+
+When you're happy with your model configuration, click **Continue**.
+ +![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to a model architecture and one to the Continue button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_8.jpg) + +??? note "Note" + + By default, your model will use a pre-trained model (trained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco) dataset) to reduce training time. + + You can change this behaviour by opening the **Advanced Options** accordion. + +### 3. Train + +In this step, you will start training you model. + +Ultralytics HUB offers three training options: + +- Ultralytics Cloud **(COMING SOON)** +- Google Colab +- Bring your own agent + +In order to start training your model, follow the instructions presented in this step. + +![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to each step](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_9.jpg) + +??? note "Note" + + When you are on this step, before the training starts, you can change the default training configuration by opening the **Advanced Options** accordion. + + ![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to the Train Advanced Options](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_10.jpg) + +??? note "Note" + + When you are on this step, you have the option to close the **Train Model** dialog and start training your model from the Model page later. + + ![Ultralytics HUB screenshot of the Model page with an arrow pointing to the Start Training card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_11.jpg) + +To start training your model using Google Colab, simply follow the instructions shown above or on the Google Colab notebook. + + + Open In Colab + + +When the training starts, you can click **Done** and monitor the training progress on the Model page. + +![Ultralytics HUB screenshot of the Train Model dialog with an arrow pointing to the Done button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_12.jpg) + +![Ultralytics HUB screenshot of the Model page of a model that is currently training](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_13.jpg) + +??? note "Note" + + In case the training stops and a checkpoint was saved, you can resume training your model from the Model page. + + ![Ultralytics HUB screenshot of the Model page with an arrow pointing to the Resume Training card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_train_model_14.jpg) + +## Preview Model + +Ultralytics HUB offers a variety of ways to preview your trained model. + +You can preview your model if you click on the **Preview** tab and upload an image in the **Test** card. + +![Ultralytics HUB screenshot of the Preview tab (Test card) inside the Model page](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_preview_model_1.jpg) + +You can also use our Ultralytics Cloud API to effortlessly [run inference](https://docs.ultralytics.com/hub/inference_api) with your custom model. 
+ +![Ultralytics HUB screenshot of the Preview tab (Ultralytics Cloud API card) inside the Model page](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_preview_model_2.jpg) + +Furthermore, you can preview your model in real-time directly on your [iOS](https://apps.apple.com/xk/app/ultralytics/id1583935240) or [Android](https://play.google.com/store/apps/details?id=com.ultralytics.ultralytics_app) mobile device by [downloading](https://ultralytics.com/app_install) our [Ultralytics HUB Mobile Application](app/index.md). + +![Ultralytics HUB screenshot of the Deploy tab inside the Model page with arrow pointing to the Real-Time Preview card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_preview_model_3.jpg) + +## Deploy Model + +You can export your model to 13 different formats, including ONNX, OpenVINO, CoreML, TensorFlow, Paddle and many others. + +![Ultralytics HUB screenshot of the Deploy tab inside the Model page with all formats exported](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_deploy_model_1.jpg) + +??? tip "Tip" + + You can customize the export options of each format if you open the export actions dropdown and click on the **Advanced** option. + + ![Ultralytics HUB screenshot of the Deploy tab inside the Model page with an arrow pointing to the Advanced option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_deploy_model_2.jpg) + +## Share Model + +!!! Info "Info" + + Ultralytics HUB's sharing functionality provides a convenient way to share models with others. This feature is designed to accommodate both existing Ultralytics HUB users and those who have yet to create an account. + +??? note "Note" + + You have control over the general access of your models. + + You can choose to set the general access to "Private", in which case, only you will have access to it. Alternatively, you can set the general access to "Unlisted" which grants viewing access to anyone who has the direct link to the model, regardless of whether they have an Ultralytics HUB account or not. + +Navigate to the Model page of the model you want to share, open the model actions dropdown and click on the **Share** option. This action will trigger the **Share Model** dialog. + +![Ultralytics HUB screenshot of the Model page with an arrow pointing to the Share option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_share_model_1.jpg) + +??? tip "Tip" + + You can also share a model directly from the [Models](https://hub.ultralytics.com/models) page or from the Project page of the project where your model is located. + + ![Ultralytics HUB screenshot of the Models page with an arrow pointing to the Share option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_share_model_2.jpg) + +Set the general access to "Unlisted" and click **Save**. + +![Ultralytics HUB screenshot of the Share Model dialog with an arrow pointing to the dropdown and one to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_share_model_3.jpg) + +Now, anyone who has the direct link to your model can view it. + +??? tip "Tip" + + You can easily click on the model's link shown in the **Share Model** dialog to copy it. 
+ + ![Ultralytics HUB screenshot of the Share Model dialog with an arrow pointing to the model's link](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_share_model_4.jpg) + +## Edit Model + +Navigate to the Model page of the model you want to edit, open the model actions dropdown and click on the **Edit** option. This action will trigger the **Update Model** dialog. + +![Ultralytics HUB screenshot of the Model page with an arrow pointing to the Edit option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_edit_model_1.jpg) + +??? tip "Tip" + + You can also edit a model directly from the [Models](https://hub.ultralytics.com/models) page or from the Project page of the project where your model is located. + + ![Ultralytics HUB screenshot of the Models page with an arrow pointing to the Edit option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_edit_model_2.jpg) + +Apply the desired modifications to your model and then confirm the changes by clicking **Save**. + +![Ultralytics HUB screenshot of the Update Model dialog with an arrow pointing to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_edit_model_3.jpg) + +## Delete Model + +Navigate to the Model page of the model you want to delete, open the model actions dropdown and click on the **Delete** option. This action will delete the model. + +![Ultralytics HUB screenshot of the Model page with an arrow pointing to the Delete option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_delete_model_1.jpg) + +??? tip "Tip" + + You can also delete a model directly from the [Models](https://hub.ultralytics.com/models) page or from the Project page of the project where your model is located. + + ![Ultralytics HUB screenshot of the Models page with an arrow pointing to the Delete option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_delete_model_2.jpg) + +??? note "Note" + + If you change your mind, you can restore the model from the [Trash](https://hub.ultralytics.com/trash) page. + + ![Ultralytics HUB screenshot of the Trash page with an arrow pointing to the Restore option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/models/hub_delete_model_3.jpg) diff --git a/ultralytics/docs/en/hub/models.md:Zone.Identifier b/ultralytics/docs/en/hub/models.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/models.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/projects.md b/ultralytics/docs/en/hub/projects.md new file mode 100755 index 0000000..c0cbf20 --- /dev/null +++ b/ultralytics/docs/en/hub/projects.md @@ -0,0 +1,180 @@ +--- +comments: true +description: Learn how to manage Ultralytics HUB projects. Understand effective strategies to create, share, edit, delete, and compare models in an organized workspace. +keywords: Ultralytics, HUB projects, Create project, Edit project, Share project, Delete project, Compare Models, Model Management +--- + +# Ultralytics HUB Projects + +[Ultralytics HUB](https://hub.ultralytics.com/) projects provide an effective solution for consolidating and managing your models. If you are working with several models that perform similar tasks or have related purposes, Ultralytics HUB projects allow you to group these models together. 
+ +This creates a unified and organized workspace that facilitates easier model management, comparison and development. Having similar models or various iterations together can facilitate rapid benchmarking, as you can compare their effectiveness. This can lead to faster, more insightful iterative development and refinement of your models. + +

+
+ +
+ Watch: Train YOLOv8 Pose Model on Tiger-Pose Dataset Using Ultralytics HUB +

+ +## Create Project + +Navigate to the [Projects](https://hub.ultralytics.com/projects) page by clicking on the **Projects** button in the sidebar. + +![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Projects button in the sidebar](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_1.jpg) + +??? tip "Tip" + + You can also create a project directly from the [Home](https://hub.ultralytics.com/home) page. + + ![Ultralytics HUB screenshot of the Home page with an arrow pointing to the Create Project card](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_2.jpg) + +Click on the **Create Project** button on the top right of the page. This action will trigger the **Create Project** dialog, opening up a suite of options for tailoring your project to your needs. + +![Ultralytics HUB screenshot of the Projects page with an arrow pointing to the Create Project button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_3.jpg) + +Type the name of your project in the _Project name_ field or keep the default name and finalize the project creation with a single click. + +You have the additional option to enrich your project with a description and a unique image, enhancing its recognizability on the Projects page. + +When you're happy with your project configuration, click **Create**. + +![Ultralytics HUB screenshot of the Create Project dialog with an arrow pointing to the Create button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_4.jpg) + +After your project is created, you will be able to access it from the Projects page. + +![Ultralytics HUB screenshot of the Projects page with an arrow pointing to one of the projects](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_5.jpg) + +Next, [train a model](https://docs.ultralytics.com/hub/models/#train-model) inside your project. + +![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Train Model button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_create_project_6.jpg) + +## Share Project + +!!! Info "Info" + + Ultralytics HUB's sharing functionality provides a convenient way to share projects with others. This feature is designed to accommodate both existing Ultralytics HUB users and those who have yet to create an account. + +??? note "Note" + + You have control over the general access of your projects. + + You can choose to set the general access to "Private", in which case, only you will have access to it. Alternatively, you can set the general access to "Unlisted" which grants viewing access to anyone who has the direct link to the project, regardless of whether they have an Ultralytics HUB account or not. + +Navigate to the Project page of the project you want to share, open the project actions dropdown and click on the **Share** option. This action will trigger the **Share Project** dialog. + +![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Share option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_share_project_1.jpg) + +??? tip "Tip" + + You can also share a project directly from the [Projects](https://hub.ultralytics.com/projects) page. 
+ + ![Ultralytics HUB screenshot of the Projects page with an arrow pointing to the Share option of one of the projects](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_share_project_2.jpg) + +Set the general access to "Unlisted" and click **Save**. + +![Ultralytics HUB screenshot of the Share Project dialog with an arrow pointing to the dropdown and one to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_share_project_3.jpg) + +!!! Warning "Warning" + + When changing the general access of a project, the general access of the models inside the project will be changed as well. + +Now, anyone who has the direct link to your project can view it. + +??? tip "Tip" + + You can easily click on the project's link shown in the **Share Project** dialog to copy it. + + ![Ultralytics HUB screenshot of the Share Project dialog with an arrow pointing to the project's link](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_share_project_4.jpg) + +## Edit Project + +Navigate to the Project page of the project you want to edit, open the project actions dropdown and click on the **Edit** option. This action will trigger the **Update Project** dialog. + +![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Edit option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_edit_project_1.jpg) + +??? tip "Tip" + + You can also edit a project directly from the [Projects](https://hub.ultralytics.com/projects) page. + + ![Ultralytics HUB screenshot of the Projects page with an arrow pointing to the Edit option of one of the projects](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_edit_project_2.jpg) + +Apply the desired modifications to your project and then confirm the changes by clicking **Save**. + +![Ultralytics HUB screenshot of the Update Project dialog with an arrow pointing to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_edit_project_3.jpg) + +## Delete Project + +Navigate to the Project page of the project you want to delete, open the project actions dropdown and click on the **Delete** option. This action will delete the project. + +![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Delete option](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_delete_project_1.jpg) + +??? tip "Tip" + + You can also delete a project directly from the [Projects](https://hub.ultralytics.com/projects) page. + + ![Ultralytics HUB screenshot of the Projects page with an arrow pointing to the Delete option of one of the projects](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_delete_project_2.jpg) + +!!! Warning "Warning" + + When deleting a project, the models inside the project will be deleted as well. + +??? note "Note" + + If you change your mind, you can restore the project from the [Trash](https://hub.ultralytics.com/trash) page. + + ![Ultralytics HUB screenshot of the Trash page with an arrow pointing to the Restore option of one of the projects](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_delete_project_3.jpg) + +## Compare Models + +Navigate to the Project page of the project where the models you want to compare are located. To use the model comparison feature, click on the **Charts** tab. 
+
+![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Charts tab](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_compare_models_1.jpg)
+
+This will display all the relevant charts. Each chart corresponds to a different metric and shows the performance of each model for that metric. The models are represented by different colors, and you can hover over each data point to get more information.
+
+![Ultralytics HUB screenshot of the Charts tab inside the Project page](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_compare_models_2.jpg)
+
+??? tip "Tip"
+
+    Each chart can be enlarged for better visualization.
+
+    ![Ultralytics HUB screenshot of the Charts tab inside the Project page with an arrow pointing to the expand icon](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_compare_models_3.jpg)
+
+    ![Ultralytics HUB screenshot of the Charts tab inside the Project page with one of the charts expanded](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_compare_models_4.jpg)
+
+??? tip "Tip"
+
+    You have the flexibility to customize your view by selectively hiding certain models. This feature allows you to concentrate on the models of interest.
+
+    ![Ultralytics HUB screenshot of the Charts tab inside the Project page with an arrow pointing to the hide/unhide icon of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_compare_models_5.jpg)
+
+## Reorder Models
+
+??? note "Note"
+
+    Ultralytics HUB's reordering functionality works only inside projects you own.
+
+Navigate to the Project page of the project where the models you want to reorder are located. Click on the designated reorder icon of the model you want to move and drag it to the desired location.
+
+![Ultralytics HUB screenshot of the Project page with an arrow pointing to the reorder icon](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_reorder_models_1.jpg)
+
+## Transfer Models
+
+Navigate to the Project page of the project where the model you want to move is located, open the model actions dropdown and click on the **Transfer** option. This action will trigger the **Transfer Model** dialog.
+
+![Ultralytics HUB screenshot of the Project page with an arrow pointing to the Transfer option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_transfer_models_1.jpg)
+
+??? tip "Tip"
+
+    You can also transfer a model directly from the [Models](https://hub.ultralytics.com/models) page.
+
+    ![Ultralytics HUB screenshot of the Models page with an arrow pointing to the Transfer option of one of the models](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_transfer_models_2.jpg)
+
+Select the project you want to transfer the model to and click **Save**.
+ +![Ultralytics HUB screenshot of the Transfer Model dialog with an arrow pointing to the dropdown and one to the Save button](https://raw.githubusercontent.com/ultralytics/assets/main/docs/hub/projects/hub_transfer_models_3.jpg) diff --git a/ultralytics/docs/en/hub/projects.md:Zone.Identifier b/ultralytics/docs/en/hub/projects.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/projects.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/hub/quickstart.md b/ultralytics/docs/en/hub/quickstart.md new file mode 100755 index 0000000..3728dc4 --- /dev/null +++ b/ultralytics/docs/en/hub/quickstart.md @@ -0,0 +1,52 @@ +--- +comments: true +description: Kickstart your journey with Ultralytics HUB. Learn how to train and deploy YOLOv5 and YOLOv8 models in seconds with our Quickstart guide. +keywords: Ultralytics HUB, Quickstart, YOLOv5, YOLOv8, model training, quick deployment, drag-and-drop interface, real-time object detection +--- + +# Quickstart Guide for Ultralytics HUB + +๐Ÿšง **Under Construction** ๐Ÿšง + +Thank you for visiting the Quickstart guide for [Ultralytics HUB](https://hub.ultralytics.com/)! We're currently hard at work building out this page to provide you with step-by-step instructions on how to get up and running with HUB in no time. + +

+
+ +
+ Watch: Train Your Custom YOLO Models In A Few Clicks with Ultralytics HUB. +

+ +In the meantime, here's a brief overview of what you can expect from Ultralytics HUB: + +## What is Ultralytics HUB? + +Ultralytics HUB is your one-stop solution for training and deploying YOLOv5 and YOLOv8 models. It's designed with user experience in mind, featuring a drag-and-drop interface to make uploading data and training new models a breeze. Whether you're a beginner or an experienced machine learning practitioner, HUB has a range of pre-trained models and templates to accelerate your projects. + +## Key Features + +- **User-Friendly Interface**: Simply drag and drop your data to start training. +- **Pre-Trained Models**: Choose from a selection of pre-trained models to kick-start your projects. +- **Real-Time Object Detection**: Deploy trained models easily for real-time object detection, instance segmentation, and classification tasks. + +## Coming Soon + +- Detailed Steps to Start Your First Project +- Guide on Preparing and Uploading Datasets +- Tutorial on Model Training and Exporting +- Integration Options and How-To's +- And much more! + +## Need Help Now? + +While we're polishing this page, feel free to: + +- Browse through other [HUB Docs](https://docs.ultralytics.com/hub/) for detailed guides and tutorials. +- Raise an issue on our [GitHub](https://github.com/ultralytics/hub/) for technical support. +- Join our [Discord Community](https://ultralytics.com/discord/) for live discussions and community support. + +Stay tuned! We'll be back soon with more detailed information to help you get the most out of Ultralytics HUB. Thank you for your patience and interest! diff --git a/ultralytics/docs/en/hub/quickstart.md:Zone.Identifier b/ultralytics/docs/en/hub/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/hub/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/index.md b/ultralytics/docs/en/index.md new file mode 100755 index 0000000..4f4b0d6 --- /dev/null +++ b/ultralytics/docs/en/index.md @@ -0,0 +1,79 @@ +--- +comments: true +description: Explore a complete guide to Ultralytics YOLOv8, a high-speed, high-accuracy object detection & image segmentation model. Installation, prediction, training tutorials and more. +keywords: Ultralytics, YOLOv8, object detection, image segmentation, machine learning, deep learning, computer vision, YOLOv8 installation, YOLOv8 prediction, YOLOv8 training, YOLO history, YOLO licenses +--- + +
+

+ + Ultralytics YOLO banner +

+ Ultralytics GitHub + space + Ultralytics LinkedIn + space + Ultralytics Twitter + space + Ultralytics YouTube + space + Ultralytics TikTok + space + Ultralytics Instagram + space + Ultralytics Discord +
+
+ Ultralytics CI + Ultralytics Code Coverage + YOLOv8 Citation + Docker Pulls + Discord +
+ Run on Gradient + Open In Colab + Open In Kaggle +
+
+Introducing [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), the latest version of the acclaimed real-time object detection and image segmentation model. YOLOv8 is built on cutting-edge advancements in deep learning and computer vision, offering unparalleled performance in terms of speed and accuracy. Its streamlined design makes it suitable for various applications and easily adaptable to different hardware platforms, from edge devices to cloud APIs.
+
+Explore the YOLOv8 Docs, a comprehensive resource designed to help you understand and utilize its features and capabilities. Whether you are a seasoned machine learning practitioner or new to the field, this hub aims to maximize YOLOv8's potential in your projects.
+
+## Where to Start
+
+- **Install** `ultralytics` with pip and get up and running in minutes &nbsp; [:material-clock-fast: Get Started](quickstart.md){ .md-button }
+- **Predict** new images and videos with YOLOv8 &nbsp; [:octicons-image-16: Predict on Images](modes/predict.md){ .md-button }
+- **Train** a new YOLOv8 model on your own custom dataset &nbsp; [:fontawesome-solid-brain: Train a Model](modes/train.md){ .md-button }
+- **Explore** YOLOv8 tasks like segment, classify, pose and track &nbsp; [:material-magnify-expand: Explore Tasks](tasks/index.md){ .md-button }
+
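+To see what that first prediction step looks like in practice, here is a minimal sketch that loads a pretrained checkpoint and runs it on a sample image; it assumes the `yolov8n.pt` weights can be downloaded automatically and uses a public Ultralytics sample image.
+
+```python
+from ultralytics import YOLO
+
+# Load a pretrained YOLOv8n model (weights download automatically on first use)
+model = YOLO('yolov8n.pt')
+
+# Run prediction on a sample image and inspect the detected bounding boxes
+results = model('https://ultralytics.com/images/bus.jpg')
+for result in results:
+    print(result.boxes)
+```
+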

+
+ +
+ Watch: How to Train a YOLOv8 model on Your Custom Dataset in Google Colab. +

+
+## YOLO: A Brief History
+
+[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), a popular object detection and image segmentation model, was developed by Joseph Redmon and Ali Farhadi at the University of Washington. Launched in 2015, YOLO quickly gained popularity for its high speed and accuracy.
+
+- [YOLOv2](https://arxiv.org/abs/1612.08242), released in 2016, improved the original model by incorporating batch normalization, anchor boxes, and dimension clusters.
+- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), launched in 2018, further enhanced the model's performance using a more efficient backbone network, multiple anchors and spatial pyramid pooling.
+- [YOLOv4](https://arxiv.org/abs/2004.10934) was released in 2020, introducing innovations like Mosaic data augmentation, a new anchor-free detection head, and a new loss function.
+- [YOLOv5](https://github.com/ultralytics/yolov5) further improved the model's performance and added new features such as hyperparameter optimization, integrated experiment tracking and automatic export to popular export formats.
+- [YOLOv6](https://github.com/meituan/YOLOv6) was open-sourced by [Meituan](https://about.meituan.com/) in 2022 and is in use in many of the company's autonomous delivery robots.
+- [YOLOv7](https://github.com/WongKinYiu/yolov7) added additional tasks such as pose estimation on the COCO keypoints dataset.
+- [YOLOv8](https://github.com/ultralytics/ultralytics) is the latest version of YOLO by Ultralytics. As a cutting-edge, state-of-the-art (SOTA) model, YOLOv8 builds on the success of previous versions, introducing new features and improvements for enhanced performance, flexibility, and efficiency. YOLOv8 supports a full range of vision AI tasks, including [detection](tasks/detect.md), [segmentation](tasks/segment.md), [pose estimation](tasks/pose.md), [tracking](modes/track.md), and [classification](tasks/classify.md). This versatility allows users to leverage YOLOv8's capabilities across diverse applications and domains.
+
+## YOLO Licenses: How is Ultralytics YOLO licensed?
+
+Ultralytics offers two licensing options to accommodate diverse use cases:
+
+- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
+- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
+
+Our licensing strategy is designed to ensure that any improvements to our open-source projects are returned to the community. We hold the principles of open source close to our hearts ❤️, and our mission is to guarantee that our contributions can be utilized and expanded upon in ways that are beneficial to all.
diff --git a/ultralytics/docs/en/index.md:Zone.Identifier b/ultralytics/docs/en/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/clearml.md b/ultralytics/docs/en/integrations/clearml.md new file mode 100755 index 0000000..ca3e85f --- /dev/null +++ b/ultralytics/docs/en/integrations/clearml.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Learn how to streamline and optimize your YOLOv8 model training with ClearML. This guide provides insights into integrating ClearML's MLOps tools for efficient model training, from initial setup to advanced experiment tracking and model management. +keywords: Ultralytics, YOLOv8, Object Detection, ClearML, Model Training, MLOps, Experiment Tracking, Workflow Optimization +--- + +# Training YOLOv8 with ClearML: Streamlining Your MLOps Workflow + +MLOps bridges the gap between creating and deploying machine learning models in real-world settings. It focuses on efficient deployment, scalability, and ongoing management to ensure models perform well in practical applications. + +[Ultralytics YOLOv8](https://ultralytics.com) effortlessly integrates with ClearML, streamlining and enhancing your object detection model's training and management. This guide will walk you through the integration process, detailing how to set up ClearML, manage experiments, automate model management, and collaborate effectively. + +## ClearML + +

+ ClearML Overview +

+
+[ClearML](https://clear.ml/) is an innovative open-source MLOps platform that is skillfully designed to automate, monitor, and orchestrate machine learning workflows. Its key features include automated logging of all training and inference data for full experiment reproducibility, an intuitive web UI for easy data visualization and analysis, advanced hyperparameter optimization algorithms, and robust model management for efficient deployment across various platforms.
+
+## YOLOv8 Training with ClearML
+
+By integrating YOLOv8 with ClearML, you can bring automation and efficiency to your machine learning workflow and improve your training process.
+
+## Installation
+
+To install the required packages, run:
+
+!!! Tip "Installation"
+
+    === "CLI"
+
+        ```bash
+        # Install the required packages for YOLOv8 and ClearML
+        pip install ultralytics clearml
+        ```
+
+For detailed instructions and best practices related to the installation process, be sure to check our [YOLOv8 Installation guide](../quickstart.md). While installing the required packages for YOLOv8, if you encounter any difficulties, consult our [Common Issues guide](../guides/yolo-common-issues.md) for solutions and tips.
+
+## Configuring ClearML
+
+Once you have installed the necessary packages, the next step is to initialize and configure your ClearML SDK. This involves setting up your ClearML account and obtaining the necessary credentials for a seamless connection between your development environment and the ClearML server.
+
+Begin by initializing the ClearML SDK in your environment. The `clearml-init` command starts the setup process and prompts you for the necessary credentials.
+
+!!! Tip "Initial SDK Setup"
+
+    === "CLI"
+
+        ```bash
+        # Initialize your ClearML SDK setup process
+        clearml-init
+        ```
+
+After executing this command, visit the [ClearML Settings page](https://app.clear.ml/settings/workspace-configuration). Navigate to the top right corner and select "Settings." Go to the "Workspace" section and click on "Create new credentials." Use the credentials provided in the "Create Credentials" pop-up to complete the setup as instructed, depending on whether you are configuring ClearML in a Jupyter Notebook or a local Python environment.
+
+## Usage
+
+Before diving into the usage instructions, be sure to check out the range of [YOLOv8 models offered by Ultralytics](../models/index.md). This will help you choose the most appropriate model for your project requirements.
+
+!!! Example "Usage"
+
+    === "Python"
+
+        ```python
+        from clearml import Task
+        from ultralytics import YOLO
+
+        # Step 1: Creating a ClearML Task
+        task = Task.init(
+            project_name="my_project",
+            task_name="my_yolov8_task"
+        )
+
+        # Step 2: Selecting the YOLOv8 Model
+        model_variant = "yolov8n"
+        task.set_parameter("model_variant", model_variant)
+
+        # Step 3: Loading the YOLOv8 Model
+        model = YOLO(f'{model_variant}.pt')
+
+        # Step 4: Setting Up Training Arguments
+        args = dict(data="coco128.yaml", epochs=16)
+        task.connect(args)
+
+        # Step 5: Initiating Model Training
+        results = model.train(**args)
+        ```
+
+### Understanding the Code
+
+Let's understand the steps showcased in the usage code snippet above.
+
+**Step 1: Creating a ClearML Task**: A new task is initialized in ClearML, specifying your project and task names. This task will track and manage your model's training.
+
+**Step 2: Selecting the YOLOv8 Model**: The `model_variant` variable is set to 'yolov8n', one of the YOLOv8 models.
This variant is then logged in ClearML for tracking. + +**Step 3: Loading the YOLOv8 Model**: The selected YOLOv8 model is loaded using Ultralytics' YOLO class, preparing it for training. + +**Step 4: Setting Up Training Arguments**: Key training arguments like the dataset (`coco128.yaml`) and the number of epochs (`16`) are organized in a dictionary and connected to the ClearML task. This allows for tracking and potential modification via the ClearML UI. For a detailed understanding of the model training process and best practices, refer to our [YOLOv8 Model Training guide](../modes/train.md). + +**Step 5: Initiating Model Training**: The model training is started with the specified arguments. The results of the training process are captured in the `results` variable. + +### Understanding the Output + +Upon running the usage code snippet above, you can expect the following output: + +- A confirmation message indicating the creation of a new ClearML task, along with its unique ID. +- An informational message about the script code being stored, indicating that the code execution is being tracked by ClearML. +- A URL link to the ClearML results page where you can monitor the training progress and view detailed logs. +- Download progress for the YOLOv8 model and the specified dataset, followed by a summary of the model architecture and training configuration. +- Initialization messages for various training components like TensorBoard, Automatic Mixed Precision (AMP), and dataset preparation. +- Finally, the training process starts, with progress updates as the model trains on the specified dataset. For an in-depth understanding of the performance metrics used during training, read [our guide on performance metrics](../guides/yolo-performance-metrics.md). + +### Viewing the ClearML Results Page + +By clicking on the URL link to the ClearML results page in the output of the usage code snippet, you can access a comprehensive view of your model's training process. + +#### Key Features of the ClearML Results Page + +- **Real-Time Metrics Tracking** + - Track critical metrics like loss, accuracy, and validation scores as they occur. + - Provides immediate feedback for timely model performance adjustments. + +- **Experiment Comparison** + - Compare different training runs side-by-side. + - Essential for hyperparameter tuning and identifying the most effective models. + +- **Detailed Logs and Outputs** + - Access comprehensive logs, graphical representations of metrics, and console outputs. + - Gain a deeper understanding of model behavior and issue resolution. + +- **Resource Utilization Monitoring** + - Monitor the utilization of computational resources, including CPU, GPU, and memory. + - Key to optimizing training efficiency and costs. + +- **Model Artifacts Management** + - View, download, and share model artifacts like trained models and checkpoints. + - Enhances collaboration and streamlines model deployment and sharing. + +For a visual walkthrough of what the ClearML Results Page looks like, watch the video below: + +

+
+ +
+ Watch: YOLOv8 MLOps Integration using ClearML +

+ +### Advanced Features in ClearML + +ClearML offers several advanced features to enhance your MLOps experience. + +#### Remote Execution + +ClearML's remote execution feature facilitates the reproduction and manipulation of experiments on different machines. It logs essential details like installed packages and uncommitted changes. When a task is enqueued, the ClearML Agent pulls it, recreates the environment, and runs the experiment, reporting back with detailed results. + +Deploying a ClearML Agent is straightforward and can be done on various machines using the following command: + +```bash +clearml-agent daemon --queue [--docker] +``` + +This setup is applicable to cloud VMs, local GPUs, or laptops. ClearML Autoscalers help manage cloud workloads on platforms like AWS, GCP, and Azure, automating the deployment of agents and adjusting resources based on your resource budget. + +### Cloning, Editing, and Enqueuing + +ClearML's user-friendly interface allows easy cloning, editing, and enqueuing of tasks. Users can clone an existing experiment, adjust parameters or other details through the UI, and enqueue the task for execution. This streamlined process ensures that the ClearML Agent executing the task uses updated configurations, making it ideal for iterative experimentation and model fine-tuning. + +
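+
+If you prefer to drive the same clone-edit-enqueue workflow from code rather than the UI, the ClearML SDK exposes it directly. The following is a minimal sketch, not taken from the guide above: the task ID and the `default` queue name are placeholder assumptions, and it presumes a ClearML Agent is listening on that queue.
+
+```python
+from clearml import Task
+
+# Clone an existing experiment (replace the placeholder with a real task ID)
+cloned_task = Task.clone(source_task='YOUR_TASK_ID', name='yolov8n-clone')
+
+# Adjust a connected parameter on the clone; the 'General/epochs' name assumes
+# the original task connected its arguments under the default 'General' section
+cloned_task.set_parameter('General/epochs', 32)
+
+# Enqueue the clone so an agent listening on the 'default' queue executes it
+Task.enqueue(cloned_task, queue_name='default')
+```
+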


+ Cloning, Editing, and Enqueuing with ClearML +

+
+## Summary
+
+This guide has led you through the process of integrating ClearML with Ultralytics' YOLOv8. Covering everything from initial setup to advanced model management, you've discovered how to leverage ClearML for efficient training, experiment tracking, and workflow optimization in your machine learning projects.
+
+For further details on usage, visit [ClearML's official documentation](https://clear.ml/docs/latest/docs/integrations/yolov8/).
+
+Additionally, explore more integrations and capabilities of Ultralytics by visiting the [Ultralytics integration guide page](../integrations/index.md), which is a treasure trove of resources and insights.
diff --git a/ultralytics/docs/en/integrations/clearml.md:Zone.Identifier b/ultralytics/docs/en/integrations/clearml.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/integrations/clearml.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/integrations/comet.md b/ultralytics/docs/en/integrations/comet.md
new file mode 100755
index 0000000..1aa25d3
--- /dev/null
+++ b/ultralytics/docs/en/integrations/comet.md
@@ -0,0 +1,174 @@
+---
+comments: true
+description: Discover how to track and enhance YOLOv8 model training with Comet ML's logging tools, from setup to monitoring key metrics and managing experiments for in-depth analysis.
+keywords: Ultralytics, YOLOv8, Object Detection, Comet ML, Model Training, Model Metrics Logging, Experiment Tracking, Offline Experiment Management
+---
+
+# Elevating YOLOv8 Training: Simplify Your Logging Process with Comet ML
+
+Logging key training details such as parameters, metrics, image predictions, and model checkpoints is essential in machine learning: it keeps your project transparent, your progress measurable, and your results repeatable.
+
+[Ultralytics YOLOv8](https://ultralytics.com) seamlessly integrates with Comet ML, efficiently capturing and optimizing every aspect of your YOLOv8 object detection model's training process. In this guide, we'll cover the installation process, Comet ML setup, real-time insights, custom logging, and offline usage, ensuring that your YOLOv8 training is thoroughly documented and fine-tuned for outstanding results.
+
+## Comet ML
+

+ Comet ML Overview +

+
+[Comet ML](https://www.comet.ml/) is a platform for tracking, comparing, explaining, and optimizing machine learning models and experiments. It allows you to log metrics, parameters, media, and more during your model training and monitor your experiments through an aesthetically pleasing web interface. Comet ML helps data scientists iterate more rapidly, enhances transparency and reproducibility, and aids in the development of production models.
+
+## Harnessing the Power of YOLOv8 and Comet ML
+
+By combining Ultralytics YOLOv8 with Comet ML, you unlock a range of benefits. These include simplified experiment management, real-time insights for quick adjustments, flexible and tailored logging options, and the ability to log experiments offline when internet access is limited. This integration empowers you to make data-driven decisions, analyze performance metrics, and achieve exceptional results.
+
+## Installation
+
+To install the required packages, run:
+
+!!! Tip "Installation"
+
+    === "CLI"
+
+        ```bash
+        # Install the required packages for YOLOv8 and Comet ML
+        pip install ultralytics comet_ml torch torchvision
+        ```
+
+## Configuring Comet ML
+
+After installing the required packages, you'll need to sign up, get a [Comet API Key](https://www.comet.com/signup), and configure it.
+
+!!! Tip "Configuring Comet ML"
+
+    === "CLI"
+
+        ```bash
+        # Set your Comet API key
+        export COMET_API_KEY=
+        ```
+
+Then, you can initialize your Comet project. Comet will automatically detect the API key and proceed with the setup.
+
+```python
+import comet_ml
+
+comet_ml.init(project_name="comet-example-yolov8-coco128")
+```
+
+*Note:* If you are using a Google Colab notebook, the code above will prompt you to enter your API key for initialization.
+
+## Usage
+
+Before diving into the usage instructions, be sure to check out the range of [YOLOv8 models offered by Ultralytics](../models/index.md). This will help you choose the most appropriate model for your project requirements.
+
+!!! Example "Usage"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO("yolov8n.pt")
+
+        # Train the model
+        results = model.train(
+            data="coco128.yaml",
+            project="comet-example-yolov8-coco128",
+            batch=32,
+            save_period=1,
+            save_json=True,
+            epochs=3
+        )
+        ```
+
+After running the training code, Comet ML will create an experiment in your Comet workspace to track the run automatically. You will then be provided with a link to view the detailed logging of your [YOLOv8 model's training](../modes/train.md) process.
+
+Comet automatically logs the following data with no additional configuration: metrics such as mAP and loss, hyperparameters, model checkpoints, an interactive confusion matrix, and image bounding box predictions.
+
+## Understanding Your Model's Performance with Comet ML Visualizations
+
+Let's dive into what you'll see on the Comet ML dashboard once your YOLOv8 model begins training. The dashboard is where all the action happens, presenting a range of automatically logged information through visuals and statistics. Here's a quick tour:
+
+**Experiment Panels**
+
+The experiment panels section of the Comet ML dashboard organizes and presents the different runs and their metrics, such as segment mask loss, class loss, precision, and mean average precision.
+

+ Comet ML Overview +

+
+**Metrics**
+
+In the metrics section, you can also examine the metrics in a tabular format, displayed in a dedicated pane as illustrated here.
+

+ Comet ML Overview +

+ +**Interactive Confusion Matrix** + +The confusion matrix, found in the Confusion Matrix tab, provides an interactive way to assess the model's classification accuracy. It details the correct and incorrect predictions, allowing you to understand the model's strengths and weaknesses. + +

+ Comet ML Overview +

+ +**System Metrics** + +Comet ML logs system metrics to help identify any bottlenecks in the training process. It includes metrics such as GPU utilization, GPU memory usage, CPU utilization, and RAM usage. These are essential for monitoring the efficiency of resource usage during model training. + +

+ Comet ML Overview +

+ +## Customizing Comet ML Logging + +Comet ML offers the flexibility to customize its logging behavior by setting environment variables. These configurations allow you to tailor Comet ML to your specific needs and preferences. Here are some helpful customization options: + +### Logging Image Predictions + +You can control the number of image predictions that Comet ML logs during your experiments. By default, Comet ML logs 100 image predictions from the validation set. However, you can change this number to better suit your requirements. For example, to log 200 image predictions, use the following code: + +```python +import os +os.environ["COMET_MAX_IMAGE_PREDICTIONS"] = "200" +``` + +### Batch Logging Interval + +Comet ML allows you to specify how often batches of image predictions are logged. The `COMET_EVAL_BATCH_LOGGING_INTERVAL` environment variable controls this frequency. The default setting is 1, which logs predictions from every validation batch. You can adjust this value to log predictions at a different interval. For instance, setting it to 4 will log predictions from every fourth batch. + +```python +import os +os.environ['COMET_EVAL_BATCH_LOGGING_INTERVAL'] = "4" +``` + +### Disabling Confusion Matrix Logging + +In some cases, you may not want to log the confusion matrix from your validation set after every epoch. You can disable this feature by setting the `COMET_EVAL_LOG_CONFUSION_MATRIX` environment variable to "false." The confusion matrix will only be logged once, after the training is completed. + +```python +import os +os.environ["COMET_EVAL_LOG_CONFUSION_MATRIX"] = "false" +``` + +### Offline Logging + +If you find yourself in a situation where internet access is limited, Comet ML provides an offline logging option. You can set the `COMET_MODE` environment variable to "offline" to enable this feature. Your experiment data will be saved locally in a directory that you can later upload to Comet ML when internet connectivity is available. + +```python +import os +os.environ["COMET_MODE"] = "offline" +``` + +## Summary + +This guide has walked you through integrating Comet ML with Ultralytics' YOLOv8. From installation to customization, you've learned to streamline experiment management, gain real-time insights, and adapt logging to your project's needs. + +Explore [Comet ML's official documentation](https://www.comet.com/docs/v2/integrations/third-party-tools/yolov8/) for more insights on integrating with YOLOv8. + +Furthermore, if you're looking to dive deeper into the practical applications of YOLOv8, specifically for image segmentation tasks, this detailed guide on [fine-tuning YOLOv8 with Comet ML](https://www.comet.com/site/blog/fine-tuning-yolov8-for-image-segmentation-with-comet/) offers valuable insights and step-by-step instructions to enhance your model's performance. + +Additionally, to explore other exciting integrations with Ultralytics, check out the [integration guide page](../integrations/index.md), which offers a wealth of resources and information. 
diff --git a/ultralytics/docs/en/integrations/comet.md:Zone.Identifier b/ultralytics/docs/en/integrations/comet.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/comet.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/dvc.md b/ultralytics/docs/en/integrations/dvc.md new file mode 100755 index 0000000..542a91a --- /dev/null +++ b/ultralytics/docs/en/integrations/dvc.md @@ -0,0 +1,171 @@ +--- +comments: true +description: This guide provides a step-by-step approach to integrating DVCLive with Ultralytics YOLOv8 for advanced experiment tracking. Learn how to set up your environment, run experiments with varied configurations, and analyze results using DVCLive's powerful tracking and visualization tools. +keywords: DVCLive, Ultralytics, YOLOv8, Machine Learning, Experiment Tracking, Data Version Control, ML Workflows, Model Training, Hyperparameter Tuning +--- + +# Advanced YOLOv8 Experiment Tracking with DVCLive + +Experiment tracking in machine learning is critical to model development and evaluation. It involves recording and analyzing various parameters, metrics, and outcomes from numerous training runs. This process is essential for understanding model performance and making data-driven decisions to refine and optimize models. + +Integrating DVCLive with [Ultralytics YOLOv8](https://ultralytics.com) transforms the way experiments are tracked and managed. This integration offers a seamless solution for automatically logging key experiment details, comparing results across different runs, and visualizing data for in-depth analysis. In this guide, we'll understand how DVCLive can be used to streamline the process. + +## DVCLive + +

+ DVCLive Overview +

+
+[DVCLive](https://dvc.org/doc/dvclive), developed by DVC, is an innovative open-source tool for experiment tracking in machine learning. Integrating seamlessly with Git and DVC, it automates the logging of crucial experiment data like model parameters and training metrics. Designed for simplicity, DVCLive enables effortless comparison and analysis of multiple runs, enhancing the efficiency of machine learning projects with intuitive data visualization and analysis tools.
+
+## YOLOv8 Training with DVCLive
+
+YOLOv8 training sessions can be effectively monitored with DVCLive. Additionally, DVC provides integral features for visualizing these experiments, including the generation of a report that enables the comparison of metric plots across all tracked experiments, offering a comprehensive view of the training process.
+
+## Installation
+
+To install the required packages, run:
+
+!!! Tip "Installation"
+
+    === "CLI"
+
+        ```bash
+        # Install the required packages for YOLOv8 and DVCLive
+        pip install ultralytics dvclive
+        ```
+
+For detailed instructions and best practices related to the installation process, be sure to check our [YOLOv8 Installation guide](../quickstart.md). While installing the required packages for YOLOv8, if you encounter any difficulties, consult our [Common Issues guide](../guides/yolo-common-issues.md) for solutions and tips.
+
+## Configuring DVCLive
+
+Once you have installed the necessary packages, the next step is to set up and configure your environment with the necessary credentials. This setup ensures a smooth integration of DVCLive into your existing workflow.
+
+Begin by initializing a Git repository, as Git plays a crucial role in version control for both your code and DVCLive configurations.
+
+!!! Tip "Initial Environment Setup"
+
+    === "CLI"
+
+        ```bash
+        # Initialize a Git repository
+        git init -q
+
+        # Configure Git with your details
+        git config --local user.email "you@example.com"
+        git config --local user.name "Your Name"
+
+        # Initialize DVC in your project
+        dvc init -q
+
+        # Commit the DVC setup to your Git repository
+        git commit -m "DVC init"
+        ```
+
+In these commands, make sure to replace "you@example.com" with the email address associated with your Git account, and "Your Name" with your Git account username.
+
+## Usage
+
+Before diving into the usage instructions, be sure to check out the range of [YOLOv8 models offered by Ultralytics](../models/index.md). This will help you choose the most appropriate model for your project requirements.
+
+### Training YOLOv8 Models with DVCLive
+
+Start by running your YOLOv8 training sessions. You can use different model configurations and training parameters to suit your project needs. For instance:
+
+```bash
+# Example training commands for YOLOv8 with varying configurations
+yolo train model=yolov8n.pt data=coco8.yaml epochs=5 imgsz=512
+yolo train model=yolov8n.pt data=coco8.yaml epochs=5 imgsz=640
+```
+
+Adjust the model, data, epochs, and imgsz parameters according to your specific requirements. For a detailed understanding of the model training process and best practices, refer to our [YOLOv8 Model Training guide](../modes/train.md).
+
+### Monitoring Experiments with DVCLive
+
+DVCLive enhances the training process by enabling the tracking and visualization of key metrics. When installed, Ultralytics YOLOv8 automatically integrates with DVCLive for experiment tracking, which you can later analyze for performance insights, as sketched below.
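+
+As a minimal sketch of this automatic integration (the dataset and epoch count are illustrative), training from Python with the `dvclive` package installed is enough to produce DVCLive logs for the run:
+
+```python
+from ultralytics import YOLO
+
+# With dvclive installed, this training run is tracked automatically
+model = YOLO('yolov8n.pt')
+model.train(data='coco8.yaml', epochs=5, imgsz=640)
+```
+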
For a comprehensive understanding of the specific performance metrics used during training, be sure to explore [our detailed guide on performance metrics](../guides/yolo-performance-metrics.md). + +### Analyzing Results + +After your YOLOv8 training sessions are complete, you can leverage DVCLive's powerful visualization tools for in-depth analysis of the results. DVCLive's integration ensures that all training metrics are systematically logged, facilitating a comprehensive evaluation of your model's performance. + +To start the analysis, you can extract the experiment data using DVC's API and process it with Pandas for easier handling and visualization: + +```python +import dvc.api +import pandas as pd + +# Define the columns of interest +columns = ["Experiment", "epochs", "imgsz", "model", "metrics.mAP50-95(B)"] + +# Retrieve experiment data +df = pd.DataFrame(dvc.api.exp_show(), columns=columns) + +# Clean the data +df.dropna(inplace=True) +df.reset_index(drop=True, inplace=True) + +# Display the DataFrame +print(df) +``` + +The output of the code snippet above provides a clear tabular view of the different experiments conducted with YOLOv8 models. Each row represents a different training run, detailing the experiment's name, the number of epochs, image size (imgsz), the specific model used, and the mAP50-95(B) metric. This metric is crucial for evaluating the model's accuracy, with higher values indicating better performance. + +#### Visualizing Results with Plotly + +For a more interactive and visual analysis of your experiment results, you can use Plotly's parallel coordinates plot. This type of plot is particularly useful for understanding the relationships and trade-offs between different parameters and metrics. + +```python +from plotly.express import parallel_coordinates + +# Create a parallel coordinates plot +fig = parallel_coordinates(df, columns, color="metrics.mAP50-95(B)") + +# Display the plot +fig.show() +``` + +The output of the code snippet above generates a plot that will visually represent the relationships between epochs, image size, model type, and their corresponding mAP50-95(B) scores, enabling you to spot trends and patterns in your experiment data. + +#### Generating Comparative Visualizations with DVC + +DVC provides a useful command to generate comparative plots for your experiments. This can be especially helpful to compare the performance of different models over various training runs. + +```bash +# Generate DVC comparative plots +dvc plots diff $(dvc exp list --names-only) +``` + +After executing this command, DVC generates plots comparing the metrics across different experiments, which are saved as HTML files. Below is an example image illustrating typical plots generated by this process. The image showcases various graphs, including those representing mAP, recall, precision, loss values, and more, providing a visual overview of key performance metrics: + +

+ DVCLive Plots +

+ +### Displaying DVC Plots + +If you are using a Jupyter Notebook and you want to display the generated DVC plots, you can use the IPython display functionality. + +```python +from IPython.display import HTML + +# Display the DVC plots as HTML +HTML(filename='./dvc_plots/index.html') +``` + +This code will render the HTML file containing the DVC plots directly in your Jupyter Notebook, providing an easy and convenient way to analyze the visualized experiment data. + +### Making Data-Driven Decisions + +Use the insights gained from these visualizations to make informed decisions about model optimizations, hyperparameter tuning, and other modifications to enhance your model's performance. + +### Iterating on Experiments + +Based on your analysis, iterate on your experiments. Adjust model configurations, training parameters, or even the data inputs, and repeat the training and analysis process. This iterative approach is key to refining your model for the best possible performance. + +## Summary + +This guide has led you through the process of integrating DVCLive with Ultralytics' YOLOv8. You have learned how to harness the power of DVCLive for detailed experiment monitoring, effective visualization, and insightful analysis in your machine learning endeavors. + +For further details on usage, visit [DVCLiveโ€™s official documentation](https://dvc.org/doc/dvclive/ml-frameworks/yolo). + +Additionally, explore more integrations and capabilities of Ultralytics by visiting the [Ultralytics integration guide page](../integrations/index.md), which is a collection of great resources and insights. diff --git a/ultralytics/docs/en/integrations/dvc.md:Zone.Identifier b/ultralytics/docs/en/integrations/dvc.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/dvc.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/index.md b/ultralytics/docs/en/integrations/index.md new file mode 100755 index 0000000..3c26f85 --- /dev/null +++ b/ultralytics/docs/en/integrations/index.md @@ -0,0 +1,71 @@ +--- +comments: true +description: Explore Ultralytics integrations with tools for dataset management, model optimization, ML workflows automation, experiment tracking, version control, and more. Learn about our support for various model export formats for deployment. +keywords: Ultralytics integrations, Roboflow, Neural Magic, ClearML, Comet ML, DVC, Ultralytics HUB, MLFlow, Neptune, Ray Tune, TensorBoard, W&B, model export formats, PyTorch, TorchScript, ONNX, OpenVINO, TensorRT, CoreML, TF SavedModel, TF GraphDef, TF Lite, TF Edge TPU, TF.js, PaddlePaddle, NCNN +--- + +# Ultralytics Integrations + +Welcome to the Ultralytics Integrations page! This page provides an overview of our partnerships with various tools and platforms, designed to streamline your machine learning workflows, enhance dataset management, simplify model training, and facilitate efficient deployment. + +Ultralytics YOLO ecosystem and integrations + +## Datasets Integrations + +- [Roboflow](roboflow.md): Facilitate seamless dataset management for Ultralytics models, offering robust annotation, preprocessing, and augmentation capabilities. + +## Training Integrations + +- [Comet ML](comet.md): Enhance your model development with Ultralytics by tracking, comparing, and optimizing your machine learning experiments. + +- [ClearML](clearml.md): Automate your Ultralytics ML workflows, monitor experiments, and foster team collaboration. 
+
+- [DVC](dvc.md): Implement version control for your Ultralytics machine learning projects, synchronizing data, code, and models effectively.
+
+- [Ultralytics HUB](https://hub.ultralytics.com): Access and contribute to a community of pre-trained Ultralytics models.
+
+- [MLFlow](mlflow.md): Streamline the entire ML lifecycle of Ultralytics models, from experimentation and reproducibility to deployment.
+
+- [Neptune](https://neptune.ai/): Maintain a comprehensive log of your ML experiments with Ultralytics in this metadata store designed for MLOps.
+
+- [Ray Tune](ray-tune.md): Optimize the hyperparameters of your Ultralytics models at any scale.
+
+- [TensorBoard](https://tensorboard.dev/): Visualize your Ultralytics ML workflows, monitor model metrics, and foster team collaboration.
+
+- [Weights & Biases (W&B)](https://wandb.ai/site): Monitor experiments, visualize metrics, and foster reproducibility and collaboration on Ultralytics projects.
+
+## Deployment Integrations
+
+- [Neural Magic](https://neuralmagic.com/): Leverage Quantization Aware Training (QAT) and pruning techniques to optimize Ultralytics models for superior performance and leaner size.
+
+### Export Formats
+
+We also support a variety of model export formats for deployment in different environments. Here are the available formats:
+
+| Format                                                             | `format` Argument | Model                     | Metadata | Arguments                                           |
+|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n.pt`              | ✅        | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n.torchscript`     | ✅        | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n.onnx`            | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](openvino.md)                                            | `openvino`        | `yolov8n_openvino_model/` | ✅        | `imgsz`, `half`, `int8`                             |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n.engine`          | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n.mlpackage`       | ✅        | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n_saved_model/`    | ✅        | `imgsz`, `keras`, `int8`                            |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n.pb`              | ❌        | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n.tflite`          | ✅        | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n_edgetpu.tflite`  | ✅        | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n_web_model/`      | ✅        | `imgsz`, `half`, `int8`                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n_paddle_model/`   | ✅        | `imgsz`                                             |
+| [NCNN](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n_ncnn_model/`     | ✅        | `imgsz`, `half`                                     |
+
+Explore the links to learn more about each integration and how to get the most out of them with Ultralytics.
+
+## Contribute to Our Integrations
+
+We're always excited to see how the community integrates Ultralytics YOLO with other technologies, tools, and platforms! If you have successfully integrated YOLO with a new system or have valuable insights to share, consider contributing to our Integrations Docs.
+
+By writing a guide or tutorial, you can help expand our documentation and provide real-world examples that benefit the community. It's an excellent way to contribute to the growing ecosystem around Ultralytics YOLO.
+
+To contribute, please check out our [Contributing Guide](https://docs.ultralytics.com/help/contributing) for instructions on how to submit a Pull Request (PR) 🛠️. We eagerly await your contributions!
+
+Let's collaborate to make the Ultralytics YOLO ecosystem more expansive and feature-rich 🙏!
diff --git a/ultralytics/docs/en/integrations/index.md:Zone.Identifier b/ultralytics/docs/en/integrations/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/integrations/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/integrations/mlflow.md b/ultralytics/docs/en/integrations/mlflow.md
new file mode 100755
index 0000000..58052df
--- /dev/null
+++ b/ultralytics/docs/en/integrations/mlflow.md
@@ -0,0 +1,112 @@
+---
+comments: true
+description: Uncover the utility of MLflow for effective experiment logging in your Ultralytics YOLO projects.
+keywords: ultralytics docs, YOLO, MLflow, experiment logging, metrics tracking, parameter logging, artifact logging
+---
+
+# MLflow Integration for Ultralytics YOLO
+
+MLflow ecosystem
+
+## Introduction
+
+Experiment logging is a crucial aspect of machine learning workflows that enables tracking of various metrics, parameters, and artifacts. It helps to enhance model reproducibility, debug issues, and improve model performance. [Ultralytics](https://ultralytics.com) YOLO, known for its real-time object detection capabilities, now offers integration with [MLflow](https://mlflow.org/), an open-source platform for complete machine learning lifecycle management.
+
+This documentation page is a comprehensive guide to setting up and utilizing the MLflow logging capabilities for your Ultralytics YOLO project.
+
+## What is MLflow?
+
+[MLflow](https://mlflow.org/) is an open-source platform developed by [Databricks](https://www.databricks.com/) for managing the end-to-end machine learning lifecycle. It includes tools for tracking experiments, packaging code into reproducible runs, and sharing and deploying models. MLflow is designed to work with any machine learning library and programming language.
+
+## Features
+
+- **Metrics Logging**: Logs metrics at the end of each epoch and at the end of the training.
+- **Parameter Logging**: Logs all the parameters used in the training.
+- **Artifacts Logging**: Logs model artifacts, including weights and configuration files, at the end of the training.
+
+## Setup and Prerequisites
+
+Ensure MLflow is installed. If not, install it using pip:
+
+```bash
+pip install mlflow
+```
+
+Make sure that MLflow logging is enabled in Ultralytics settings. Usually, this is controlled by the settings `mlflow` key. See the [settings](https://docs.ultralytics.com/quickstart/#ultralytics-settings) page for more info.
+
+!!! Example "Update Ultralytics MLflow Settings"
+
+    === "Python"
+        Within the Python environment, call the `update` method on the `settings` object to change your settings:
+        ```python
+        from ultralytics import settings
+
+        # Update a setting
+        settings.update({'mlflow': True})
+
+        # Reset settings to default values
+        settings.reset()
+        ```
+
+    === "CLI"
+        If you prefer using the command-line interface, the following commands will allow you to modify your settings:
+        ```bash
+        # Update a setting
+        yolo settings runs_dir='/path/to/runs'
+
+        # Reset settings to default values
+        yolo settings reset
+        ```
+
+## How to Use
+
+### Commands
+
+1. **Set a Project Name**: You can set the project name via an environment variable:
+    ```bash
+    export MLFLOW_EXPERIMENT_NAME=
+    ```
+    Or use the `project=` argument when training a YOLO model, i.e. `yolo train project=my_project`.
+
+2. **Set a Run Name**: Similar to setting a project name, you can set the run name via an environment variable:
+    ```bash
+    export MLFLOW_RUN=
+    ```
+    Or use the `name=` argument when training a YOLO model, i.e. `yolo train project=my_project name=my_name`.
+
+3. **Start Local MLflow Server**: To start tracking, use:
+    ```bash
+    mlflow server --backend-store-uri runs/mlflow
+    ```
+    This will start a local server at http://127.0.0.1:5000 by default and save all mlflow logs to the 'runs/mlflow' directory. To specify a different URI, set the `MLFLOW_TRACKING_URI` environment variable.
+
+4. **Kill MLflow Server Instances**: To stop all running MLflow instances, run:
+    ```bash
+    ps aux | grep 'mlflow' | grep -v 'grep' | awk '{print $2}' | xargs kill -9
+    ```
+
+### Logging
+
+The logging is taken care of by the `on_pretrain_routine_end`, `on_fit_epoch_end`, and `on_train_end` callback functions. These functions are automatically called during the respective stages of the training process, and they handle the logging of parameters, metrics, and artifacts.
+
+## Examples
+
+1. **Logging Custom Metrics**: You can add custom metrics to be logged by modifying the `trainer.metrics` dictionary before `on_fit_epoch_end` is called. A hedged sketch of this approach appears at the end of this page.
+
+2. **View Experiment**: To view your logs, navigate to your MLflow server (usually http://127.0.0.1:5000) and select your experiment and run.
+    YOLO MLflow Experiment
+
+3. **View Run**: Runs are individual models inside an experiment. Click on a Run and see the Run details, including uploaded artifacts and model weights.
+    YOLO MLflow Run
+
+## Disabling MLflow
+
+To turn off MLflow logging:
+
+```bash
+yolo settings mlflow=False
+```
+
+## Conclusion
+
+MLflow logging integration with Ultralytics YOLO offers a streamlined way to keep track of your machine learning experiments. It empowers you to monitor performance metrics and manage artifacts effectively, thus aiding in robust model development and deployment. For further details please visit the MLflow [official documentation](https://mlflow.org/docs/latest/index.html).
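+
+As referenced in the Examples section above, the following is a minimal sketch of custom metric logging. The `add_callback` method and the `on_fit_epoch_end` event are part of the Ultralytics API, but the metric name here is made up for illustration, and depending on the callback ordering in your Ultralytics version the added value may be picked up by MLflow on the same or the following logging call.
+
+```python
+from ultralytics import YOLO
+
+def add_custom_metric(trainer):
+    # 'custom/epoch_index' is an illustrative, made-up metric name
+    trainer.metrics['custom/epoch_index'] = float(trainer.epoch)
+
+model = YOLO('yolov8n.pt')
+model.add_callback('on_fit_epoch_end', add_custom_metric)
+model.train(data='coco8.yaml', epochs=3)
+```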
diff --git a/ultralytics/docs/en/integrations/mlflow.md:Zone.Identifier b/ultralytics/docs/en/integrations/mlflow.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/mlflow.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/openvino.md b/ultralytics/docs/en/integrations/openvino.md new file mode 100755 index 0000000..6f552ff --- /dev/null +++ b/ultralytics/docs/en/integrations/openvino.md @@ -0,0 +1,284 @@ +--- +comments: true +description: Discover the power of deploying your Ultralytics YOLOv8 model using OpenVINO format for up to 10x speedup vs PyTorch. +keywords: ultralytics docs, YOLOv8, export YOLOv8, YOLOv8 model deployment, exporting YOLOv8, OpenVINO, OpenVINO format +--- + +# Intel OpenVINO Export + +OpenVINO Ecosystem + +In this guide, we cover exporting YOLOv8 models to the [OpenVINO](https://docs.openvino.ai/) format, which can provide up to 3x [CPU](https://docs.openvino.ai/2023.0/openvino_docs_OV_UG_supported_plugins_CPU.html) speedup as well as accelerating on other Intel hardware ([iGPU](https://docs.openvino.ai/2023.0/openvino_docs_OV_UG_supported_plugins_GPU.html), [dGPU](https://docs.openvino.ai/2023.0/openvino_docs_OV_UG_supported_plugins_GPU.html), [VPU](https://docs.openvino.ai/2022.3/openvino_docs_OV_UG_supported_plugins_VPU.html), etc.). + +OpenVINO, short for Open Visual Inference & Neural Network Optimization toolkit, is a comprehensive toolkit for optimizing and deploying AI inference models. Even though the name contains Visual, OpenVINO also supports various additional tasks including language, audio, time series, etc. + +

+
+ +
+ Watch: How To Export and Optimize an Ultralytics YOLOv8 Model for Inference with OpenVINO. +

+
+## Usage Examples
+
+Export a YOLOv8n model to OpenVINO format and run inference with the exported model.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a YOLOv8n PyTorch model
+        model = YOLO('yolov8n.pt')
+
+        # Export the model
+        model.export(format='openvino')  # creates 'yolov8n_openvino_model/'
+
+        # Load the exported OpenVINO model
+        ov_model = YOLO('yolov8n_openvino_model/')
+
+        # Run inference
+        results = ov_model('https://ultralytics.com/images/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Export a YOLOv8n PyTorch model to OpenVINO format
+        yolo export model=yolov8n.pt format=openvino  # creates 'yolov8n_openvino_model/'
+
+        # Run inference with the exported model
+        yolo predict model=yolov8n_openvino_model source='https://ultralytics.com/images/bus.jpg'
+        ```
+
+## Arguments
+
+| Key      | Value        | Description                                           |
+|----------|--------------|-------------------------------------------------------|
+| `format` | `'openvino'` | format to export to                                   |
+| `imgsz`  | `640`        | image size as scalar or (h, w) list, i.e. (640, 480)  |
+| `half`   | `False`      | FP16 quantization                                     |
+
+## Benefits of OpenVINO
+
+1. **Performance**: OpenVINO delivers high-performance inference by utilizing the power of Intel CPUs, integrated and discrete GPUs, and FPGAs.
+2. **Support for Heterogeneous Execution**: OpenVINO provides an API to write once and deploy on any supported Intel hardware (CPU, GPU, FPGA, VPU, etc.).
+3. **Model Optimizer**: OpenVINO provides a Model Optimizer that imports, converts, and optimizes models from popular deep learning frameworks such as PyTorch, TensorFlow, TensorFlow Lite, Keras, ONNX, PaddlePaddle, and Caffe.
+4. **Ease of Use**: The toolkit comes with more than [80 tutorial notebooks](https://github.com/openvinotoolkit/openvino_notebooks) (including [YOLOv8 optimization](https://github.com/openvinotoolkit/openvino_notebooks/tree/main/notebooks/230-yolov8-optimization)) teaching different aspects of the toolkit.
+
+## OpenVINO Export Structure
+
+When you export a model to OpenVINO format, it results in a directory containing the following:
+
+1. **XML file**: Describes the network topology.
+2. **BIN file**: Contains the weights and biases binary data.
+3. **Mapping file**: Holds mapping of original model output tensors to OpenVINO tensor names.
+
+You can use these files to run inference with the OpenVINO Inference Engine.
+
+## Using OpenVINO Export in Deployment
+
+Once you have the OpenVINO files, you can use the OpenVINO Runtime to run the model. The Runtime provides a unified API to run inference across all supported Intel hardware. It also provides advanced capabilities like load balancing across Intel hardware and asynchronous execution. For more information on running the inference, refer to the [Inference with OpenVINO Runtime Guide](https://docs.openvino.ai/2023.0/openvino_docs_OV_UG_OV_Runtime_User_Guide.html).
+
+Remember, you'll need the XML and BIN files as well as any application-specific settings like input size, scale factor for normalization, etc., to correctly set up and use the model with the Runtime.
+
+In your deployment application, you would typically do the following steps:
+
+1. Initialize OpenVINO by creating `core = Core()`.
+2. Load the model using the `core.read_model()` method.
+3. Compile the model using the `core.compile_model()` function.
+4. Prepare the input (image, text, audio, etc.).
+5. Run inference using `compiled_model(input_data)`.
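+
+A minimal sketch of these five steps with the OpenVINO Python API is shown below; the XML path and the (1, 3, 640, 640) input shape are assumptions based on a default YOLOv8n export, so adjust them to your model.
+
+```python
+import numpy as np
+from openvino.runtime import Core
+
+# 1. Initialize OpenVINO
+core = Core()
+
+# 2. Read the exported model (path assumes a default YOLOv8n export)
+model = core.read_model('yolov8n_openvino_model/yolov8n.xml')
+
+# 3. Compile the model for a target device such as CPU
+compiled_model = core.compile_model(model, 'CPU')
+
+# 4. Prepare an input; a random tensor stands in for a preprocessed image
+input_data = np.random.rand(1, 3, 640, 640).astype(np.float32)
+
+# 5. Run inference and read the first output tensor
+results = compiled_model(input_data)
+print(results[compiled_model.output(0)].shape)
+```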
+ +For more detailed steps and code snippets, refer to the [OpenVINO documentation](https://docs.openvino.ai/) or [API tutorial](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/002-openvino-api/002-openvino-api.ipynb). + +## OpenVINO YOLOv8 Benchmarks + +YOLOv8 benchmarks below were run by the Ultralytics team across 4 different model formats, measuring speed and accuracy: PyTorch, TorchScript, ONNX and OpenVINO. Benchmarks were run on Intel Flex and Arc GPUs, and on Intel Xeon CPUs at FP32 precision (with the `half=False` argument). + +!!! Note + + The benchmarking results below are for reference and might vary based on the exact hardware and software configuration of a system, as well as the current workload of the system at the time the benchmarks are run. + + All benchmarks run with `openvino` Python package version [2023.0.1](https://pypi.org/project/openvino/2023.0.1/). + +### Intel Flex GPU + +The Intel® Data Center GPU Flex Series is a versatile and robust solution designed for the intelligent visual cloud. This GPU supports a wide array of workloads including media streaming, cloud gaming, AI visual inference, and virtual desktop infrastructure (VDI) workloads. It stands out for its open architecture and built-in support for AV1 encoding, providing a standards-based software stack for high-performance, cross-architecture applications. The Flex Series GPU is optimized for density and quality, offering high reliability, availability, and scalability. + +Benchmarks below run on Intel® Data Center GPU Flex 170 at FP32 precision. + +
+Flex GPU benchmarks +
+ +| Model | Format | Status | Size (MB) | mAP50-95(B) | Inference time (ms/im) | +|---------|-------------|--------|-----------|-------------|------------------------| +| YOLOv8n | PyTorch | ✅ | 6.2 | 0.3709 | 21.79 | +| YOLOv8n | TorchScript | ✅ | 12.4 | 0.3704 | 23.24 | +| YOLOv8n | ONNX | ✅ | 12.2 | 0.3704 | 37.22 | +| YOLOv8n | OpenVINO | ✅ | 12.3 | 0.3703 | 3.29 | +| YOLOv8s | PyTorch | ✅ | 21.5 | 0.4471 | 31.89 | +| YOLOv8s | TorchScript | ✅ | 42.9 | 0.4472 | 32.71 | +| YOLOv8s | ONNX | ✅ | 42.8 | 0.4472 | 43.42 | +| YOLOv8s | OpenVINO | ✅ | 42.9 | 0.4470 | 3.92 | +| YOLOv8m | PyTorch | ✅ | 49.7 | 0.5013 | 50.75 | +| YOLOv8m | TorchScript | ✅ | 99.2 | 0.4999 | 47.90 | +| YOLOv8m | ONNX | ✅ | 99.0 | 0.4999 | 63.16 | +| YOLOv8m | OpenVINO | ✅ | 49.8 | 0.4997 | 7.11 | +| YOLOv8l | PyTorch | ✅ | 83.7 | 0.5293 | 77.45 | +| YOLOv8l | TorchScript | ✅ | 167.2 | 0.5268 | 85.71 | +| YOLOv8l | ONNX | ✅ | 166.8 | 0.5268 | 88.94 | +| YOLOv8l | OpenVINO | ✅ | 167.0 | 0.5264 | 9.37 | +| YOLOv8x | PyTorch | ✅ | 130.5 | 0.5404 | 100.09 | +| YOLOv8x | TorchScript | ✅ | 260.7 | 0.5371 | 114.64 | +| YOLOv8x | ONNX | ✅ | 260.4 | 0.5371 | 110.32 | +| YOLOv8x | OpenVINO | ✅ | 260.6 | 0.5367 | 15.02 | + +This table represents the benchmark results for five different models (YOLOv8n, YOLOv8s, YOLOv8m, YOLOv8l, YOLOv8x) across four different formats (PyTorch, TorchScript, ONNX, OpenVINO), giving us the status, size, mAP50-95(B) metric, and inference time for each combination. + +### Intel Arc GPU + +Intel® Arc™ represents Intel's foray into the dedicated GPU market. The Arc™ series, designed to compete with leading GPU manufacturers like AMD and Nvidia, caters to both the laptop and desktop markets. The series includes mobile versions for compact devices like laptops, and larger, more powerful versions for desktop computers. + +The Arc™ series is divided into three categories: Arc™ 3, Arc™ 5, and Arc™ 7, with each number indicating the performance level. Each category includes several models, and the 'M' in the GPU model name signifies a mobile, integrated variant. + +Early reviews have praised the Arc™ series, particularly the integrated A770M GPU, for its impressive graphics performance. The availability of the Arc™ series varies by region, and additional models are expected to be released soon. Intel® Arc™ GPUs offer high-performance solutions for a range of computing needs, from gaming to content creation. + +Benchmarks below run on Intel® Arc 770 GPU at FP32 precision. + +
+Arc GPU benchmarks +
+ +| Model | Format | Status | Size (MB) | metrics/mAP50-95(B) | Inference time (ms/im) | +|---------|-------------|--------|-----------|---------------------|------------------------| +| YOLOv8n | PyTorch | ✅ | 6.2 | 0.3709 | 88.79 | +| YOLOv8n | TorchScript | ✅ | 12.4 | 0.3704 | 102.66 | +| YOLOv8n | ONNX | ✅ | 12.2 | 0.3704 | 57.98 | +| YOLOv8n | OpenVINO | ✅ | 12.3 | 0.3703 | 8.52 | +| YOLOv8s | PyTorch | ✅ | 21.5 | 0.4471 | 189.83 | +| YOLOv8s | TorchScript | ✅ | 42.9 | 0.4472 | 227.58 | +| YOLOv8s | ONNX | ✅ | 42.7 | 0.4472 | 142.03 | +| YOLOv8s | OpenVINO | ✅ | 42.9 | 0.4469 | 9.19 | +| YOLOv8m | PyTorch | ✅ | 49.7 | 0.5013 | 411.64 | +| YOLOv8m | TorchScript | ✅ | 99.2 | 0.4999 | 517.12 | +| YOLOv8m | ONNX | ✅ | 98.9 | 0.4999 | 298.68 | +| YOLOv8m | OpenVINO | ✅ | 99.1 | 0.4996 | 12.55 | +| YOLOv8l | PyTorch | ✅ | 83.7 | 0.5293 | 725.73 | +| YOLOv8l | TorchScript | ✅ | 167.1 | 0.5268 | 892.83 | +| YOLOv8l | ONNX | ✅ | 166.8 | 0.5268 | 576.11 | +| YOLOv8l | OpenVINO | ✅ | 167.0 | 0.5262 | 17.62 | +| YOLOv8x | PyTorch | ✅ | 130.5 | 0.5404 | 988.92 | +| YOLOv8x | TorchScript | ✅ | 260.7 | 0.5371 | 1186.42 | +| YOLOv8x | ONNX | ✅ | 260.4 | 0.5371 | 768.90 | +| YOLOv8x | OpenVINO | ✅ | 260.6 | 0.5367 | 19 | + +### Intel Xeon CPU + +The Intel® Xeon® CPU is a high-performance, server-grade processor designed for complex and demanding workloads. From high-end cloud computing and virtualization to artificial intelligence and machine learning applications, Xeon® CPUs provide the power, reliability, and flexibility required for today's data centers. + +Notably, Xeon® CPUs deliver high compute density and scalability, making them ideal for both small businesses and large enterprises. By choosing Intel® Xeon® CPUs, organizations can confidently handle their most demanding computing tasks and foster innovation while maintaining cost-effectiveness and operational efficiency. + +Benchmarks below run on 4th Gen Intel® Xeon® Scalable CPU at FP32 precision. + +
+Xeon CPU benchmarks +
+ +| Model | Format | Status | Size (MB) | metrics/mAP50-95(B) | Inference time (ms/im) | +|---------|-------------|--------|-----------|---------------------|------------------------| +| YOLOv8n | PyTorch | ✅ | 6.2 | 0.3709 | 24.36 | +| YOLOv8n | TorchScript | ✅ | 12.4 | 0.3704 | 23.93 | +| YOLOv8n | ONNX | ✅ | 12.2 | 0.3704 | 39.86 | +| YOLOv8n | OpenVINO | ✅ | 12.3 | 0.3704 | 11.34 | +| YOLOv8s | PyTorch | ✅ | 21.5 | 0.4471 | 33.77 | +| YOLOv8s | TorchScript | ✅ | 42.9 | 0.4472 | 34.84 | +| YOLOv8s | ONNX | ✅ | 42.8 | 0.4472 | 43.23 | +| YOLOv8s | OpenVINO | ✅ | 42.9 | 0.4471 | 13.86 | +| YOLOv8m | PyTorch | ✅ | 49.7 | 0.5013 | 53.91 | +| YOLOv8m | TorchScript | ✅ | 99.2 | 0.4999 | 53.51 | +| YOLOv8m | ONNX | ✅ | 99.0 | 0.4999 | 64.16 | +| YOLOv8m | OpenVINO | ✅ | 99.1 | 0.4996 | 28.79 | +| YOLOv8l | PyTorch | ✅ | 83.7 | 0.5293 | 75.78 | +| YOLOv8l | TorchScript | ✅ | 167.2 | 0.5268 | 79.13 | +| YOLOv8l | ONNX | ✅ | 166.8 | 0.5268 | 88.45 | +| YOLOv8l | OpenVINO | ✅ | 167.0 | 0.5263 | 56.23 | +| YOLOv8x | PyTorch | ✅ | 130.5 | 0.5404 | 96.60 | +| YOLOv8x | TorchScript | ✅ | 260.7 | 0.5371 | 114.28 | +| YOLOv8x | ONNX | ✅ | 260.4 | 0.5371 | 111.02 | +| YOLOv8x | OpenVINO | ✅ | 260.6 | 0.5371 | 83.28 | + +### Intel Core CPU + +The Intel® Core® series is a range of high-performance processors by Intel. The lineup includes Core i3 (entry-level), Core i5 (mid-range), Core i7 (high-end), and Core i9 (extreme performance). Each series caters to different computing needs and budgets, from everyday tasks to demanding professional workloads. With each new generation, improvements are made to performance, energy efficiency, and features. + +Benchmarks below run on 13th Gen Intel® Core® i7-13700H CPU at FP32 precision. + +
+Core CPU benchmarks +
+ +| Model | Format | Status | Size (MB) | metrics/mAP50-95(B) | Inference time (ms/im) | +|---------|-------------|--------|-----------|---------------------|------------------------| +| YOLOv8n | PyTorch | ✅ | 6.2 | 0.4478 | 104.61 | +| YOLOv8n | TorchScript | ✅ | 12.4 | 0.4525 | 112.39 | +| YOLOv8n | ONNX | ✅ | 12.2 | 0.4525 | 28.02 | +| YOLOv8n | OpenVINO | ✅ | 12.3 | 0.4504 | 23.53 | +| YOLOv8s | PyTorch | ✅ | 21.5 | 0.5885 | 194.83 | +| YOLOv8s | TorchScript | ✅ | 43.0 | 0.5962 | 202.01 | +| YOLOv8s | ONNX | ✅ | 42.8 | 0.5962 | 65.74 | +| YOLOv8s | OpenVINO | ✅ | 42.9 | 0.5966 | 38.66 | +| YOLOv8m | PyTorch | ✅ | 49.7 | 0.6101 | 355.23 | +| YOLOv8m | TorchScript | ✅ | 99.2 | 0.6120 | 424.78 | +| YOLOv8m | ONNX | ✅ | 99.0 | 0.6120 | 173.39 | +| YOLOv8m | OpenVINO | ✅ | 99.1 | 0.6091 | 69.80 | +| YOLOv8l | PyTorch | ✅ | 83.7 | 0.6591 | 593.00 | +| YOLOv8l | TorchScript | ✅ | 167.2 | 0.6580 | 697.54 | +| YOLOv8l | ONNX | ✅ | 166.8 | 0.6580 | 342.15 | +| YOLOv8l | OpenVINO | ✅ | 167.0 | 0.0708 | 117.69 | +| YOLOv8x | PyTorch | ✅ | 130.5 | 0.6651 | 804.65 | +| YOLOv8x | TorchScript | ✅ | 260.8 | 0.6650 | 921.46 | +| YOLOv8x | ONNX | ✅ | 260.4 | 0.6650 | 526.66 | +| YOLOv8x | OpenVINO | ✅ | 260.6 | 0.6619 | 158.73 | + +## Reproduce Our Results + +To reproduce the Ultralytics benchmarks above on all export [formats](../modes/export.md), run this code: + +!!! Example + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark YOLOv8n speed and accuracy on the COCO128 dataset for all export formats + results = benchmark(model='yolov8n.pt', data='coco128.yaml', imgsz=640) + ``` + === "CLI" + + ```bash + # Benchmark YOLOv8n speed and accuracy on the COCO128 dataset for all export formats + yolo benchmark model=yolov8n.pt data=coco128.yaml + ``` + + Note that benchmarking results might vary based on the exact hardware and software configuration of a system, as well as the current workload of the system at the time the benchmarks are run. For the most reliable results use a dataset with a large number of images, e.g. `data='coco128.yaml'` (128 val images), or `data='coco.yaml'` (5000 val images). + +## Conclusion + +The benchmarking results clearly demonstrate the benefits of exporting the YOLOv8 model to the OpenVINO format. Across different models and hardware platforms, the OpenVINO format consistently outperforms other formats in terms of inference speed while maintaining comparable accuracy. + +For the Intel® Data Center GPU Flex Series, the OpenVINO format was able to deliver inference speeds almost 10 times faster than the original PyTorch format. On the Xeon CPU, the OpenVINO format was twice as fast as the PyTorch format. The accuracy of the models remained nearly identical across the different formats. + +The benchmarks underline the effectiveness of OpenVINO as a tool for deploying deep learning models. By converting models to the OpenVINO format, developers can achieve significant performance improvements, making it easier to deploy these models in real-world applications. + +For more detailed information and instructions on using OpenVINO, refer to the [official OpenVINO documentation](https://docs.openvino.ai/).
diff --git a/ultralytics/docs/en/integrations/openvino.md:Zone.Identifier b/ultralytics/docs/en/integrations/openvino.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/openvino.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/ray-tune.md b/ultralytics/docs/en/integrations/ray-tune.md new file mode 100755 index 0000000..3825cf7 --- /dev/null +++ b/ultralytics/docs/en/integrations/ray-tune.md @@ -0,0 +1,179 @@ +--- +comments: true +description: Discover how to streamline hyperparameter tuning for YOLOv8 models with Ray Tune. Learn to accelerate tuning, integrate with Weights & Biases, and analyze results. +keywords: Ultralytics, YOLOv8, Ray Tune, hyperparameter tuning, machine learning optimization, Weights & Biases integration, result analysis +--- + +# Efficient Hyperparameter Tuning with Ray Tune and YOLOv8 + +Hyperparameter tuning is vital in achieving peak model performance by discovering the optimal set of hyperparameters. This involves running trials with different hyperparameters and evaluating each trial’s performance. + +## Accelerate Tuning with Ultralytics YOLOv8 and Ray Tune + +[Ultralytics YOLOv8](https://ultralytics.com) incorporates Ray Tune for hyperparameter tuning, streamlining the optimization of YOLOv8 model hyperparameters. With Ray Tune, you can utilize advanced search strategies, parallelism, and early stopping to expedite the tuning process. + +### Ray Tune + +

+ Ray Tune Overview +

+ +[Ray Tune](https://docs.ray.io/en/latest/tune/index.html) is a hyperparameter tuning library designed for efficiency and flexibility. It supports various search strategies, parallelism, and early stopping strategies, and seamlessly integrates with popular machine learning frameworks, including Ultralytics YOLOv8. + +### Integration with Weights & Biases + +YOLOv8 also allows optional integration with [Weights & Biases](https://wandb.ai/site) for monitoring the tuning process. + +## Installation + +To install the required packages, run: + +!!! Tip "Installation" + + === "CLI" + + ```bash + # Install and update Ultralytics and Ray Tune packages + pip install -U ultralytics "ray[tune]" + + # Optionally install W&B for logging + pip install wandb + ``` + +## Usage + +!!! Example "Usage" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a YOLOv8n model + model = YOLO('yolov8n.pt') + + # Start tuning hyperparameters for YOLOv8n training on the COCO8 dataset + result_grid = model.tune(data='coco8.yaml', use_ray=True) + ``` + +## `tune()` Method Parameters + +The `tune()` method in YOLOv8 provides an easy-to-use interface for hyperparameter tuning with Ray Tune. It accepts several arguments that allow you to customize the tuning process. Below is a detailed explanation of each parameter: + +| Parameter | Type | Description | Default Value | +|-----------------|------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------| +| `data` | `str` | The dataset configuration file (in YAML format) to run the tuner on. This file should specify the training and validation data paths, as well as other dataset-specific settings. | | +| `space` | `dict, optional` | A dictionary defining the hyperparameter search space for Ray Tune. Each key corresponds to a hyperparameter name, and the value specifies the range of values to explore during tuning. If not provided, YOLOv8 uses a default search space with various hyperparameters. | | +| `grace_period` | `int, optional` | The grace period in epochs for the [ASHA scheduler](https://docs.ray.io/en/latest/tune/api/schedulers.html) in Ray Tune. The scheduler will not terminate any trial before this number of epochs, allowing the model to have some minimum training before making a decision on early stopping. | 10 | +| `gpu_per_trial` | `int, optional` | The number of GPUs to allocate per trial during tuning. This helps manage GPU usage, particularly in multi-GPU environments. If not provided, the tuner will use all available GPUs. | None | +| `iterations` | `int, optional` | The maximum number of trials to run during tuning. This parameter helps control the total number of hyperparameter combinations tested, ensuring the tuning process does not run indefinitely. | 10 | +| `**train_args` | `dict, optional` | Additional arguments to pass to the `train()` method during tuning. These arguments can include settings like the number of training epochs, batch size, and other training-specific configurations. | {} | + +By customizing these parameters, you can fine-tune the hyperparameter optimization process to suit your specific needs and available computational resources. 
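For instance, the sketch below combines several of these parameters; the values are illustrative examples, and `epochs` and `batch` are forwarded to `train()` through `**train_args`:

```python
from ultralytics import YOLO

# Load a YOLOv8n model
model = YOLO('yolov8n.pt')

# Run 5 tuning trials with 1 GPU each, keeping every trial alive for at
# least 5 epochs before the ASHA scheduler may stop it early
result_grid = model.tune(
    data='coco8.yaml',
    use_ray=True,
    iterations=5,
    gpu_per_trial=1,
    grace_period=5,
    epochs=30,  # forwarded to train()
    batch=16,   # forwarded to train()
)
```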
+ +## Default Search Space Description + +The following table lists the default search space parameters for hyperparameter tuning in YOLOv8 with Ray Tune. Each parameter has a specific value range defined by `tune.uniform()`. + +| Parameter | Value Range | Description | +|-------------------|----------------------------|------------------------------------------| +| `lr0` | `tune.uniform(1e-5, 1e-1)` | Initial learning rate | +| `lrf` | `tune.uniform(0.01, 1.0)` | Final learning rate factor | +| `momentum` | `tune.uniform(0.6, 0.98)` | Momentum | +| `weight_decay` | `tune.uniform(0.0, 0.001)` | Weight decay | +| `warmup_epochs` | `tune.uniform(0.0, 5.0)` | Warmup epochs | +| `warmup_momentum` | `tune.uniform(0.0, 0.95)` | Warmup momentum | +| `box` | `tune.uniform(0.02, 0.2)` | Box loss weight | +| `cls` | `tune.uniform(0.2, 4.0)` | Class loss weight | +| `hsv_h` | `tune.uniform(0.0, 0.1)` | Hue augmentation range | +| `hsv_s` | `tune.uniform(0.0, 0.9)` | Saturation augmentation range | +| `hsv_v` | `tune.uniform(0.0, 0.9)` | Value (brightness) augmentation range | +| `degrees` | `tune.uniform(0.0, 45.0)` | Rotation augmentation range (degrees) | +| `translate` | `tune.uniform(0.0, 0.9)` | Translation augmentation range | +| `scale` | `tune.uniform(0.0, 0.9)` | Scaling augmentation range | +| `shear` | `tune.uniform(0.0, 10.0)` | Shear augmentation range (degrees) | +| `perspective` | `tune.uniform(0.0, 0.001)` | Perspective augmentation range | +| `flipud` | `tune.uniform(0.0, 1.0)` | Vertical flip augmentation probability | +| `fliplr` | `tune.uniform(0.0, 1.0)` | Horizontal flip augmentation probability | +| `mosaic` | `tune.uniform(0.0, 1.0)` | Mosaic augmentation probability | +| `mixup` | `tune.uniform(0.0, 1.0)` | Mixup augmentation probability | +| `copy_paste` | `tune.uniform(0.0, 1.0)` | Copy-paste augmentation probability | + +## Custom Search Space Example + +In this example, we demonstrate how to use a custom search space for hyperparameter tuning with Ray Tune and YOLOv8. By providing a custom search space, you can focus the tuning process on specific hyperparameters of interest. + +!!! Example "Usage" + + ```python + from ultralytics import YOLO + from ray import tune + + # Define a YOLO model + model = YOLO("yolov8n.pt") + + # Run Ray Tune on the model + result_grid = model.tune(data="coco128.yaml", + space={"lr0": tune.uniform(1e-5, 1e-1)}, + epochs=50, + use_ray=True) + ``` + +In the code snippet above, we create a YOLO model with the "yolov8n.pt" pretrained weights. Then, we call the `tune()` method, specifying the dataset configuration with "coco128.yaml". We provide a custom search space for the initial learning rate `lr0` using a dictionary with the key "lr0" and the value `tune.uniform(1e-5, 1e-1)`. Finally, we pass additional training arguments, such as the number of epochs, directly to the `tune()` method as `epochs=50`. + +## Processing Ray Tune Results + +After running a hyperparameter tuning experiment with Ray Tune, you might want to perform various analyses on the obtained results. This guide will take you through common workflows for processing and analyzing these results. + +### Loading Tune Experiment Results from a Directory + +After running the tuning experiment with `tuner.fit()`, you can load the results from a directory. This is useful, especially if you're performing the analysis after the initial training script has exited.
+ +```python +from ray import tune + +# 'storage_path', 'exp_name' and 'train_mnist' are placeholders for your own +# experiment's storage location, experiment name and trainable +experiment_path = f"{storage_path}/{exp_name}" +print(f"Loading results from {experiment_path}...") + +restored_tuner = tune.Tuner.restore(experiment_path, trainable=train_mnist) +result_grid = restored_tuner.get_results() +``` + +### Basic Experiment-Level Analysis + +Get an overview of how trials performed. You can quickly check if there were any errors during the trials. + +```python +if result_grid.errors: + print("One or more trials failed!") +else: + print("No errors!") +``` + +### Basic Trial-Level Analysis + +Access individual trial hyperparameter configurations and the last reported metrics. + +```python +for i, result in enumerate(result_grid): + print(f"Trial #{i}: Configuration: {result.config}, Last Reported Metrics: {result.metrics}") +``` + +### Plotting the Entire History of Reported Metrics for a Trial + +You can plot the history of reported metrics for each trial to see how the metrics evolved over time. + +```python +import matplotlib.pyplot as plt + +for i, result in enumerate(result_grid): + plt.plot(result.metrics_dataframe["training_iteration"], result.metrics_dataframe["mean_accuracy"], label=f"Trial {i}") + +plt.xlabel('Training Iterations') +plt.ylabel('Mean Accuracy') +plt.legend() +plt.show() +``` + +## Summary + +In this documentation, we covered common workflows to analyze the results of experiments run with Ray Tune using Ultralytics. The key steps include loading the experiment results from a directory, performing basic experiment-level and trial-level analysis, and plotting metrics. + +Explore further by looking into Ray Tune’s [Analyze Results](https://docs.ray.io/en/latest/tune/examples/tune_analyze_results.html) docs page to get the most out of your hyperparameter tuning experiments. diff --git a/ultralytics/docs/en/integrations/ray-tune.md:Zone.Identifier b/ultralytics/docs/en/integrations/ray-tune.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/ray-tune.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/integrations/roboflow.md b/ultralytics/docs/en/integrations/roboflow.md new file mode 100755 index 0000000..b0bfb54 --- /dev/null +++ b/ultralytics/docs/en/integrations/roboflow.md @@ -0,0 +1,239 @@ +--- +comments: true +description: Learn how to use Roboflow with Ultralytics for labeling and managing images for use in training, and for evaluating model performance. +keywords: Ultralytics, YOLOv8, Roboflow, vector analysis, confusion matrix, data management, image labeling +--- + +# Roboflow + +[Roboflow](https://roboflow.com/?ref=ultralytics) has everything you need to build and deploy computer vision models. Connect Roboflow at any step in your pipeline with APIs and SDKs, or use the end-to-end interface to automate the entire process from image to inference. Whether you’re in need of [data labeling](https://roboflow.com/annotate?ref=ultralytics), [model training](https://roboflow.com/train?ref=ultralytics), or [model deployment](https://roboflow.com/deploy?ref=ultralytics), Roboflow gives you building blocks to bring custom computer vision solutions to your project. + +!!! Warning + + Roboflow users can use Ultralytics under the [AGPL license](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) or procure an [Enterprise license](https://ultralytics.com/license) directly from Ultralytics. Be aware that Roboflow does **not** provide Ultralytics licenses, and it is the responsibility of the user to ensure appropriate licensing.
+ +In this guide, we are going to showcase how to find, label, and organize data for use in training a custom Ultralytics YOLOv8 model. Use the table of contents below to jump directly to a specific section: + +- Gather data for training a custom YOLOv8 model +- Upload, convert and label data for YOLOv8 format +- Pre-process and augment data for model robustness +- Dataset management for [YOLOv8](https://docs.ultralytics.com/models/yolov8/) +- Export data in 40+ formats for model training +- Upload custom YOLOv8 model weights for testing and deployment + +## Gather Data for Training a Custom YOLOv8 Model + +Roboflow provides two services that can help you collect data for YOLOv8 models: [Universe](https://universe.roboflow.com/?ref=ultralytics) and [Collect](https://roboflow.com/collect?ref=ultralytics). + +Universe is an online repository with over 250,000 vision datasets totaling over 100 million images. + +

+Roboflow Universe +

+ +With a [free Roboflow account](https://app.roboflow.com/?ref=ultralytics), you can export any dataset available on Universe. To export a dataset, click the "Download this Dataset" button on any dataset. + + +

+Roboflow Universe dataset export +

+ +For YOLOv8, select "YOLOv8" as the export format: + +

+Roboflow Universe dataset export +

+ +Universe also has a page that aggregates all [public fine-tuned YOLOv8 models uploaded to Roboflow](https://universe.roboflow.com/search?q=model:yolov8). You can use this page to explore pre-trained models you can use for testing or [for automated data labeling](https://docs.roboflow.com/annotate/use-roboflow-annotate/model-assisted-labeling) or to prototype with [Roboflow inference](https://roboflow.com/inference?ref=ultralytics). + +If you want to gather images yourself, try [Collect](https://github.com/roboflow/roboflow-collect), an open source project that allows you to automatically gather images using a webcam on the edge. You can use text or image prompts with Collect to instruct what data should be collected, allowing you to capture only the useful data you need to build your vision model. + +## Upload, Convert and Label Data for YOLOv8 Format + +[Roboflow Annotate](https://docs.roboflow.com/annotate/use-roboflow-annotate) is an online annotation tool for use in labeling images for object detection, classification, and segmentation. + +To label data for a YOLOv8 object detection, instance segmentation, or classification model, first create a project in Roboflow. + +

+Create a Roboflow project +

+ +Next, upload your images, and any pre-existing annotations you have from other tools ([using one of the 40+ supported import formats](https://roboflow.com/formats?ref=ultralytics)), into Roboflow. + +

+Upload images to Roboflow +

+ +After you upload images, you will be taken to the Annotate page. There, select the batch of images you have uploaded, then click "Start Annotating" to label images. + +To label with bounding boxes, press the `B` key on your keyboard or click the box icon in the sidebar. Click on a point where you want to start your bounding box, then drag to create the box: + +

+Annotating an image in Roboflow +

+ +Once you have created an annotation, a pop-up will appear asking you to select a class for it. + +To label with polygons, press the `P` key on your keyboard, or click the polygon icon in the sidebar. With the polygon annotation tool enabled, click on individual points in the image to draw a polygon. + +Roboflow offers a SAM-based label assistant with which you can label images faster than ever. SAM (Segment Anything Model) is a state-of-the-art computer vision model that can precisely label images. With SAM, you can significantly speed up the image labeling process. Annotating images with polygons becomes as simple as a few clicks, rather than the tedious process of precisely clicking points around an object. + +To use the label assistant, click the cursor icon in the sidebar; SAM will then be loaded for use in your project. + +

+Annotating an image in Roboflow with SAM-powered label assist +

+ +Hover over any object in the image and SAM will recommend an annotation. You can hover to find the right place to annotate, then click to create your annotation. To amend your annotation to be more or less specific, you can click inside or outside the annotation SAM has created on the document. + +You can also add tags to images from the Tags panel in the sidebar. You can apply tags to data from a particular area, taken from a specific camera, and more. You can then use these tags to search through data for images matching a tag and generate versions of a dataset with images that contain a particular tag or set of tags. + +

+Adding tags to an image in Roboflow +

+ +Models hosted on Roboflow can be used with Label Assist, an automated annotation tool that uses your YOLOv8 model to recommend annotations. To use Label Assist, first upload a YOLOv8 model to Roboflow (see instructions later in the guide). Then, click the magic wand icon in the left sidebar and select your model for use in Label Assist. + +Choose a model, then click "Continue" to enable Label Assist: + +

+Enabling Label Assist +

+ +When you open new images for annotation, Label Assist will trigger and recommend annotations. + +

+Label Assist recommending an annotation

+ +## Dataset Management for YOLOv8 + +Roboflow provides a suite of tools for understanding computer vision datasets. + +First, you can use dataset search to find images that meet a semantic text description (e.g. find all images that contain people), or that meet a specified label (e.g. the image is associated with a specific tag). To use dataset search, click "Dataset" in the sidebar. Then, input a search query using the search bar and associated filters at the top of the page. + +For example, the following text query finds images that contain people in a dataset: + +

+Searching for an image +

+ +You can narrow your search to images with a particular tag using the "Tags" selector: + +

+Filter images by tag +

+ +Before you start training a model with your dataset, we recommend using Roboflow [Health Check](https://docs.roboflow.com/datasets/dataset-health-check), a web tool that provides insight into your dataset and how you can improve it prior to training a vision model. + +To use Health Check, click the "Health Check" sidebar link. A list of statistics will appear showing the average size of images in your dataset, class balance, a heatmap of where annotations are in your images, and more. + +

+Roboflow Health Check analysis +

+ +Health Check may recommend changes to help enhance dataset performance. For example, the class balance feature may show that there is an imbalance in labels that, if solved, may boost the performance of your model. + +## Export Data in 40+ Formats for Model Training + +To export your data, you will need a dataset version. A version is a state of your dataset, frozen in time. To create a version, first click "Versions" in the sidebar. Then, click the "Create New Version" button. On this page, you will be able to choose augmentations and preprocessing steps to apply to your dataset: + +

+Creating a dataset version on Roboflow +

+ +For each augmentation you select, a pop-up will appear allowing you to tune the augmentation to your needs. Here is an example of tuning a brightness augmentation within specified parameters: + +

+Applying augmentations to a dataset +

+ +When your dataset version has been generated, you can export your data into a range of formats. Click the "Export Dataset" button on your dataset version page to export your data: + +

+Exporting a dataset +

+ +You are now ready to train YOLOv8 on a custom dataset. Follow this [written guide](https://blog.roboflow.com/how-to-train-yolov8-on-a-custom-dataset/) and [YouTube video](https://www.youtube.com/watch?v=wuZtUMEiKWY) for step-by-step instructions, or refer to the [Ultralytics documentation](https://docs.ultralytics.com/modes/train/). + +## Upload Custom YOLOv8 Model Weights for Testing and Deployment + +Roboflow offers an infinitely scalable API for deployed models and SDKs for use with NVIDIA Jetsons, Luxonis OAKs, Raspberry Pis, GPU-based devices, and more. + +You can deploy YOLOv8 models by uploading YOLOv8 weights to Roboflow. You can do this in a few lines of Python code. Create a new Python file and add the following code: + +```python +import roboflow # install with 'pip install roboflow' + +# Replace these placeholder values with your own workspace ID, dataset version +# number, and the directory containing your trained weights +WORKSPACE_ID = "your-workspace-id" +VERSION = 1 +HOME = "/path/to/your/project" + +roboflow.login() + +rf = roboflow.Roboflow() + +project = rf.workspace(WORKSPACE_ID).project("football-players-detection-3zvbc") +dataset = project.version(VERSION).download("yolov8") + +project.version(dataset.version).deploy(model_type="yolov8", model_path=f"{HOME}/runs/detect/train/") +``` + +In this code, replace the project ID and version ID with the values for your account and project. [Learn how to retrieve your Roboflow API key](https://docs.roboflow.com/api-reference/authentication#retrieve-an-api-key). + +When you run the code above, you will be asked to authenticate. Then, your model will be uploaded and an API will be created for your project. This process can take up to 30 minutes to complete. + +To test your model and find deployment instructions for supported SDKs, go to the "Deploy" tab in the Roboflow sidebar. At the top of this page, a widget will appear with which you can test your model. You can use your webcam for live testing or upload images or videos. + +

+Running inference on an example image +

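As a programmatic alternative to the widget, you can query your hosted model with the Roboflow Python SDK. The sketch below is illustrative: the workspace, project, and version identifiers are placeholders for your own values.

```python
import roboflow  # install with 'pip install roboflow'

# Authenticate and load the hosted model (placeholder identifiers)
rf = roboflow.Roboflow(api_key="YOUR_API_KEY")
project = rf.workspace("your-workspace").project("your-project")
model = project.version(1).model

# Run inference on a local image and print the JSON predictions
predictions = model.predict("example.jpg", confidence=40, overlap=30).json()
print(predictions)
```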
+ +You can also use your uploaded model as a [labeling assistant](https://docs.roboflow.com/annotate/use-roboflow-annotate/model-assisted-labeling). This feature uses your trained model to recommend annotations on images uploaded to Roboflow. + +## How to Evaluate YOLOv8 Models + +Roboflow provides a range of features for use in evaluating models. + +Once you have uploaded a model to Roboflow, you can access our model evaluation tool, which provides a confusion matrix showing the performance of your model as well as an interactive vector analysis plot. These features can help you find opportunities to improve your model. + +To access a confusion matrix, go to your model page on the Roboflow dashboard, then click "View Detailed Evaluation": + +

+Start a Roboflow model evaluation +

+ +A pop-up will appear showing a confusion matrix: + +

+A confusion matrix +

+ +Hover over a box on the confusion matrix to see the value associated with the box. Click on a box to see images in the respective category. Click on an image to view the model predictions and ground truth data associated with that image. + +For more insights, click Vector Analysis. This will show a scatter plot of the images in your dataset, calculated using CLIP. The closer images are in the plot, the more similar they are, semantically. Each image is represented as a dot with a color between white and red. The more red the dot, the worse the model performed. + +

+A vector analysis plot +

+ +You can use Vector Analysis to: + +- Find clusters of images; +- Identify clusters where the model performs poorly; and +- Visualize commonalities between images on which the model performs poorly. + +## Learning Resources + +Want to learn more about using Roboflow for creating YOLOv8 models? The following resources may be helpful in your work. + +- [Train YOLOv8 on a Custom Dataset](https://github.com/roboflow/notebooks/blob/main/notebooks/train-yolov8-object-detection-on-custom-dataset.ipynb): Follow our interactive notebook that shows you how to train a YOLOv8 model on a custom dataset. +- [Autodistill](https://autodistill.github.io/autodistill/): Use large foundation vision models to label data for specific models. You can label images for use in training YOLOv8 classification, detection, and segmentation models with Autodistill. +- [Supervision](https://roboflow.github.io/supervision/): A Python package with helpful utilities for use in working with computer vision models. You can use supervision to filter detections, compute confusion matrices, and more, all in a few lines of Python code. +- [Roboflow Blog](https://blog.roboflow.com/): The Roboflow Blog features over 500 articles on computer vision, covering topics from how to train a YOLOv8 model to annotation best practices. +- [Roboflow YouTube channel](https://www.youtube.com/@Roboflow): Browse dozens of in-depth computer vision guides on our YouTube channel, covering topics from training YOLOv8 models to automated image labeling. + +## Project Showcase + +Below are a few of the many pieces of feedback we have received for using YOLOv8 and Roboflow together to create computer vision models. + +

+Showcase image +Showcase image +Showcase image +

diff --git a/ultralytics/docs/en/integrations/roboflow.md:Zone.Identifier b/ultralytics/docs/en/integrations/roboflow.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/integrations/roboflow.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/fast-sam.md b/ultralytics/docs/en/models/fast-sam.md new file mode 100755 index 0000000..d528d47 --- /dev/null +++ b/ultralytics/docs/en/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: Explore FastSAM, a CNN-based solution for real-time object segmentation in images. Enhanced user interaction, computational efficiency and adaptable across vision tasks. +keywords: FastSAM, machine learning, CNN-based solution, object segmentation, real-time solution, Ultralytics, vision tasks, image processing, industrial applications, user interaction +--- + +# Fast Segment Anything Model (FastSAM) + +The Fast Segment Anything Model (FastSAM) is a novel, real-time CNN-based solution for the Segment Anything task. This task is designed to segment any object within an image based on various possible user interaction prompts. FastSAM significantly reduces computational demands while maintaining competitive performance, making it a practical choice for a variety of vision tasks. + +![Fast Segment Anything Model (FastSAM) architecture overview](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## Overview + +FastSAM is designed to address the limitations of the [Segment Anything Model (SAM)](sam.md), a heavy Transformer model with substantial computational resource requirements. The FastSAM decouples the segment anything task into two sequential stages: all-instance segmentation and prompt-guided selection. The first stage uses [YOLOv8-seg](../tasks/segment.md) to produce the segmentation masks of all instances in the image. In the second stage, it outputs the region-of-interest corresponding to the prompt. + +## Key Features + +1. **Real-time Solution:** By leveraging the computational efficiency of CNNs, FastSAM provides a real-time solution for the segment anything task, making it valuable for industrial applications that require quick results. + +2. **Efficiency and Performance:** FastSAM offers a significant reduction in computational and resource demands without compromising on performance quality. It achieves comparable performance to SAM but with drastically reduced computational resources, enabling real-time application. + +3. **Prompt-guided Segmentation:** FastSAM can segment any object within an image guided by various possible user interaction prompts, providing flexibility and adaptability in different scenarios. + +4. **Based on YOLOv8-seg:** FastSAM is based on [YOLOv8-seg](../tasks/segment.md), an object detector equipped with an instance segmentation branch. This allows it to effectively produce the segmentation masks of all instances in an image. + +5. **Competitive Results on Benchmarks:** On the object proposal task on MS COCO, FastSAM achieves high scores at a significantly faster speed than [SAM](sam.md) on a single NVIDIA RTX 3090, demonstrating its efficiency and capability. + +6. **Practical Applications:** The proposed approach provides a new, practical solution for a large number of vision tasks at a really high speed, tens or hundreds of times faster than current methods. + +7. 
**Model Compression Feasibility:** FastSAM demonstrates the feasibility of a path that can significantly reduce the computational effort by introducing an artificial prior to the structure, thus opening new possibilities for large model architecture for general vision tasks. + +## Available Models, Supported Tasks, and Operating Modes + +This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes. + +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------| +| FastSAM-s | `FastSAM-s.pt` | [Instance Segmentation](../tasks/segment.md) | ✅ | ❌ | ❌ | ✅ | +| FastSAM-x | `FastSAM-x.pt` | [Instance Segmentation](../tasks/segment.md) | ✅ | ❌ | ❌ | ✅ | + +## Usage Examples + +The FastSAM models are easy to integrate into your Python applications. Ultralytics provides a user-friendly Python API and CLI commands to streamline development. + +### Predict Usage + +To perform object detection on an image, use the `predict` method as shown below: + +!!! Example + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # Define an inference source + source = 'path/to/bus.jpg' + + # Create a FastSAM model + model = FastSAM('FastSAM-s.pt') # or FastSAM-x.pt + + # Run inference on an image + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # Prepare a Prompt Process object + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # Everything prompt + ann = prompt_process.everything_prompt() + + # Bounding box prompt; boxes are given as [x1, y1, x2, y2] (default [0, 0, 0, 0]) + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # Text prompt + ann = prompt_process.text_prompt(text='a photo of a dog') + + # Point prompt; points are given as [[x1, y1], [x2, y2], ...] (default [[0, 0]]) + # pointlabel: 1 = foreground, 0 = background (default [0]) + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # Load a FastSAM model and segment everything with it + yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640 + ``` + +This snippet demonstrates the simplicity of loading a pre-trained model and running a prediction on an image. + +### Val Usage + +Validation of the model on a dataset can be done as follows: + +!!! Example + + === "Python" + ```python + from ultralytics import FastSAM + + # Create a FastSAM model + model = FastSAM('FastSAM-s.pt') # or FastSAM-x.pt + + # Validate the model + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # Load a FastSAM model and validate it on the COCO8-seg example dataset at image size 640 + yolo segment val model=FastSAM-s.pt data=coco8-seg.yaml imgsz=640 + ``` + +Please note that FastSAM only supports detection and segmentation of a single class of object. This means it will recognize and segment all objects as the same class. Therefore, when preparing the dataset, you need to convert all object category IDs to 0.
+ +## FastSAM official Usage + +FastSAM is also available directly from the [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) repository. Here is a brief overview of the typical steps you might take to use FastSAM: + +### Installation + +1. Clone the FastSAM repository: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Create and activate a Conda environment with Python 3.9: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. Navigate to the cloned repository and install the required packages: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. Install the CLIP model: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### Example Usage + +1. Download a [model checkpoint](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing). + +2. Use FastSAM for inference. Example commands: + + - Segment everything in an image: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - Segment specific objects using text prompt: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog" + ``` + + - Segment objects within a bounding box (provide box coordinates in xywh format): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - Segment objects near specific points: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +Additionally, you can try FastSAM through a [Colab demo](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) or on the [HuggingFace web demo](https://huggingface.co/spaces/An-619/FastSAM) for a visual experience. + +## Citations and Acknowledgements + +We would like to acknowledge the FastSAM authors for their significant contributions in the field of real-time instance segmentation: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +The original FastSAM paper can be found on [arXiv](https://arxiv.org/abs/2306.12156). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/en/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/en/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/index.md b/ultralytics/docs/en/models/index.md new file mode 100755 index 0000000..93bbdb9 --- /dev/null +++ b/ultralytics/docs/en/models/index.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Explore the diverse range of YOLO family, SAM, MobileSAM, FastSAM, YOLO-NAS, and RT-DETR models supported by Ultralytics. Get started with examples for both CLI and Python usage. 
+keywords: Ultralytics, documentation, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, models, architectures, Python, CLI +--- + +# Models Supported by Ultralytics + +Welcome to Ultralytics' model documentation! We offer support for a wide range of models, each tailored to specific tasks like [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you're interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../help/contributing.md). + +## Featured Models + +Here are some of the key models supported: + +1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family, originally by Joseph Redmon, known for its efficient real-time object detection capabilities. +2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020. +3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions. +4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022, and in use in many of the company's autonomous delivery robots. +5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4. +6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoints estimation, and classification. +7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM). +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University. +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences. +10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) Models. +11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models. + +

+
+ +
+ Watch: Run Ultralytics YOLO models in just a few lines of code. +

+ +## Getting Started: Usage Examples + +This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +Note the below example is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) and [Pose](../tasks/pose.md) docs. + +!!! Example + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()` and `RTDETR()` classes to create a model instance in Python: + + ```python + from ultralytics import YOLO + + # Load a COCO-pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv8n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## Contributing New Models + +Interested in contributing your model to Ultralytics? Great! We're always open to expanding our model portfolio. + +1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics). + +2. **Clone Your Fork**: Clone your fork to your local machine and create a new branch to work on. + +3. **Implement Your Model**: Add your model following the coding standards and guidelines provided in our [Contributing Guide](../help/contributing.md). + +4. **Test Thoroughly**: Make sure to test your model rigorously, both in isolation and as part of the pipeline. + +5. **Create a Pull Request**: Once you're satisfied with your model, create a pull request to the main repository for review. + +6. **Code Review & Merging**: After review, if your model meets our criteria, it will be merged into the main repository. + +For detailed steps, consult our [Contributing Guide](../help/contributing.md). diff --git a/ultralytics/docs/en/models/index.md:Zone.Identifier b/ultralytics/docs/en/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/mobile-sam.md b/ultralytics/docs/en/models/mobile-sam.md new file mode 100755 index 0000000..51700f6 --- /dev/null +++ b/ultralytics/docs/en/models/mobile-sam.md @@ -0,0 +1,117 @@ +--- +comments: true +description: Learn more about MobileSAM, its implementation, comparison with the original SAM, and how to download and test it in the Ultralytics framework. Improve your mobile applications today. 
+keywords: MobileSAM, Ultralytics, SAM, mobile applications, Arxiv, GPU, API, image encoder, mask decoder, model download, testing method +--- + +![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# Mobile Segment Anything (MobileSAM) + +The MobileSAM paper is now available on [arXiv](https://arxiv.org/pdf/2306.14289.pdf). + +A demonstration of MobileSAM running on a CPU can be accessed at this [demo link](https://huggingface.co/spaces/dhkim2810/MobileSAM). Inference on a Mac i5 CPU takes approximately 3 seconds. On the Hugging Face demo, the interface and lower-performance CPUs contribute to a slower response, but it continues to function effectively. + +MobileSAM is implemented in various projects including [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling), and [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D). + +MobileSAM is trained on a single GPU with a 100k dataset (1% of the original images) in less than a day. The code for this training will be made available in the future. + +## Available Models, Supported Tasks, and Operating Modes + +This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes. + +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------| +| MobileSAM | `mobile_sam.pt` | [Instance Segmentation](../tasks/segment.md) | ✅ | ❌ | ❌ | ❌ | + +## Adapting from SAM to MobileSAM + +Since MobileSAM retains the same pipeline as the original SAM, we have incorporated the original's pre-processing, post-processing, and all other interfaces. Consequently, those currently using the original SAM can transition to MobileSAM with minimal effort. + +MobileSAM performs comparably to the original SAM and retains the same pipeline except for a change in the image encoder. Specifically, we replace the original heavyweight ViT-H encoder (632M) with a smaller Tiny-ViT (5M). On a single GPU, MobileSAM operates at about 12ms per image: 8ms on the image encoder and 4ms on the mask decoder. + +The following table provides a comparison of ViT-based image encoders: + +| Image Encoder | Original SAM | MobileSAM | +|---------------|--------------|-----------| +| Parameters | 611M | 5M | +| Speed | 452ms | 8ms | + +Both the original SAM and MobileSAM utilize the same prompt-guided mask decoder: + +| Mask Decoder | Original SAM | MobileSAM | +|--------------|--------------|-----------| +| Parameters | 3.876M | 3.876M | +| Speed | 4ms | 4ms | + +Here is the comparison of the whole pipeline: + +| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM | +|--------------------------|--------------|-----------| +| Parameters | 615M | 9.66M | +| Speed | 456ms | 12ms | + +The performance of MobileSAM and the original SAM is demonstrated using both a point and a box as prompts.
+ +![Image with Point as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![Image with Box as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +With its superior performance, MobileSAM is approximately 5 times smaller and 7 times faster than the current FastSAM. More details are available at the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM). + +## Testing MobileSAM in Ultralytics + +Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both Point and Box prompts. + +### Model Download + +You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt). + +### Point Prompt + +!!! Example + + === "Python" + ```python + from ultralytics import SAM + + # Load the model + model = SAM('mobile_sam.pt') + + # Predict a segment based on a point prompt + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### Box Prompt + +!!! Example + + === "Python" + ```python + from ultralytics import SAM + + # Load the model + model = SAM('mobile_sam.pt') + + # Predict a segment based on a box prompt + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +We have implemented `MobileSAM` and `SAM` using the same API. For more usage information, please see the [SAM page](sam.md). + +## Citations and Acknowledgements + +If you find MobileSAM useful in your research or development work, please consider citing our paper: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } + ``` diff --git a/ultralytics/docs/en/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/en/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/rtdetr.md b/ultralytics/docs/en/models/rtdetr.md new file mode 100755 index 0000000..b7d449a --- /dev/null +++ b/ultralytics/docs/en/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: Discover the features and benefits of RT-DETR, Baiduโ€™s efficient and adaptable real-time object detector powered by Vision Transformers, including pre-trained models. +keywords: RT-DETR, Baidu, Vision Transformers, object detection, real-time performance, CUDA, TensorRT, IoU-aware query selection, Ultralytics, Python API, PaddlePaddle +--- + +# Baidu's RT-DETR: A Vision Transformer-Based Real-Time Object Detector + +## Overview + +Real-Time Detection Transformer (RT-DETR), developed by Baidu, is a cutting-edge end-to-end object detector that provides real-time performance while maintaining high accuracy. It leverages the power of Vision Transformers (ViT) to efficiently process multiscale features by decoupling intra-scale interaction and cross-scale fusion. RT-DETR is highly adaptable, supporting flexible adjustment of inference speed using different decoder layers without retraining. The model excels on accelerated backends like CUDA with TensorRT, outperforming many other real-time object detectors. 
+
+![Model example image](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Overview of Baidu's RT-DETR.** The RT-DETR model architecture diagram shows the last three stages of the backbone {S3, S4, S5} as the input to the encoder. The efficient hybrid encoder transforms multiscale features into a sequence of image features through intrascale feature interaction (AIFI) and cross-scale feature-fusion module (CCFM). The IoU-aware query selection is employed to select a fixed number of image features to serve as initial object queries for the decoder. Finally, the decoder with auxiliary prediction heads iteratively optimizes object queries to generate boxes and confidence scores ([source](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Key Features
+
+- **Efficient Hybrid Encoder:** Baidu's RT-DETR uses an efficient hybrid encoder that processes multiscale features by decoupling intra-scale interaction and cross-scale fusion. This unique Vision Transformers-based design reduces computational costs and allows for real-time object detection.
+- **IoU-aware Query Selection:** Baidu's RT-DETR improves object query initialization by utilizing IoU-aware query selection. This allows the model to focus on the most relevant objects in the scene, enhancing the detection accuracy.
+- **Adaptable Inference Speed:** Baidu's RT-DETR supports flexible adjustments of inference speed by using different decoder layers without the need for retraining. This adaptability facilitates practical application in various real-time object detection scenarios.
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models with different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on T4 GPU
+
+## Usage Examples
+
+This example provides simple RT-DETR training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Load a COCO-pretrained RT-DETR-l model
+        model = RTDETR('rtdetr-l.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the RT-DETR-l model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a COCO-pretrained RT-DETR-l model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained RT-DETR-l model and run inference on the 'bus.jpg' image
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+This table presents the model types, the specific pre-trained weights, the tasks supported by each model, and the various modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) that are supported, indicated by โœ… emojis.
+ +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|---------------------|---------------------|----------------------------------------|-----------|------------|----------|--------| +| RT-DETR Large | `rtdetr-l.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| RT-DETR Extra-Large | `rtdetr-x.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +## Citations and Acknowledgements + +If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069): + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{lv2023detrs, + title={DETRs Beat YOLOs on Real-time Object Detection}, + author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu}, + year={2023}, + eprint={2304.08069}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +We would like to acknowledge Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community. Their contribution to the field with the development of the Vision Transformers-based real-time object detector, RT-DETR, is greatly appreciated. + +*Keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, real-time object detection, Vision Transformers-based object detection, pre-trained PaddlePaddle RT-DETR models, Baidu's RT-DETR usage, Ultralytics Python API* diff --git a/ultralytics/docs/en/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/en/models/rtdetr.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/rtdetr.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/sam.md b/ultralytics/docs/en/models/sam.md new file mode 100755 index 0000000..3db6f3d --- /dev/null +++ b/ultralytics/docs/en/models/sam.md @@ -0,0 +1,226 @@ +--- +comments: true +description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that allows real-time image segmentation. Learn about its promptable segmentation, zero-shot performance, and how to use it. +keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning +--- + +# Segment Anything Model (SAM) + +Welcome to the frontier of image segmentation with the Segment Anything Model, or SAM. This revolutionary model has changed the game by introducing promptable image segmentation with real-time performance, setting new standards in the field. + +## Introduction to SAM: The Segment Anything Model + +The Segment Anything Model, or SAM, is a cutting-edge image segmentation model that allows for promptable segmentation, providing unparalleled versatility in image analysis tasks. SAM forms the heart of the Segment Anything initiative, a groundbreaking project that introduces a novel model, task, and dataset for image segmentation. + +SAM's advanced design allows it to adapt to new image distributions and tasks without prior knowledge, a feature known as zero-shot transfer. 
Trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks spread over 11 million carefully curated images, SAM has displayed impressive zero-shot performance, surpassing previous fully supervised results in many cases. + +![Dataset sample image](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +Example images with overlaid masks from our newly introduced dataset, SA-1B. SA-1B contains 11M diverse, high-resolution, licensed, and privacy protecting images and 1.1B high-quality segmentation masks. These masks were annotated fully automatically by SAM, and as verified by human ratings and numerous experiments, are of high quality and diversity. Images are grouped by number of masks per image for visualization (there are โˆผ100 masks per image on average). + +## Key Features of the Segment Anything Model (SAM) + +- **Promptable Segmentation Task:** SAM was designed with a promptable segmentation task in mind, allowing it to generate valid segmentation masks from any given prompt, such as spatial or text clues identifying an object. +- **Advanced Architecture:** The Segment Anything Model employs a powerful image encoder, a prompt encoder, and a lightweight mask decoder. This unique architecture enables flexible prompting, real-time mask computation, and ambiguity awareness in segmentation tasks. +- **The SA-1B Dataset:** Introduced by the Segment Anything project, the SA-1B dataset features over 1 billion masks on 11 million images. As the largest segmentation dataset to date, it provides SAM with a diverse and large-scale training data source. +- **Zero-Shot Performance:** SAM displays outstanding zero-shot performance across various segmentation tasks, making it a ready-to-use tool for diverse applications with minimal need for prompt engineering. + +For an in-depth look at the Segment Anything Model and the SA-1B dataset, please visit the [Segment Anything website](https://segment-anything.com) and check out the research paper [Segment Anything](https://arxiv.org/abs/2304.02643). + +## Available Models, Supported Tasks, and Operating Modes + +This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes like [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by โœ… emojis for supported modes and โŒ emojis for unsupported modes. + +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------| +| SAM base | `sam_b.pt` | [Instance Segmentation](../tasks/segment.md) | โœ… | โŒ | โŒ | โŒ | +| SAM large | `sam_l.pt` | [Instance Segmentation](../tasks/segment.md) | โœ… | โŒ | โŒ | โŒ | + +## How to Use SAM: Versatility and Power in Image Segmentation + +The Segment Anything Model can be employed for a multitude of downstream tasks that go beyond its training data. This includes edge detection, object proposal generation, instance segmentation, and preliminary text-to-mask prediction. With prompt engineering, SAM can swiftly adapt to new tasks and data distributions in a zero-shot manner, establishing it as a versatile and potent tool for all your image segmentation needs. + +### SAM prediction example + +!!! 
Example "Segment with prompts"

    Segment the image with the given prompts.

    === "Python"

        ```python
        from ultralytics import SAM

        # Load a model
        model = SAM('sam_b.pt')

        # Display model information (optional)
        model.info()

        # Run inference with bboxes prompt
        model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])

        # Run inference with points prompt
        model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
        ```

!!! Example "Segment everything"

    Segment the whole image.

    === "Python"

        ```python
        from ultralytics import SAM

        # Load a model
        model = SAM('sam_b.pt')

        # Display model information (optional)
        model.info()

        # Run inference
        model('path/to/image.jpg')
        ```

    === "CLI"

        ```bash
        # Run inference with a SAM model
        yolo predict model=sam_b.pt source=path/to/image.jpg
        ```

- The logic here is to segment the whole image if you don't pass any prompts (bboxes/points/masks).

!!! Example "SAMPredictor example"

    This way you can set the image once and run prompt inference multiple times without running the image encoder each time.

    === "Prompt inference"

        ```python
        import cv2  # OpenCV, used to load the image as a np.ndarray

        from ultralytics.models.sam import Predictor as SAMPredictor

        # Create SAMPredictor
        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
        predictor = SAMPredictor(overrides=overrides)

        # Set image
        predictor.set_image("ultralytics/assets/zidane.jpg")  # set with image file
        predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg"))  # set with np.ndarray
        results = predictor(bboxes=[439, 437, 524, 709])
        results = predictor(points=[900, 370], labels=[1])

        # Reset image
        predictor.reset_image()
        ```

    Segment everything with additional args.

    === "Segment everything"

        ```python
        from ultralytics.models.sam import Predictor as SAMPredictor

        # Create SAMPredictor
        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
        predictor = SAMPredictor(overrides=overrides)

        # Segment with additional args
        results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64)
        ```

- For more arguments for `Segment everything`, see the [`Predictor/generate` Reference](../reference/models/sam/predict.md).

## SAM comparison vs YOLOv8

Here we compare Meta's smallest SAM model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):

| Model                                          | Size                       | Parameters             | Speed (CPU)                |
|------------------------------------------------|----------------------------|------------------------|----------------------------|
| Meta's SAM-b                                   | 358 MB                     | 94.7 M                 | 51096 ms/im                |
| [MobileSAM](mobile-sam.md)                     | 40.7 MB                    | 10.1 M                 | 46122 ms/im                |
| [FastSAM-s](fast-sam.md) with YOLOv8 backbone  | 23.7 MB                    | 11.8 M                 | 115 ms/im                  |
| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x less) | **59 ms/im** (866x faster) |

This comparison shows the order-of-magnitude differences in model sizes and speeds between the models. While SAM offers unique capabilities for automatic segmentation, it is not a direct competitor to YOLOv8 segment models, which are smaller, faster, and more efficient.

Tests run on a 2023 Apple M2 MacBook with 16GB of RAM. To reproduce this test:

!!! 
Example + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # Profile SAM-b + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # Profile MobileSAM + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # Profile FastSAM-s + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # Profile YOLOv8n-seg + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## Auto-Annotation: A Quick Path to Segmentation Datasets + +Auto-annotation is a key feature of SAM, allowing users to generate a [segmentation dataset](https://docs.ultralytics.com/datasets/segment) using a pre-trained detection model. This feature enables rapid and accurate annotation of a large number of images, bypassing the need for time-consuming manual labeling. + +### Generate Your Segmentation Dataset Using a Detection Model + +To auto-annotate your dataset with the Ultralytics framework, use the `auto_annotate` function as shown below: + +!!! Example + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| Argument | Type | Description | Default | +|------------|---------------------|---------------------------------------------------------------------------------------------------------|--------------| +| data | str | Path to a folder containing images to be annotated. | | +| det_model | str, optional | Pre-trained YOLO detection model. Defaults to 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | str, optional | Pre-trained SAM segmentation model. Defaults to 'sam_b.pt'. | 'sam_b.pt' | +| device | str, optional | Device to run the models on. Defaults to an empty string (CPU or GPU, if available). | | +| output_dir | str, None, optional | Directory to save the annotated results. Defaults to a 'labels' folder in the same directory as 'data'. | None | + +The `auto_annotate` function takes the path to your images, with optional arguments for specifying the pre-trained detection and SAM segmentation models, the device to run the models on, and the output directory for saving the annotated results. + +Auto-annotation with pre-trained models can dramatically cut down the time and effort required for creating high-quality segmentation datasets. This feature is especially beneficial for researchers and developers dealing with large image collections, as it allows them to focus on model development and evaluation rather than manual annotation. + +## Citations and Acknowledgements + +If you find SAM useful in your research or development work, please consider citing our paper: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community. 
+ +*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot performance, SA-1B dataset, advanced architecture, auto-annotation, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, AI, artificial intelligence, machine learning, data annotation, segmentation masks, detection model, YOLO detection model, bibtex, Meta AI.* diff --git a/ultralytics/docs/en/models/sam.md:Zone.Identifier b/ultralytics/docs/en/models/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolo-nas.md b/ultralytics/docs/en/models/yolo-nas.md new file mode 100755 index 0000000..a8e550b --- /dev/null +++ b/ultralytics/docs/en/models/yolo-nas.md @@ -0,0 +1,121 @@ +--- +comments: true +description: Explore detailed documentation of YOLO-NAS, a superior object detection model. Learn about its features, pre-trained models, usage with Ultralytics Python API, and more. +keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100 +--- + +# YOLO-NAS + +## Overview + +Developed by Deci AI, YOLO-NAS is a groundbreaking object detection foundational model. It is the product of advanced Neural Architecture Search technology, meticulously designed to address the limitations of previous YOLO models. With significant improvements in quantization support and accuracy-latency trade-offs, YOLO-NAS represents a major leap in object detection. + +![Model example image](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**Overview of YOLO-NAS.** YOLO-NAS employs quantization-aware blocks and selective quantization for optimal performance. The model, when converted to its INT8 quantized version, experiences a minimal precision drop, a significant improvement over other models. These advancements culminate in a superior architecture with unprecedented object detection capabilities and outstanding performance. + +### Key Features + +- **Quantization-Friendly Basic Block:** YOLO-NAS introduces a new basic block that is friendly to quantization, addressing one of the significant limitations of previous YOLO models. +- **Sophisticated Training and Quantization:** YOLO-NAS leverages advanced training schemes and post-training quantization to enhance performance. +- **AutoNAC Optimization and Pre-training:** YOLO-NAS utilizes AutoNAC optimization and is pre-trained on prominent datasets such as COCO, Objects365, and Roboflow 100. This pre-training makes it extremely suitable for downstream object detection tasks in production environments. + +## Pre-trained Models + +Experience the power of next-generation object detection with the pre-trained YOLO-NAS models provided by Ultralytics. These models are designed to deliver top-notch performance in terms of both speed and accuracy. 
Choose from a variety of options tailored to your specific needs:

| Model            | mAP   | Latency (ms) |
|------------------|-------|--------------|
| YOLO-NAS S       | 47.5  | 3.21         |
| YOLO-NAS M       | 51.55 | 5.85         |
| YOLO-NAS L       | 52.22 | 7.87         |
| YOLO-NAS S INT-8 | 47.03 | 2.36         |
| YOLO-NAS M INT-8 | 51.0  | 3.78         |
| YOLO-NAS L INT-8 | 52.1  | 4.78         |

Each model variant is designed to offer a balance between Mean Average Precision (mAP) and latency, helping you optimize your object detection tasks for both performance and speed.

## Usage Examples

Ultralytics has made YOLO-NAS models easy to integrate into your Python applications via our `ultralytics` python package. The package provides a user-friendly Python API to streamline the process.

The following examples show how to use YOLO-NAS models with the `ultralytics` package for inference and validation:

### Inference and Validation Examples

In this example we validate YOLO-NAS-s on the COCO8 dataset.

!!! Example

    This example provides simple inference and validation code for YOLO-NAS. For handling inference results see [Predict](../modes/predict.md) mode. For using YOLO-NAS with additional modes see [Val](../modes/val.md) and [Export](../modes/export.md). YOLO-NAS on the `ultralytics` package does not support training.

    === "Python"

        PyTorch pretrained `*.pt` model files can be passed to the `NAS()` class to create a model instance in python:

        ```python
        from ultralytics import NAS

        # Load a COCO-pretrained YOLO-NAS-s model
        model = NAS('yolo_nas_s.pt')

        # Display model information (optional)
        model.info()

        # Validate the model on the COCO8 example dataset
        results = model.val(data='coco8.yaml')

        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
        results = model('path/to/bus.jpg')
        ```

    === "CLI"

        CLI commands are available to directly run the models:

        ```bash
        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
        yolo val model=yolo_nas_s.pt data=coco8.yaml

        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
        ```

## Supported Tasks and Modes

We offer three variants of the YOLO-NAS models: Small (s), Medium (m), and Large (l). Each variant is designed to cater to different computational and performance needs:

- **YOLO-NAS-s**: Optimized for environments where computational resources are limited but efficiency is key.
- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
- **YOLO-NAS-l**: Tailored for scenarios requiring the highest accuracy, where computational resources are less of a constraint.

Below is a detailed overview of each model, including links to their pre-trained weights, the tasks they support, and their compatibility with different operating modes.
+ +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|------------|-----------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## Citations and Acknowledgements + +If you employ YOLO-NAS in your research or development work, please cite SuperGradients: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +We express our gratitude to Deci AI's [SuperGradients](https://github.com/Deci-AI/super-gradients/) team for their efforts in creating and maintaining this valuable resource for the computer vision community. We believe YOLO-NAS, with its innovative architecture and superior object detection capabilities, will become a critical tool for developers and researchers alike. + +*Keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, SuperGradients, pre-trained models, quantization-friendly basic block, advanced training schemes, post-training quantization, AutoNAC optimization, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/en/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/en/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov3.md b/ultralytics/docs/en/models/yolov3.md new file mode 100755 index 0000000..2e6d34b --- /dev/null +++ b/ultralytics/docs/en/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Get an overview of YOLOv3, YOLOv3-Ultralytics and YOLOv3u. Learn about their key features, usage, and supported tasks for object detection. +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, Object Detection, Inference, Training, Ultralytics +--- + +# YOLOv3, YOLOv3-Ultralytics, and YOLOv3u + +## Overview + +This document presents an overview of three closely related object detection models, namely [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3), and [YOLOv3u](https://github.com/ultralytics/ultralytics). + +1. **YOLOv3:** This is the third version of the You Only Look Once (YOLO) object detection algorithm. Originally developed by Joseph Redmon, YOLOv3 improved on its predecessors by introducing features such as multiscale predictions and three different sizes of detection kernels. + +2. 
**YOLOv3-Ultralytics:** This is Ultralytics' implementation of the YOLOv3 model. It reproduces the original YOLOv3 architecture and offers additional functionalities, such as support for more pre-trained models and easier customization options. + +3. **YOLOv3u:** This is an updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split head used in YOLOv8 models. YOLOv3u maintains the same backbone and neck architecture as YOLOv3 but with the updated detection head from YOLOv8. + +![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png) + +## Key Features + +- **YOLOv3:** Introduced the use of three different scales for detection, leveraging three different sizes of detection kernels: 13x13, 26x26, and 52x52. This significantly improved detection accuracy for objects of different sizes. Additionally, YOLOv3 added features such as multi-label predictions for each bounding box and a better feature extractor network. + +- **YOLOv3-Ultralytics:** Ultralytics' implementation of YOLOv3 provides the same performance as the original model but comes with added support for more pre-trained models, additional training methods, and easier customization options. This makes it more versatile and user-friendly for practical applications. + +- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head from YOLOv8. By eliminating the need for pre-defined anchor boxes and objectness scores, this detection head design can improve the model's ability to detect objects of varying sizes and shapes. This makes YOLOv3u more robust and accurate for object detection tasks. + +## Supported Tasks and Modes + +The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics, and YOLOv3u, are designed specifically for object detection tasks. These models are renowned for their effectiveness in various real-world scenarios, balancing accuracy and speed. Each variant offers unique features and optimizations, making them suitable for a range of applications. + +All three models support a comprehensive set of modes, ensuring versatility in various stages of model deployment and development. These modes include [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), providing users with a complete toolkit for effective object detection. + +| Model Type | Tasks Supported | Inference | Validation | Training | Export | +|--------------------|----------------------------------------|-----------|------------|----------|--------| +| YOLOv3 | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3u | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +This table provides an at-a-glance view of the capabilities of each YOLOv3 variant, highlighting their versatility and suitability for various tasks and operational modes in object detection workflows. + +## Usage Examples + +This example provides simple YOLOv3 training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +!!! 
Example + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in python: + + ```python + from ultralytics import YOLO + + # Load a COCO-pretrained YOLOv3n model + model = YOLO('yolov3n.pt') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv3n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Load a COCO-pretrained YOLOv3n model and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Load a COCO-pretrained YOLOv3n model and run inference on the 'bus.jpg' image + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## Citations and Acknowledgements + +If you use YOLOv3 in your research, please cite the original YOLO papers and the Ultralytics YOLOv3 repository: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Thank you to Joseph Redmon and Ali Farhadi for developing the original YOLOv3. diff --git a/ultralytics/docs/en/models/yolov3.md:Zone.Identifier b/ultralytics/docs/en/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov4.md b/ultralytics/docs/en/models/yolov4.md new file mode 100755 index 0000000..b24ec75 --- /dev/null +++ b/ultralytics/docs/en/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: Explore our detailed guide on YOLOv4, a state-of-the-art real-time object detector. Understand its architectural highlights, innovative features, and application examples. +keywords: ultralytics, YOLOv4, object detection, neural network, real-time detection, object detector, machine learning +--- + +# YOLOv4: High-Speed and Precise Object Detection + +Welcome to the Ultralytics documentation page for YOLOv4, a state-of-the-art, real-time object detector launched in 2020 by Alexey Bochkovskiy at [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 is designed to provide the optimal balance between speed and accuracy, making it an excellent choice for many applications. + +![YOLOv4 architecture diagram](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**YOLOv4 architecture diagram**. Showcasing the intricate network design of YOLOv4, including the backbone, neck, and head components, and their interconnected layers for optimal real-time object detection. + +## Introduction + +YOLOv4 stands for You Only Look Once version 4. It is a real-time object detection model developed to address the limitations of previous YOLO versions like [YOLOv3](yolov3.md) and other object detection models. Unlike other convolutional neural network (CNN) based object detectors, YOLOv4 is not only applicable for recommendation systems but also for standalone process management and human input reduction. 
Its operation on conventional graphics processing units (GPUs) allows for mass usage at an affordable price, and it is designed to work in real-time on a conventional GPU while requiring only one such GPU for training. + +## Architecture + +YOLOv4 makes use of several innovative features that work together to optimize its performance. These include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish-activation, Mosaic data augmentation, DropBlock regularization, and CIoU loss. These features are combined to achieve state-of-the-art results. + +A typical object detector is composed of several parts including the input, the backbone, the neck, and the head. The backbone of YOLOv4 is pre-trained on ImageNet and is used to predict classes and bounding boxes of objects. The backbone could be from several models including VGG, ResNet, ResNeXt, or DenseNet. The neck part of the detector is used to collect feature maps from different stages and usually includes several bottom-up paths and several top-down paths. The head part is what is used to make the final object detections and classifications. + +## Bag of Freebies + +YOLOv4 also makes use of methods known as "bag of freebies," which are techniques that improve the accuracy of the model during training without increasing the cost of inference. Data augmentation is a common bag of freebies technique used in object detection, which increases the variability of the input images to improve the robustness of the model. Some examples of data augmentation include photometric distortions (adjusting the brightness, contrast, hue, saturation, and noise of an image) and geometric distortions (adding random scaling, cropping, flipping, and rotating). These techniques help the model to generalize better to different types of images. + +## Features and Performance + +YOLOv4 is designed for optimal speed and accuracy in object detection. The architecture of YOLOv4 includes CSPDarknet53 as the backbone, PANet as the neck, and YOLOv3 as the detection head. This design allows YOLOv4 to perform object detection at an impressive speed, making it suitable for real-time applications. YOLOv4 also excels in accuracy, achieving state-of-the-art results in object detection benchmarks. + +## Usage Examples + +As of the time of writing, Ultralytics does not currently support YOLOv4 models. Therefore, any users interested in using YOLOv4 will need to refer directly to the YOLOv4 GitHub repository for installation and usage instructions. + +Here is a brief overview of the typical steps you might take to use YOLOv4: + +1. Visit the YOLOv4 GitHub repository: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing necessary dependencies, and setting up any necessary environment variables. + +3. Once installation is complete, you can train and use the model as per the usage instructions provided in the repository. This usually involves preparing your dataset, configuring the model parameters, training the model, and then using the trained model to perform object detection. + +Please note that the specific steps may vary depending on your specific use case and the current state of the YOLOv4 repository. Therefore, it is strongly recommended to refer directly to the instructions provided in the YOLOv4 GitHub repository. 
+ +We regret any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once support for YOLOv4 is implemented. + +## Conclusion + +YOLOv4 is a powerful and efficient object detection model that strikes a balance between speed and accuracy. Its use of unique features and bag of freebies techniques during training allows it to perform excellently in real-time object detection tasks. YOLOv4 can be trained and used by anyone with a conventional GPU, making it accessible and practical for a wide range of applications. + +## Citations and Acknowledgements + +We would like to acknowledge the YOLOv4 authors for their significant contributions in the field of real-time object detection: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +The original YOLOv4 paper can be found on [arXiv](https://arxiv.org/abs/2004.10934). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/AlexeyAB/darknet). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/en/models/yolov4.md:Zone.Identifier b/ultralytics/docs/en/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov5.md b/ultralytics/docs/en/models/yolov5.md new file mode 100755 index 0000000..ab55225 --- /dev/null +++ b/ultralytics/docs/en/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: Discover YOLOv5u, a boosted version of the YOLOv5 model featuring an improved accuracy-speed tradeoff and numerous pre-trained models for various object detection tasks. +keywords: YOLOv5u, object detection, pre-trained models, Ultralytics, Inference, Validation, YOLOv5, YOLOv8, anchor-free, objectness-free, real-time applications, machine learning +--- + +# YOLOv5 + +## Overview + +YOLOv5u represents an advancement in object detection methodologies. Originating from the foundational architecture of the [YOLOv5](https://github.com/ultralytics/yolov5) model developed by Ultralytics, YOLOv5u integrates the anchor-free, objectness-free split head, a feature previously introduced in the [YOLOv8](yolov8.md) models. This adaptation refines the model's architecture, leading to an improved accuracy-speed tradeoff in object detection tasks. Given the empirical results and its derived features, YOLOv5u provides an efficient alternative for those seeking robust solutions in both research and practical applications. + +![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## Key Features + +- **Anchor-free Split Ultralytics Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. However, YOLOv5u modernizes this approach. By adopting an anchor-free split Ultralytics head, it ensures a more flexible and adaptive detection mechanism, consequently enhancing the performance in diverse scenarios. + +- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions. But YOLOv5u challenges this tradeoff. 
It offers a calibrated balance, ensuring real-time detections without compromising on accuracy. This feature is particularly invaluable for applications that demand swift responses, such as autonomous vehicles, robotics, and real-time video analytics.
+
+- **Variety of Pre-trained Models:** Understanding that different tasks require different toolsets, YOLOv5u provides a plethora of pre-trained models. Whether you're focusing on Inference, Validation, or Training, there's a tailor-made model awaiting you. This variety ensures you're not just using a one-size-fits-all solution, but a model specifically fine-tuned for your unique challenge.
+
+## Supported Tasks and Modes
+
+The YOLOv5u models, with various pre-trained weights, excel in [Object Detection](../tasks/detect.md) tasks. They support a comprehensive range of modes, making them suitable for diverse applications, from development to deployment.
+
+| Model Type | Pre-trained Weights                                                                                                         | Task                                   | Inference | Validation | Training | Export |
+|------------|-----------------------------------------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv5u    | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object Detection](../tasks/detect.md) | โœ…        | โœ…         | โœ…       | โœ…     |
+
+This table provides a detailed overview of the YOLOv5u model variants, highlighting their applicability in object detection tasks and support for various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv5u models in a wide range of object detection scenarios.
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection"
+
+        See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | YAML | size<br>(pixels) | mAP<sup>val</sup><br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------|------------------|----------------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 |
+        | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 |
+        | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 |
+        | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 |
+        | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 |
+        | | | | | | | | |
+        | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 |
+        | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 |
+        | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 |
+        | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 |
+        | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
+
+## Usage Examples
+
+This example provides simple YOLOv5 training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+!!! 
Example + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in python: + + ```python + from ultralytics import YOLO + + # Load a COCO-pretrained YOLOv5n model + model = YOLO('yolov5n.pt') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv5n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Load a COCO-pretrained YOLOv5n model and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Load a COCO-pretrained YOLOv5n model and run inference on the 'bus.jpg' image + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## Citations and Acknowledgements + +If you use YOLOv5 or YOLOv5u in your research, please cite the Ultralytics YOLOv5 repository as follows: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Please note that YOLOv5 models are provided under [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) and [Enterprise](https://ultralytics.com/license) licenses. diff --git a/ultralytics/docs/en/models/yolov5.md:Zone.Identifier b/ultralytics/docs/en/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov6.md b/ultralytics/docs/en/models/yolov6.md new file mode 100755 index 0000000..a3aaca5 --- /dev/null +++ b/ultralytics/docs/en/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: Explore Meituan YOLOv6, a state-of-the-art object detection model striking a balance between speed and accuracy. Dive into features, pre-trained models, and Python usage. +keywords: Meituan YOLOv6, object detection, Ultralytics, YOLOv6 docs, Bi-directional Concatenation, Anchor-Aided Training, pretrained models, real-time applications +--- + +# Meituan YOLOv6 + +## Overview + +[Meituan](https://about.meituan.com/) YOLOv6 is a cutting-edge object detector that offers remarkable balance between speed and accuracy, making it a popular choice for real-time applications. This model introduces several notable enhancements on its architecture and training scheme, including the implementation of a Bi-directional Concatenation (BiC) module, an anchor-aided training (AAT) strategy, and an improved backbone and neck design for state-of-the-art accuracy on the COCO dataset. + +![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**Overview of YOLOv6.** Model architecture diagram showing the redesigned network components and training strategies that have led to significant performance improvements. (a) The neck of YOLOv6 (N and S are shown). Note for M/L, RepBlocks is replaced with CSPStackRep. 
(b) The structure of a BiC module. (c) A SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)). + +### Key Features + +- **Bidirectional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the neck of the detector, enhancing localization signals and delivering performance gains with negligible speed degradation. +- **Anchor-Aided Training (AAT) Strategy:** This model proposes AAT to enjoy the benefits of both anchor-based and anchor-free paradigms without compromising inference efficiency. +- **Enhanced Backbone and Neck Design:** By deepening YOLOv6 to include another stage in the backbone and neck, this model achieves state-of-the-art performance on the COCO dataset at high-resolution input. +- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of smaller models of YOLOv6, enhancing the auxiliary regression branch during training and removing it at inference to avoid a marked speed decline. + +## Performance Metrics + +YOLOv6 provides various pre-trained models with different scales: + +- YOLOv6-N: 37.5% AP on COCO val2017 at 1187 FPS with NVIDIA Tesla T4 GPU. +- YOLOv6-S: 45.0% AP at 484 FPS. +- YOLOv6-M: 50.0% AP at 226 FPS. +- YOLOv6-L: 52.8% AP at 116 FPS. +- YOLOv6-L6: State-of-the-art accuracy in real-time. + +YOLOv6 also provides quantized models for different precisions and models optimized for mobile platforms. + +## Usage Examples + +This example provides simple YOLOv6 training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +!!! Example + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in python: + + ```python + from ultralytics import YOLO + + # Build a YOLOv6n model from scratch + model = YOLO('yolov6n.yaml') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv6n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Build a YOLOv6n model from scratch and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Build a YOLOv6n model from scratch and run inference on the 'bus.jpg' image + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## Supported Tasks and Modes + +The YOLOv6 series offers a range of models, each optimized for high-performance [Object Detection](../tasks/detect.md). These models cater to varying computational needs and accuracy requirements, making them versatile for a wide array of applications. 
+ +| Model Type | Pre-trained Weights | Tasks Supported | Inference | Validation | Training | Export | +|------------|---------------------|----------------------------------------|-----------|------------|----------|--------| +| YOLOv6-N | `yolov6-n.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +This table provides a detailed overview of the YOLOv6 model variants, highlighting their capabilities in object detection tasks and their compatibility with various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv6 models in a broad range of object detection scenarios. + +## Citations and Acknowledgements + +We would like to acknowledge the authors for their significant contributions in the field of real-time object detection: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +The original YOLOv6 paper can be found on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/en/models/yolov6.md:Zone.Identifier b/ultralytics/docs/en/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov7.md b/ultralytics/docs/en/models/yolov7.md new file mode 100755 index 0000000..f3d99d5 --- /dev/null +++ b/ultralytics/docs/en/models/yolov7.md @@ -0,0 +1,65 @@ +--- +comments: true +description: Explore the YOLOv7, a real-time object detector. Understand its superior speed, impressive accuracy, and unique trainable bag-of-freebies optimization focus. +keywords: YOLOv7, real-time object detector, state-of-the-art, Ultralytics, MS COCO dataset, model re-parameterization, dynamic label assignment, extended scaling, compound scaling +--- + +# YOLOv7: Trainable Bag-of-Freebies + +YOLOv7 is a state-of-the-art real-time object detector that surpasses all known object detectors in both speed and accuracy in the range from 5 FPS to 160 FPS. It has the highest accuracy (56.8% AP) among all known real-time object detectors with 30 FPS or higher on GPU V100. Moreover, YOLOv7 outperforms other object detectors such as YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5, and many others in speed and accuracy. The model is trained on the MS COCO dataset from scratch without using any other datasets or pre-trained weights. Source code for YOLOv7 is available on GitHub. 
+
+![YOLOv7 comparison with SOTA object detectors](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92)
+**Comparison of state-of-the-art object detectors.** From the results in Table 2 we know that the proposed method has the best speed-accuracy trade-off comprehensively. If we compare YOLOv7-tiny-SiLU with YOLOv5-N (r6.1), our method is 127 fps faster and 10.7% more accurate on AP. In addition, YOLOv7 has 51.4% AP at a frame rate of 161 fps, while PPYOLOE-L with the same AP has only a 78 fps frame rate. In terms of parameter usage, YOLOv7 uses 41% fewer parameters than PPYOLOE-L. If we compare YOLOv7-X with 114 fps inference speed to YOLOv5-L (r6.1) with 99 fps inference speed, YOLOv7-X can improve AP by 3.9%. If YOLOv7-X is compared with YOLOv5-X (r6.1) of similar scale, the inference speed of YOLOv7-X is 31 fps faster. In addition, in terms of the amount of parameters and computation, YOLOv7-X reduces parameters by 22% and computation by 8% compared to YOLOv5-X (r6.1), but improves AP by 2.2% ([Source](https://arxiv.org/pdf/2207.02696.pdf)).
+
+## Overview
+
+Real-time object detection is an important component in many computer vision systems, including multi-object tracking, autonomous driving, robotics, and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving the inference speed of various CPUs, GPUs, and neural processing units (NPUs). YOLOv7 supports both mobile GPU and GPU devices, from the edge to the cloud.
+
+Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on the optimization of the training process. This includes modules and optimization methods designed to improve the accuracy of object detection without increasing the inference cost, a concept known as the "trainable bag-of-freebies".
+
+## Key Features
+
+YOLOv7 introduces several key features:
+
+1. **Model Re-parameterization**: YOLOv7 proposes a planned re-parameterized model, which is a strategy applicable to layers in different networks with the concept of gradient propagation path.
+
+2. **Dynamic Label Assignment**: The training of the model with multiple output layers presents a new issue: "How to assign dynamic targets for the outputs of different branches?" To solve this problem, YOLOv7 introduces a new label assignment method called coarse-to-fine lead guided label assignment.
+
+3. **Extended and Compound Scaling**: YOLOv7 proposes "extend" and "compound scaling" methods for the real-time object detector that can effectively utilize parameters and computation.
+
+4. **Efficiency**: The method proposed by YOLOv7 can effectively reduce about 40% of the parameters and 50% of the computation of state-of-the-art real-time object detectors, while achieving faster inference speed and higher detection accuracy.
+
+## Usage Examples
+
+As of the time of writing, Ultralytics does not currently support YOLOv7 models. Therefore, any users interested in using YOLOv7 will need to refer directly to the YOLOv7 GitHub repository for installation and usage instructions.
+
+Here is a brief overview of the typical steps you might take to use YOLOv7:
+
+1. Visit the YOLOv7 GitHub repository: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7).
+
+2. Follow the instructions provided in the README file for installation. This typically involves cloning the repository, installing necessary dependencies, and setting up any necessary environment variables.
+
+3. 
Please note that the specific steps may vary depending on your use case and the current state of the YOLOv7 repository, so it is strongly recommended to refer directly to the instructions provided in the YOLOv7 GitHub repository. + +We regret any inconvenience this may cause and will update this document with usage examples for Ultralytics once YOLOv7 support is implemented. + +## Citations and Acknowledgements + +We would like to acknowledge the YOLOv7 authors for their significant contributions in the field of real-time object detection: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +The original YOLOv7 paper can be found on [arXiv](https://arxiv.org/pdf/2207.02696.pdf). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/WongKinYiu/yolov7). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/en/models/yolov7.md:Zone.Identifier b/ultralytics/docs/en/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/models/yolov8.md b/ultralytics/docs/en/models/yolov8.md new file mode 100755 index 0000000..fd70130 --- /dev/null +++ b/ultralytics/docs/en/models/yolov8.md @@ -0,0 +1,173 @@ +--- +comments: true +description: Explore the thrilling features of YOLOv8, the latest version of our real-time object detector! Learn how advanced architectures, pre-trained models and optimal balance between accuracy & speed make YOLOv8 the perfect choice for your object detection tasks. +keywords: YOLOv8, Ultralytics, real-time object detector, pre-trained models, documentation, object detection, YOLO series, advanced architectures, accuracy, speed +--- + +# YOLOv8 + +## Overview + +YOLOv8 is the latest iteration in the YOLO series of real-time object detectors, offering cutting-edge performance in terms of accuracy and speed. Building upon the advancements of previous YOLO versions, YOLOv8 introduces new features and optimizations that make it an ideal choice for various object detection tasks in a wide range of applications. + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +

+
+ +
+ Watch: Ultralytics YOLOv8 Model Overview +

+ +## Key Features + +- **Advanced Backbone and Neck Architectures:** YOLOv8 employs state-of-the-art backbone and neck architectures, resulting in improved feature extraction and object detection performance. +- **Anchor-free Split Ultralytics Head:** YOLOv8 adopts an anchor-free split Ultralytics head, which contributes to better accuracy and a more efficient detection process compared to anchor-based approaches. +- **Optimized Accuracy-Speed Tradeoff:** With a focus on maintaining an optimal balance between accuracy and speed, YOLOv8 is suitable for real-time object detection tasks in diverse application areas. +- **Variety of Pre-trained Models:** YOLOv8 offers a range of pre-trained models to cater to various tasks and performance requirements, making it easier to find the right model for your specific use case. + +## Supported Tasks and Modes + +The YOLOv8 series offers a diverse range of models, each specialized for specific tasks in computer vision. These models are designed to cater to various requirements, from object detection to more complex tasks like instance segmentation, pose/keypoints detection, and classification. + +Each variant of the YOLOv8 series is optimized for its respective task, ensuring high performance and accuracy. Additionally, these models are compatible with various operational modes including [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), facilitating their use in different stages of deployment and development. + +| Model | Filenames | Task | Inference | Validation | Training | Export | +|-------------|----------------------------------------------------------------------------------------------------------------|----------------------------------------------|-----------|------------|----------|--------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [Instance Segmentation](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [Pose/Keypoints](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [Classification](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +This table provides an overview of the YOLOv8 model variants, highlighting their applicability in specific tasks and their compatibility with various operational modes such as Inference, Validation, Training, and Export. It showcases the versatility and robustness of the YOLOv8 series, making them suitable for a variety of applications in computer vision. + +## Performance Metrics + +!!! Performance + + === "Detection (COCO)" + + See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes. + + | Model | size
(pixels) | mAPval
50-95 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | + | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "Detection (Open Images V7)" + + See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes. + + | Model | size
(pixels) | mAPval
50-95 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | + | ----------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "Segmentation (COCO)" + + See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes. + + | Model | size
(pixels) | mAPbox
50-95 | mAPmask
50-95 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | + | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "Classification (ImageNet)" + + See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pre-trained classes. + + | Model | size
(pixels) | acc
top1 | acc
top5 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) at 640 | + | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ | + | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | + | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | + | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | + | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | + | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + + === "Pose (COCO)" + + See [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, 'person'. + + | Model | size
(pixels) | mAPpose
50-95 | mAPpose
50 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | + | ---------------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## Usage Examples + +This example provides simple YOLOv8 training and inference examples. For full documentation on these and other [modes](../modes/index.md) see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +Note the below example is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) and [Pose](../tasks/pose.md) docs. + +!!! Example + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in python: + + ```python + from ultralytics import YOLO + + # Load a COCO-pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv8n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## Citations and Acknowledgements + +If you use the YOLOv8 model or any other software from this repository in your work, please cite it using the following format: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +Please note that the DOI is pending and will be added to the citation once it is available. YOLOv8 models are provided under [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) and [Enterprise](https://ultralytics.com/license) licenses. 
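+As a final practical note, the Detect example above carries over to the other YOLOv8 task variants listed in the table earlier on this page; only the model filename changes. A minimal CLI sketch (the image path is a placeholder): + +```bash +# Instance segmentation with a COCO-pretrained model +yolo predict model=yolov8n-seg.pt source=path/to/bus.jpg + +# Pose estimation with a COCO-pretrained model +yolo predict model=yolov8n-pose.pt source=path/to/bus.jpg + +# Image classification with an ImageNet-pretrained model +yolo predict model=yolov8n-cls.pt source=path/to/bus.jpg +```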
diff --git a/ultralytics/docs/en/models/yolov8.md:Zone.Identifier b/ultralytics/docs/en/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/benchmark.md b/ultralytics/docs/en/modes/benchmark.md new file mode 100755 index 0000000..892e732 --- /dev/null +++ b/ultralytics/docs/en/modes/benchmark.md @@ -0,0 +1,105 @@ +--- +comments: true +description: Learn how to profile speed and accuracy of YOLOv8 across various export formats; get insights on mAP50-95, accuracy_top5 metrics, and more. +keywords: Ultralytics, YOLOv8, benchmarking, speed profiling, accuracy profiling, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLO export formats +--- + +# Model Benchmarking with Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +Once your model is trained and validated, the next logical step is to evaluate its performance in various real-world scenarios. Benchmark mode in Ultralytics YOLOv8 serves this purpose by providing a robust framework for assessing the speed and accuracy of your model across a range of export formats. + +

+
+ +
+ Watch: Ultralytics Modes Tutorial: Benchmark +

+ +## Why Is Benchmarking Crucial? + +- **Informed Decisions:** Gain insights into the trade-offs between speed and accuracy. +- **Resource Allocation:** Understand how different export formats perform on different hardware. +- **Optimization:** Learn which export format offers the best performance for your specific use case. +- **Cost Efficiency:** Make more efficient use of hardware resources based on benchmark results. + +### Key Metrics in Benchmark Mode + +- **mAP50-95:** For object detection, segmentation, and pose estimation. +- **accuracy_top5:** For image classification. +- **Inference Time:** Time taken for each image in milliseconds. + +### Supported Export Formats + +- **ONNX:** For optimal CPU performance. +- **TensorRT:** For maximal GPU efficiency. +- **OpenVINO:** For Intel hardware optimization. +- **CoreML, TensorFlow SavedModel, and More:** For diverse deployment needs. + +!!! Tip "Tip" + + * Export to ONNX or OpenVINO for up to 3x CPU speedup. + * Export to TensorRT for up to 5x GPU speedup. + +## Usage Examples + +Run YOLOv8n benchmarks on all supported export formats, including ONNX, TensorRT, etc. See the Arguments section below for a full list of export arguments. + +!!! Example + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark on GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Arguments + +Arguments such as `model`, `data`, `imgsz`, `half`, `device`, and `verbose` provide users with the flexibility to fine-tune the benchmarks to their specific needs and compare the performance of different export formats with ease. + +| Key | Value | Description | +|-----------|---------|---------------------------------------------------------------------------------| +| `model` | `None` | path to model file, i.e. yolov8n.pt, yolov8n.yaml | +| `data` | `None` | path to YAML referencing the benchmarking dataset (via its `val` field) | +| `imgsz` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `half` | `False` | FP16 quantization | +| `int8` | `False` | INT8 quantization | +| `device` | `None` | device to run on, i.e. cuda device=0 or device=0,1,2,3 or device=cpu | +| `verbose` | `False` | stop on error if `True` (bool), or a minimum metric floor for validation (float) |
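+As a variation on the example above, the same arguments can target CPU inference with INT8 quantization. This sketch uses only arguments documented in the table; actual runtimes depend on the backends installed: + +```bash +# Benchmark with INT8 quantization on CPU +yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 int8=True device=cpu +```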
+ +## Export Formats + +Benchmarks will attempt to run automatically on all possible export formats below. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/en/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/en/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/export.md b/ultralytics/docs/en/modes/export.md new file mode 100755 index 0000000..9077481 --- /dev/null +++ b/ultralytics/docs/en/modes/export.md @@ -0,0 +1,108 @@ +--- +comments: true +description: Step-by-step guide on exporting your YOLOv8 models to various formats like ONNX, TensorRT, CoreML and more for deployment. Explore now! +keywords: YOLO, YOLOv8, Ultralytics, Model export, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, export model +--- + +# Model Export with Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +The ultimate goal of training a model is to deploy it for real-world applications. Export mode in Ultralytics YOLOv8 offers a versatile range of options for exporting your trained model to different formats, making it deployable across various platforms and devices. This comprehensive guide aims to walk you through the nuances of model exporting, showcasing how to achieve maximum compatibility and performance. + +

+
+ +
+ Watch: How To Export Custom Trained Ultralytics YOLOv8 Model and Run Live Inference on Webcam. +

+ +## Why Choose YOLOv8's Export Mode? + +- **Versatility:** Export to multiple formats including ONNX, TensorRT, CoreML, and more. +- **Performance:** Gain up to 5x GPU speedup with TensorRT and 3x CPU speedup with ONNX or OpenVINO. +- **Compatibility:** Make your model universally deployable across numerous hardware and software environments. +- **Ease of Use:** Simple CLI and Python API for quick and straightforward model exporting. + +### Key Features of Export Mode + +Here are some of the standout functionalities: + +- **One-Click Export:** Simple commands for exporting to different formats. +- **Batch Export:** Export batched-inference capable models. +- **Optimized Inference:** Exported models are optimized for quicker inference times. +- **Tutorial Videos:** In-depth guides and tutorials for a smooth exporting experience. + +!!! Tip "Tip" + + * Export to ONNX or OpenVINO for up to 3x CPU speedup. + * Export to TensorRT for up to 5x GPU speedup. + +## Usage Examples + +Export a YOLOv8n model to a different format like ONNX or TensorRT. See the Arguments section below for a full list of export arguments. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +## Arguments + +Export settings for YOLO models refer to the various configurations and options used to save or export the model for use in other environments or platforms. These settings can affect the model's performance, size, and compatibility with different systems. Some common YOLO export settings include the format of the exported model file (e.g. ONNX, TensorFlow SavedModel), the device on which the model will be run (e.g. CPU, GPU), and the presence of additional features such as masks or multiple labels per box. Other factors that may affect the export process include the specific task the model is being used for and the requirements or constraints of the target environment or platform. It is important to carefully consider and configure these settings to ensure that the exported model is optimized for the intended use case and can be used effectively in the target environment. + +| Key | Value | Description | +|-------------|-----------------|------------------------------------------------------| +| `format` | `'torchscript'` | format to export to | +| `imgsz` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `keras` | `False` | use Keras for TF SavedModel export | +| `optimize` | `False` | TorchScript: optimize for mobile | +| `half` | `False` | FP16 quantization | +| `int8` | `False` | INT8 quantization | +| `dynamic` | `False` | ONNX/TensorRT: dynamic axes | +| `simplify` | `False` | ONNX/TensorRT: simplify model | +| `opset` | `None` | ONNX: opset version (optional, defaults to latest) | +| `workspace` | `4` | TensorRT: workspace size (GB) | +| `nms` | `False` | CoreML: add NMS |
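+Exported artifacts can typically be used directly in place of the original `*.pt` weights, although the required runtimes vary by format. A brief sketch continuing the ONNX example above: + +```bash +# Export to ONNX, then run inference with the exported model +yolo export model=yolov8n.pt format=onnx +yolo predict model=yolov8n.onnx source=path/to/bus.jpg +```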
+ +## Export Formats + +Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | diff --git a/ultralytics/docs/en/modes/export.md:Zone.Identifier b/ultralytics/docs/en/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/index.md b/ultralytics/docs/en/modes/index.md new file mode 100755 index 0000000..d0a0f9e --- /dev/null +++ b/ultralytics/docs/en/modes/index.md @@ -0,0 +1,74 @@ +--- +comments: true +description: From training to tracking, make the most of YOLOv8 with Ultralytics. Get insights and examples for each supported mode including validation, export, and benchmarking. +keywords: Ultralytics, YOLOv8, Machine Learning, Object Detection, Training, Validation, Prediction, Export, Tracking, Benchmarking +--- + +# Ultralytics YOLOv8 Modes + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +Ultralytics YOLOv8 is not just another object detection model; it's a versatile framework designed to cover the entire lifecycle of machine learning models, from data ingestion and model training to validation, deployment, and real-world tracking. Each mode serves a specific purpose and is engineered to offer you the flexibility and efficiency required for different tasks and use-cases. + +

+
+ +
+ Watch: Ultralytics Modes Tutorial: Train, Validate, Predict, Export & Benchmark. +

+ +### Modes at a Glance + +Understanding the different **modes** that Ultralytics YOLOv8 supports is critical to getting the most out of your models: + +- **Train** mode: Fine-tune your model on custom or preloaded datasets. +- **Val** mode: A post-training checkpoint to validate model performance. +- **Predict** mode: Unleash the predictive power of your model on real-world data. +- **Export** mode: Make your model deployment-ready in various formats. +- **Track** mode: Extend your object detection model into real-time tracking applications. +- **Benchmark** mode: Analyze the speed and accuracy of your model in diverse deployment environments. + +This comprehensive guide aims to give you an overview and practical insights into each mode, helping you harness the full potential of YOLOv8. + +## [Train](train.md) + +Train mode is used for training a YOLOv8 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process involves optimizing the model's parameters so that it can accurately predict the classes and locations of objects in an image. + +[Train Examples](train.md){ .md-button } + +## [Val](val.md) + +Val mode is used for validating a YOLOv8 model after it has been trained. In this mode, the model is evaluated on a validation set to measure its accuracy and generalization performance. This mode can be used to tune the hyperparameters of the model to improve its performance. + +[Val Examples](val.md){ .md-button } + +## [Predict](predict.md) + +Predict mode is used for making predictions using a trained YOLOv8 model on new images or videos. In this mode, the model is loaded from a checkpoint file, and the user can provide images or videos to perform inference. The model predicts the classes and locations of objects in the input images or videos. + +[Predict Examples](predict.md){ .md-button } + +## [Export](export.md) + +Export mode is used for exporting a YOLOv8 model to a format that can be used for deployment. In this mode, the model is converted to a format that can be used by other software applications or hardware devices. This mode is useful when deploying the model to production environments. + +[Export Examples](export.md){ .md-button } + +## [Track](track.md) + +Track mode is used for tracking objects in real-time using a YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user can provide a live video stream to perform real-time object tracking. This mode is useful for applications such as surveillance systems or self-driving cars. + +[Track Examples](track.md){ .md-button } + +## [Benchmark](benchmark.md) + +Benchmark mode is used to profile the speed and accuracy of various export formats for YOLOv8. The benchmarks provide information on the size of the exported format, its `mAP50-95` metrics (for object detection, segmentation and pose) +or `accuracy_top5` metrics (for classification), and the inference time in milliseconds per image across various export formats like ONNX, OpenVINO, TensorRT and others. This information can help users choose the optimal export format for their specific use case based on their requirements for speed and accuracy. 
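+For a quick sense of the command involved, a single CLI call runs the full benchmark suite; this mirrors the example on the Benchmark page linked below: + +```bash +# Benchmark YOLOv8n speed and accuracy across export formats +yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 +```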
+ +[Benchmark Examples](benchmark.md){ .md-button } diff --git a/ultralytics/docs/en/modes/index.md:Zone.Identifier b/ultralytics/docs/en/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/predict.md b/ultralytics/docs/en/modes/predict.md new file mode 100755 index 0000000..6c4561f --- /dev/null +++ b/ultralytics/docs/en/modes/predict.md @@ -0,0 +1,728 @@ +--- +comments: true +description: Discover how to use YOLOv8 predict mode for various tasks. Learn about different inference sources like images, videos, and data formats. +keywords: Ultralytics, YOLOv8, predict mode, inference sources, prediction tasks, streaming mode, image processing, video processing, machine learning, AI +--- + +# Model Prediction with Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +In the world of machine learning and computer vision, the process of making sense out of visual data is called 'inference' or 'prediction'. Ultralytics YOLOv8 offers a powerful feature known as **predict mode** that is tailored for high-performance, real-time inference on a wide range of data sources. + +

+
+ +
+ Watch: How to Extract the Outputs from Ultralytics YOLOv8 Model for Custom Projects. +

+ +## Real-world Applications + +| Manufacturing | Sports | Safety | +|:-------------------------------------------------:|:----------------------------------------------------:|:-------------------------------------------:| +| ![Vehicle Spare Parts Detection][car spare parts] | ![Football Player Detection][football player detect] | ![People Fall Detection][human fall detect] | +| Vehicle Spare Parts Detection | Football Player Detection | People Fall Detection | + +## Why Use Ultralytics YOLO for Inference? + +Here's why you should consider YOLOv8's predict mode for your various inference needs: + +- **Versatility:** Capable of making inferences on images, videos, and even live streams. +- **Performance:** Engineered for real-time, high-speed processing without sacrificing accuracy. +- **Ease of Use:** Intuitive Python and CLI interfaces for rapid deployment and testing. +- **Highly Customizable:** Various settings and parameters to tune the model's inference behavior according to your specific requirements. + +### Key Features of Predict Mode + +YOLOv8's predict mode is designed to be robust and versatile, featuring: + +- **Multiple Data Source Compatibility:** Whether your data is in the form of individual images, a collection of images, video files, or real-time video streams, predict mode has you covered. +- **Streaming Mode:** Use the streaming feature to generate a memory-efficient generator of `Results` objects. Enable this by setting `stream=True` in the predictor's call method. +- **Batch Processing:** The ability to process multiple images or video frames in a single batch, further speeding up inference time. +- **Integration Friendly:** Easily integrate with existing data pipelines and other software components, thanks to its flexible API. + +Ultralytics YOLO models return either a Python list of `Results` objects, or a memory-efficient Python generator of `Results` objects when `stream=True` is passed to the model during inference: + +!!! Example "Predict" + + === "Return a list with `stream=False`" + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # pretrained YOLOv8n model + + # Run batched inference on a list of images + results = model(['im1.jpg', 'im2.jpg']) # return a list of Results objects + + # Process results list + for result in results: + boxes = result.boxes # Boxes object for bbox outputs + masks = result.masks # Masks object for segmentation masks outputs + keypoints = result.keypoints # Keypoints object for pose outputs + probs = result.probs # Probs object for classification outputs + ``` + + === "Return a generator with `stream=True`" + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # pretrained YOLOv8n model + + # Run batched inference on a list of images + results = model(['im1.jpg', 'im2.jpg'], stream=True) # return a generator of Results objects + + # Process results generator + for result in results: + boxes = result.boxes # Boxes object for bbox outputs + masks = result.masks # Masks object for segmentation masks outputs + keypoints = result.keypoints # Keypoints object for pose outputs + probs = result.probs # Probs object for classification outputs + ``` + +## Inference Sources + +YOLOv8 can process different types of input sources for inference, as shown in the table below. The sources include static images, video streams, and various data formats. The table also indicates whether each source can be used in streaming mode with the argument `stream=True` โœ…. 
Streaming mode is beneficial for processing videos or live streams as it creates a generator of results instead of loading all frames into memory. + +!!! Tip "Tip" + + Use `stream=True` for processing long videos or large datasets to efficiently manage memory. When `stream=False`, the results for all frames or data points are stored in memory, which can quickly add up and cause out-of-memory errors for large inputs. In contrast, `stream=True` utilizes a generator, which only keeps the results of the current frame or data point in memory, significantly reducing memory consumption and preventing out-of-memory issues. + +| Source | Argument | Type | Notes | +|----------------|--------------------------------------------|-----------------|---------------------------------------------------------------------------------------------| +| image | `'image.jpg'` | `str` or `Path` | Single image file. | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL to an image. | +| screenshot | `'screen'` | `str` | Capture a screenshot. | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | HWC format with RGB channels. | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | HWC format with BGR channels `uint8 (0-255)`. | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | HWC format with BGR channels `uint8 (0-255)`. | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | BCHW format with RGB channels `float32 (0.0-1.0)`. | +| CSV | `'sources.csv'` | `str` or `Path` | CSV file containing paths to images, videos, or directories. | +| video โœ… | `'video.mp4'` | `str` or `Path` | Video file in formats like MP4, AVI, etc. | +| directory โœ… | `'path/'` | `str` or `Path` | Path to a directory containing images or videos. | +| glob โœ… | `'path/*.jpg'` | `str` | Glob pattern to match multiple files. Use the `*` character as a wildcard. | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | URL to a YouTube video. | +| stream โœ… | `'rtsp://example.com/media.mp4'` | `str` | URL for streaming protocols such as RTSP, RTMP, TCP, or an IP address. | +| multi-stream โœ… | `'list.streams'` | `str` or `Path` | `*.streams` text file with one stream URL per row, i.e. 8 streams will run at batch-size 8. | + +Below are code examples for using each source type: + +!!! Example "Prediction sources" + + === "image" + Run inference on an image file. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define path to the image file + source = 'path/to/image.jpg' + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "screenshot" + Run inference on the current screen content as a screenshot. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define current screenshot as source + source = 'screen' + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "URL" + Run inference on an image or video hosted remotely via URL. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define remote image or video URL + source = 'https://ultralytics.com/images/bus.jpg' + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "PIL" + Run inference on an image opened with Python Imaging Library (PIL). 
+ ```python + from PIL import Image + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Open an image using PIL + source = Image.open('path/to/image.jpg') + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "OpenCV" + Run inference on an image read with OpenCV. + ```python + import cv2 + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Read an image using OpenCV + source = cv2.imread('path/to/image.jpg') + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "numpy" + Run inference on an image represented as a numpy array. + ```python + import numpy as np + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Create a random numpy array of HWC shape (640, 640, 3) with values in range [0, 255] and type uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "torch" + Run inference on an image represented as a PyTorch tensor. + ```python + import torch + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Create a random torch tensor of BCHW shape (1, 3, 640, 640) with values in range [0, 1] and type float32 + source = torch.rand(1, 3, 640, 640, dtype=torch.float32) + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "CSV" + Run inference on a collection of images, URLs, videos and directories listed in a CSV file. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define a path to a CSV file with images, URLs, videos and directories + source = 'path/to/file.csv' + + # Run inference on the source + results = model(source) # list of Results objects + ``` + + === "video" + Run inference on a video file. By using `stream=True`, you can create a generator of Results objects to reduce memory usage. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define path to video file + source = 'path/to/video.mp4' + + # Run inference on the source + results = model(source, stream=True) # generator of Results objects + ``` + + === "directory" + Run inference on all images and videos in a directory. To also capture images and videos in subdirectories use a glob pattern, i.e. `path/to/dir/**/*`. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define path to directory containing images and videos for inference + source = 'path/to/dir' + + # Run inference on the source + results = model(source, stream=True) # generator of Results objects + ``` + + === "glob" + Run inference on all images and videos that match a glob expression with `*` characters. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define a glob search for all JPG files in a directory + source = 'path/to/dir/*.jpg' + + # OR define a recursive glob search for all JPG files including subdirectories + source = 'path/to/dir/**/*.jpg' + + # Run inference on the source + results = model(source, stream=True) # generator of Results objects + ``` + + === "YouTube" + Run inference on a YouTube video. 
By using `stream=True`, you can create a generator of Results objects to reduce memory usage for long videos. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Define source as YouTube video URL + source = 'https://youtu.be/LNwODJXcvt4' + + # Run inference on the source + results = model(source, stream=True) # generator of Results objects + ``` + + === "Streams" + Run inference on remote streaming sources using RTSP, RTMP, TCP and IP address protocols. If multiple streams are provided in a `*.streams` text file then batched inference will run, i.e. 8 streams will run at batch-size 8, otherwise single streams will run at batch-size 1. + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Single stream with batch-size 1 inference + source = 'rtsp://example.com/media.mp4' # RTSP, RTMP, TCP or IP streaming address + + # Multiple streams with batched inference (i.e. batch-size 8 for 8 streams) + source = 'path/to/list.streams' # *.streams text file with one streaming address per row + + # Run inference on the source + results = model(source, stream=True) # generator of Results objects + ``` + +## Inference Arguments + +`model.predict()` accepts multiple arguments that can be passed at inference time to override defaults: + +!!! Example + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Run inference on 'bus.jpg' with arguments + model.predict('bus.jpg', save=True, imgsz=320, conf=0.5) + ``` + +Inference arguments: + +| Name | Type | Default | Description | +|-----------------|----------------|------------------------|----------------------------------------------------------------------------| +| `source` | `str` | `'ultralytics/assets'` | source directory for images or videos | +| `conf` | `float` | `0.25` | object confidence threshold for detection | +| `iou` | `float` | `0.7` | intersection over union (IoU) threshold for NMS | +| `imgsz` | `int or tuple` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `half` | `bool` | `False` | use half precision (FP16) | +| `device` | `None or str` | `None` | device to run on, i.e. cuda device=0/1/2/3 or device=cpu | +| `max_det` | `int` | `300` | maximum number of detections per image | +| `vid_stride` | `bool` | `False` | video frame-rate stride | +| `stream_buffer` | `bool` | `False` | buffer all streaming frames (True) or return the most recent frame (False) | +| `visualize` | `bool` | `False` | visualize model features | +| `augment` | `bool` | `False` | apply image augmentation to prediction sources | +| `agnostic_nms` | `bool` | `False` | class-agnostic NMS | +| `classes` | `list[int]` | `None` | filter results by class, i.e. 
classes=0, or classes=[0,2,3] | +| `retina_masks` | `bool` | `False` | use high-resolution segmentation masks | +| `embed` | `list[int]` | `None` | return feature vectors/embeddings from given layers | + +Visualization arguments: + +| Name | Type | Default | Description | +|---------------|---------------|---------|-----------------------------------------------------------------| +| `show` | `bool` | `False` | show predicted images and videos if environment allows | +| `save` | `bool` | `False` | save predicted images and videos | +| `save_frames` | `bool` | `False` | save predicted individual video frames | +| `save_txt` | `bool` | `False` | save results as `.txt` file | +| `save_conf` | `bool` | `False` | save results with confidence scores | +| `save_crop` | `bool` | `False` | save cropped images with results | +| `show_labels` | `bool` | `True` | show prediction labels, i.e. 'person' | +| `show_conf` | `bool` | `True` | show prediction confidence, i.e. '0.99' | +| `show_boxes` | `bool` | `True` | show prediction boxes | +| `line_width` | `None or int` | `None` | line width of the bounding boxes. Scaled to image size if None. | + +## Image and Video Formats + +YOLOv8 supports various image and video formats, as specified in [data/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/utils.py). See the tables below for the valid suffixes and example predict commands. + +### Images + +The below table contains valid Ultralytics image formats. + +| Image Suffixes | Example Predict Command | Reference | +|----------------|----------------------------------|-------------------------------------------------------------------------------| +| .bmp | `yolo predict source=image.bmp` | [Microsoft BMP File Format](https://en.wikipedia.org/wiki/BMP_file_format) | +| .dng | `yolo predict source=image.dng` | [Adobe DNG](https://www.adobe.com/products/photoshop/extend.displayTab2.html) | +| .jpeg | `yolo predict source=image.jpeg` | [JPEG](https://en.wikipedia.org/wiki/JPEG) | +| .jpg | `yolo predict source=image.jpg` | [JPEG](https://en.wikipedia.org/wiki/JPEG) | +| .mpo | `yolo predict source=image.mpo` | [Multi Picture Object](https://fileinfo.com/extension/mpo) | +| .png | `yolo predict source=image.png` | [Portable Network Graphics](https://en.wikipedia.org/wiki/PNG) | +| .tif | `yolo predict source=image.tif` | [Tag Image File Format](https://en.wikipedia.org/wiki/TIFF) | +| .tiff | `yolo predict source=image.tiff` | [Tag Image File Format](https://en.wikipedia.org/wiki/TIFF) | +| .webp | `yolo predict source=image.webp` | [WebP](https://en.wikipedia.org/wiki/WebP) | +| .pfm | `yolo predict source=image.pfm` | [Portable FloatMap](https://en.wikipedia.org/wiki/Netpbm#File_formats) | + +### Videos + +The below table contains valid Ultralytics video formats. 
+ +| Video Suffixes | Example Predict Command | Reference | +|----------------|----------------------------------|----------------------------------------------------------------------------------| +| .asf | `yolo predict source=video.asf` | [Advanced Systems Format](https://en.wikipedia.org/wiki/Advanced_Systems_Format) | +| .avi | `yolo predict source=video.avi` | [Audio Video Interleave](https://en.wikipedia.org/wiki/Audio_Video_Interleave) | +| .gif | `yolo predict source=video.gif` | [Graphics Interchange Format](https://en.wikipedia.org/wiki/GIF) | +| .m4v | `yolo predict source=video.m4v` | [MPEG-4 Part 14](https://en.wikipedia.org/wiki/M4V) | +| .mkv | `yolo predict source=video.mkv` | [Matroska](https://en.wikipedia.org/wiki/Matroska) | +| .mov | `yolo predict source=video.mov` | [QuickTime File Format](https://en.wikipedia.org/wiki/QuickTime_File_Format) | +| .mp4 | `yolo predict source=video.mp4` | [MPEG-4 Part 14](https://en.wikipedia.org/wiki/MPEG-4_Part_14) | +| .mpeg | `yolo predict source=video.mpeg` | [MPEG-1 Part 2](https://en.wikipedia.org/wiki/MPEG-1) | +| .mpg | `yolo predict source=video.mpg` | [MPEG-1 Part 2](https://en.wikipedia.org/wiki/MPEG-1) | +| .ts | `yolo predict source=video.ts` | [MPEG Transport Stream](https://en.wikipedia.org/wiki/MPEG_transport_stream) | +| .wmv | `yolo predict source=video.wmv` | [Windows Media Video](https://en.wikipedia.org/wiki/Windows_Media_Video) | +| .webm | `yolo predict source=video.webm` | [WebM Project](https://en.wikipedia.org/wiki/WebM) | + +## Working with Results + +All Ultralytics `predict()` calls will return a list of `Results` objects: + +!!! Example "Results" + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Run inference on an image + results = model('bus.jpg') # list of 1 Results object + results = model(['bus.jpg', 'zidane.jpg']) # list of 2 Results objects + ``` + +`Results` objects have the following attributes: + +| Attribute | Type | Description | +|--------------|-----------------------|------------------------------------------------------------------------------------------| +| `orig_img` | `numpy.ndarray` | The original image as a numpy array. | +| `orig_shape` | `tuple` | The original image shape in (height, width) format. | +| `boxes` | `Boxes, optional` | A Boxes object containing the detection bounding boxes. | +| `masks` | `Masks, optional` | A Masks object containing the detection masks. | +| `probs` | `Probs, optional` | A Probs object containing probabilities of each class for classification task. | +| `keypoints` | `Keypoints, optional` | A Keypoints object containing detected keypoints for each object. | +| `speed` | `dict` | A dictionary of preprocess, inference, and postprocess speeds in milliseconds per image. | +| `names` | `dict` | A dictionary of class names. | +| `path` | `str` | The path to the image file. | + +`Results` objects have the following methods: + +| Method | Return Type | Description | +|-----------------|-----------------|-------------------------------------------------------------------------------------| +| `__getitem__()` | `Results` | Return a Results object for the specified index. | +| `__len__()` | `int` | Return the number of detections in the Results object. | +| `update()` | `None` | Update the boxes, masks, and probs attributes of the Results object. | +| `cpu()` | `Results` | Return a copy of the Results object with all tensors on CPU memory. 
| + | `numpy()` | `Results` | Return a copy of the Results object with all tensors as numpy arrays. | +| `cuda()` | `Results` | Return a copy of the Results object with all tensors on GPU memory. | +| `to()` | `Results` | Return a copy of the Results object with tensors on the specified device and dtype. | +| `new()` | `Results` | Return a new Results object with the same image, path, and names. | +| `keys()` | `List[str]` | Return a list of non-empty attribute names. | +| `plot()` | `numpy.ndarray` | Plots the detection results. Returns a numpy array of the annotated image. | +| `verbose()` | `str` | Return log string for each task. | +| `save_txt()` | `None` | Save predictions into a txt file. | +| `save_crop()` | `None` | Save cropped predictions to `save_dir/cls/file_name.jpg`. | +| `tojson()` | `None` | Convert the object to JSON format. | + +For more details see the `Results` class [documentation](../reference/engine/results.md). + +### Boxes + +`Boxes` object can be used to index, manipulate, and convert bounding boxes to different formats. + +!!! Example "Boxes" + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n model + model = YOLO('yolov8n.pt') + + # Run inference on an image + results = model('bus.jpg') # results list + + # View results + for r in results: + print(r.boxes) # print the Boxes object containing the detection bounding boxes + ``` + +Here is a table for the `Boxes` class methods and properties, including their name, type, and description: + +| Name | Type | Description | +|-----------|---------------------------|--------------------------------------------------------------------| +| `cpu()` | Method | Move the object to CPU memory. | +| `numpy()` | Method | Convert the object to a numpy array. | +| `cuda()` | Method | Move the object to CUDA memory. | +| `to()` | Method | Move the object to the specified device. | +| `xyxy` | Property (`torch.Tensor`) | Return the boxes in xyxy format. | +| `conf` | Property (`torch.Tensor`) | Return the confidence values of the boxes. | +| `cls` | Property (`torch.Tensor`) | Return the class values of the boxes. | +| `id` | Property (`torch.Tensor`) | Return the track IDs of the boxes (if available). | +| `xywh` | Property (`torch.Tensor`) | Return the boxes in xywh format. | +| `xyxyn` | Property (`torch.Tensor`) | Return the boxes in xyxy format normalized by original image size. | +| `xywhn` | Property (`torch.Tensor`) | Return the boxes in xywh format normalized by original image size. | + +For more details see the `Boxes` class [documentation](../reference/engine/results.md#ultralytics.engine.results.Boxes). + +### Masks + +`Masks` object can be used to index, manipulate and convert masks to segments. + +!!! Example "Masks" + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n-seg Segment model + model = YOLO('yolov8n-seg.pt') + + # Run inference on an image + results = model('bus.jpg') # results list + + # View results + for r in results: + print(r.masks) # print the Masks object containing the detected instance masks + ``` + +Here is a table for the `Masks` class methods and properties, including their name, type, and description: + +| Name | Type | Description | +|-----------|---------------------------|-----------------------------------------------------------------| +| `cpu()` | Method | Returns the masks tensor on CPU memory. | +| `numpy()` | Method | Returns the masks tensor as a numpy array. | +| `cuda()` | Method | Returns the masks tensor on GPU memory. 
| + | `to()` | Method | Returns the masks tensor with the specified device and dtype. | +| `xyn` | Property (`torch.Tensor`) | A list of normalized segments represented as tensors. | +| `xy` | Property (`torch.Tensor`) | A list of segments in pixel coordinates represented as tensors. | + +For more details see the `Masks` class [documentation](../reference/engine/results.md#ultralytics.engine.results.Masks). + +### Keypoints + +`Keypoints` object can be used to index, manipulate and normalize coordinates. + +!!! Example "Keypoints" + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n-pose Pose model + model = YOLO('yolov8n-pose.pt') + + # Run inference on an image + results = model('bus.jpg') # results list + + # View results + for r in results: + print(r.keypoints) # print the Keypoints object containing the detected keypoints + ``` + +Here is a table for the `Keypoints` class methods and properties, including their name, type, and description: + +| Name | Type | Description | +|-----------|---------------------------|-------------------------------------------------------------------| +| `cpu()` | Method | Returns the keypoints tensor on CPU memory. | +| `numpy()` | Method | Returns the keypoints tensor as a numpy array. | +| `cuda()` | Method | Returns the keypoints tensor on GPU memory. | +| `to()` | Method | Returns the keypoints tensor with the specified device and dtype. | +| `xyn` | Property (`torch.Tensor`) | A list of normalized keypoints represented as tensors. | +| `xy` | Property (`torch.Tensor`) | A list of keypoints in pixel coordinates represented as tensors. | +| `conf` | Property (`torch.Tensor`) | Returns confidence values of keypoints if available, else None. | + +For more details see the `Keypoints` class [documentation](../reference/engine/results.md#ultralytics.engine.results.Keypoints). + +### Probs + +`Probs` object can be used to index, and to get `top1` and `top5` indices and scores of classification. + +!!! Example "Probs" + + ```python + from ultralytics import YOLO + + # Load a pretrained YOLOv8n-cls Classify model + model = YOLO('yolov8n-cls.pt') + + # Run inference on an image + results = model('bus.jpg') # results list + + # View results + for r in results: + print(r.probs) # print the Probs object containing the detected class probabilities + ``` + +Here's a table summarizing the methods and properties for the `Probs` class: + +| Name | Type | Description | +|------------|---------------------------|-------------------------------------------------------------------------| +| `cpu()` | Method | Returns a copy of the probs tensor on CPU memory. | +| `numpy()` | Method | Returns a copy of the probs tensor as a numpy array. | +| `cuda()` | Method | Returns a copy of the probs tensor on GPU memory. | +| `to()` | Method | Returns a copy of the probs tensor with the specified device and dtype. | +| `top1` | Property (`int`) | Index of the top 1 class. | +| `top5` | Property (`list[int]`) | Indices of the top 5 classes. | +| `top1conf` | Property (`torch.Tensor`) | Confidence of the top 1 class. | +| `top5conf` | Property (`torch.Tensor`) | Confidences of the top 5 classes. | + +For more details see the `Probs` class [documentation](../reference/engine/results.md#ultralytics.engine.results.Probs). + +## Plotting Results + +You can use the `plot()` method of a `Results` object to visualize predictions. It plots all prediction types (boxes, masks, keypoints, probabilities, etc.) 
+## Plotting Results
+
+You can use the `plot()` method of a `Results` object to visualize predictions. It plots all prediction types (boxes, masks, keypoints, probabilities, etc.) contained in the `Results` object onto a numpy array that can then be shown or saved.
+
+!!! Example "Plotting"
+
+    ```python
+    from PIL import Image
+    from ultralytics import YOLO
+
+    # Load a pretrained YOLOv8n model
+    model = YOLO('yolov8n.pt')
+
+    # Run inference on 'bus.jpg'
+    results = model('bus.jpg')  # results list
+
+    # Show the results
+    for r in results:
+        im_array = r.plot()  # plot a BGR numpy array of predictions
+        im = Image.fromarray(im_array[..., ::-1])  # RGB PIL image
+        im.show()  # show image
+        im.save('results.jpg')  # save image
+    ```
+
+    The `plot()` method supports the following arguments:
+
+    | Argument     | Type            | Description                                                                     | Default       |
+    |--------------|-----------------|---------------------------------------------------------------------------------|---------------|
+    | `conf`       | `bool`          | Whether to plot the detection confidence score.                                 | `True`        |
+    | `line_width` | `float`         | The line width of the bounding boxes. If None, it is scaled to the image size.  | `None`        |
+    | `font_size`  | `float`         | The font size of the text. If None, it is scaled to the image size.             | `None`        |
+    | `font`       | `str`           | The font to use for the text.                                                   | `'Arial.ttf'` |
+    | `pil`        | `bool`          | Whether to return the image as a PIL Image.                                     | `False`       |
+    | `img`        | `numpy.ndarray` | Plot onto another image; if not provided, plot onto the original image.         | `None`        |
+    | `im_gpu`     | `torch.Tensor`  | Normalized image on GPU with shape (1, 3, 640, 640), for faster mask plotting.  | `None`        |
+    | `kpt_radius` | `int`           | Radius of the drawn keypoints.                                                  | `5`           |
+    | `kpt_line`   | `bool`          | Whether to draw lines connecting keypoints.                                     | `True`        |
+    | `labels`     | `bool`          | Whether to plot the labels of bounding boxes.                                   | `True`        |
+    | `boxes`      | `bool`          | Whether to plot the bounding boxes.                                             | `True`        |
+    | `masks`      | `bool`          | Whether to plot the masks.                                                      | `True`        |
+    | `probs`      | `bool`          | Whether to plot classification probabilities.                                   | `True`        |
+
+## Thread-Safe Inference
+
+Ensuring thread safety during inference is crucial when you are running multiple YOLO models in parallel across different threads. Thread-safe inference guarantees that each thread's predictions are isolated and do not interfere with one another, avoiding race conditions and ensuring consistent and reliable outputs.
+
+When using YOLO models in a multi-threaded application, it's important to instantiate separate model objects for each thread or employ thread-local storage to prevent conflicts:
+
+!!! Example "Thread-Safe Inference"
+
+    Instantiate a single model inside each thread for thread-safe inference:
+    ```python
+    from ultralytics import YOLO
+    from threading import Thread
+
+    def thread_safe_predict(image_path):
+        # Instantiate a new model inside the thread
+        local_model = YOLO("yolov8n.pt")
+        results = local_model.predict(image_path)
+        # Process results
+
+
+    # Starting threads that each have their own model instance
+    Thread(target=thread_safe_predict, args=("image1.jpg",)).start()
+    Thread(target=thread_safe_predict, args=("image2.jpg",)).start()
+    ```
+
+For an in-depth look at thread-safe inference with YOLO models and step-by-step instructions, please refer to our [YOLO Thread-Safe Inference Guide](../guides/yolo-thread-safe-inference.md). This guide will provide you with all the necessary information to avoid common pitfalls and ensure that your multi-threaded inference runs smoothly.
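+
+As an alternative to constructing the model inside each thread function, the thread-local storage mentioned above can hold one model per thread. Below is a minimal sketch of that approach; `get_model` and `_thread_local` are illustrative names, not part of the Ultralytics API:
+
+```python
+import threading
+
+from ultralytics import YOLO
+
+# Hypothetical thread-local store; each thread lazily builds its own model
+_thread_local = threading.local()
+
+
+def get_model():
+    """Return this thread's private YOLO instance, creating it on first use."""
+    if not hasattr(_thread_local, "model"):
+        _thread_local.model = YOLO("yolov8n.pt")
+    return _thread_local.model
+
+
+def thread_safe_predict(image_path):
+    results = get_model().predict(image_path)
+    # Process results
+```
+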
+## Streaming Source `for`-loop
+
+Here's a Python script using OpenCV (`cv2`) and YOLOv8 to run inference on video frames. This script assumes you have already installed the necessary packages (`opencv-python` and `ultralytics`).
+
+!!! Example "Streaming for-loop"
+
+    ```python
+    import cv2
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/your/video/file.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 inference on the frame
+            results = model(frame)
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Inference", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+This script will run predictions on each frame of the video, visualize the results, and display them in a window. The loop can be exited by pressing 'q'.
+
+[car spare parts]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1
+
+[football player detect]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442
+
+[human fall detect]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43
diff --git a/ultralytics/docs/en/modes/predict.md:Zone.Identifier b/ultralytics/docs/en/modes/predict.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/modes/predict.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/modes/track.md b/ultralytics/docs/en/modes/track.md
new file mode 100755
index 0000000..8ec5651
--- /dev/null
+++ b/ultralytics/docs/en/modes/track.md
@@ -0,0 +1,360 @@
+---
+comments: true
+description: Learn how to use Ultralytics YOLO for object tracking in video streams. Guides to use different trackers and customise tracker configurations.
+keywords: Ultralytics, YOLO, object tracking, video streams, BoT-SORT, ByteTrack, Python guide, CLI guide
+---
+
+# Multi-Object Tracking with Ultralytics YOLO
+
+Multi-object tracking examples
+
+Object tracking in the realm of video analytics is a critical task that not only identifies the location and class of objects within the frame but also maintains a unique ID for each detected object as the video progresses. The applications are limitless, ranging from surveillance and security to real-time sports analytics.
+
+## Why Choose Ultralytics YOLO for Object Tracking?
+
+The output from Ultralytics trackers is consistent with standard object detection but has the added value of object IDs. This makes it easy to track objects in video streams and perform subsequent analytics. Here's why you should consider using Ultralytics YOLO for your object tracking needs:
+
+- **Efficiency:** Process video streams in real-time without compromising accuracy.
+- **Flexibility:** Supports multiple tracking algorithms and configurations.
+- **Ease of Use:** Simple Python API and CLI options for quick integration and deployment.
+- **Customizability:** Easy to use with custom trained YOLO models, allowing integration into domain-specific applications.
+
+**Watch:** Object Detection and Tracking with Ultralytics YOLOv8.
+
+## Real-world Applications
+
+|           Transportation           |              Retail              |         Aquaculture          |
+|:----------------------------------:|:--------------------------------:|:----------------------------:|
+| ![Vehicle Tracking][vehicle track] | ![People Tracking][people track] | ![Fish Tracking][fish track] |
+|          Vehicle Tracking          |         People Tracking          |        Fish Tracking         |
+
+## Features at a Glance
+
+Ultralytics YOLO extends its object detection features to provide robust and versatile object tracking:
+
+- **Real-Time Tracking:** Seamlessly track objects in high-frame-rate videos.
+- **Multiple Tracker Support:** Choose from a variety of established tracking algorithms.
+- **Customizable Tracker Configurations:** Tailor the tracking algorithm to meet specific requirements by adjusting various parameters.
+
+## Available Trackers
+
+Ultralytics YOLO supports the following tracking algorithms. They can be enabled by passing the relevant YAML configuration file such as `tracker=tracker_type.yaml`:
+
+* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Use `botsort.yaml` to enable this tracker.
+* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Use `bytetrack.yaml` to enable this tracker.
+
+The default tracker is BoT-SORT.
+
+## Tracking
+
+To run the tracker on video streams, use a trained Detect, Segment or Pose model such as YOLOv8n, YOLOv8n-seg and YOLOv8n-pose.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load an official or custom model
+        model = YOLO('yolov8n.pt')  # Load an official Detect model
+        model = YOLO('yolov8n-seg.pt')  # Load an official Segment model
+        model = YOLO('yolov8n-pose.pt')  # Load an official Pose model
+        model = YOLO('path/to/best.pt')  # Load a custom trained model
+
+        # Perform tracking with the model
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # Tracking with default tracker
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # Tracking with ByteTrack tracker
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Perform tracking with various models using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # Official Detect model
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # Official Segment model
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # Official Pose model
+        yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4"  # Custom trained model
+
+        # Track using ByteTrack tracker
+        yolo track model=path/to/best.pt tracker="bytetrack.yaml"
+        ```
+
+As can be seen in the above usage, tracking is available for all Detect, Segment and Pose models run on videos or streaming sources.
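+
+Each tracked box carries its ID in the `id` property of the `Results` boxes. As a quick illustration, here is a minimal sketch that streams a video through the tracker and prints the track IDs per frame; the video path is a placeholder:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+
+# Stream a video through the tracker and print the track IDs of each frame
+for result in model.track(source="path/to/video.mp4", stream=True):
+    if result.boxes.id is not None:  # IDs can be None before any track is confirmed
+        print(result.boxes.id.int().tolist())
+```
+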
+## Configuration
+
+### Tracking Arguments
+
+Tracking configuration shares properties with Predict mode, such as `conf`, `iou`, and `show`. For further configurations, refer to the [Predict](../modes/predict.md#inference-arguments) model page.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Configure the tracking parameters and run the tracker
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Configure tracking parameters and run the tracker using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### Tracker Selection
+
+Ultralytics also allows you to use a modified tracker configuration file. To do this, simply make a copy of a tracker config file (for example, `custom_tracker.yaml`) from [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) and modify any configurations (except the `tracker_type`) as per your needs.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load the model and run the tracker with a custom configuration file
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load the model and run the tracker with a custom configuration file using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml'
+        ```
+
+For a comprehensive list of tracking arguments, refer to the [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) page.
+
+## Python Examples
+
+### Persisting Tracks Loop
+
+Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking on video frames. This script still assumes you have already installed the necessary packages (`opencv-python` and `ultralytics`). The `persist=True` argument tells the tracker that the current image or frame is the next in a sequence and to expect tracks from the previous image in the current image.
+
+!!! Example "Streaming for-loop with tracking"
+
+    ```python
+    import cv2
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+Please note the change from `model(frame)` to `model.track(frame)`, which enables object tracking instead of simple detection. This modified script will run the tracker on each frame of the video, visualize the results, and display them in a window. The loop can be exited by pressing 'q'.
+
+### Plotting Tracks Over Time
+
+Visualizing object tracks over consecutive frames can provide valuable insights into the movement patterns and behavior of detected objects within a video. With Ultralytics YOLOv8, plotting these tracks is a seamless and efficient process.
+
+In the following example, we demonstrate how to utilize YOLOv8's tracking capabilities to plot the movement of detected objects across multiple video frames. This script involves opening a video file, reading it frame by frame, and utilizing the YOLO model to identify and track various objects. By retaining the center points of the detected bounding boxes and connecting them, we can draw lines that represent the paths followed by the tracked objects.
+
+!!! Example "Plotting tracks over multiple video frames"
+
+    ```python
+    from collections import defaultdict
+
+    import cv2
+    import numpy as np
+
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Store the track history
+    track_history = defaultdict(lambda: [])
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Get the boxes and track IDs
+            boxes = results[0].boxes.xywh.cpu()
+            track_ids = results[0].boxes.id.int().cpu().tolist()
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Plot the tracks
+            for box, track_id in zip(boxes, track_ids):
+                x, y, w, h = box
+                track = track_history[track_id]
+                track.append((float(x), float(y)))  # x, y center point
+                if len(track) > 30:  # retain the track history for the last 30 frames
+                    track.pop(0)
+
+                # Draw the tracking lines
+                points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2))
+                cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10)
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+### Multithreaded Tracking
+
+Multithreaded tracking provides the capability to run object tracking on multiple video streams simultaneously. This is particularly useful when handling multiple video inputs, such as from multiple surveillance cameras, where concurrent processing can greatly enhance efficiency and performance.
+
+In the provided Python script, we make use of Python's `threading` module to run multiple instances of the tracker concurrently. Each thread is responsible for running the tracker on one video file, and all the threads run simultaneously in the background.
+
+To ensure that each thread receives the correct parameters (the video file, the model to use and the file index), we define a function `run_tracker_in_thread` that accepts these parameters and contains the main tracking loop. This function reads the video frame by frame, runs the tracker, and displays the results.
+
+Two different models are used in this example: `yolov8n.pt` and `yolov8n-seg.pt`, each tracking objects in a different video file. The video files are specified in `video_file1` and `video_file2`.
+
+The `daemon=True` parameter in `threading.Thread` means that these threads will be closed as soon as the main program finishes. We then start the threads with `start()` and use `join()` to make the main thread wait until both tracker threads have finished.
+
+Finally, after all threads have completed their task, the windows displaying the results are closed using `cv2.destroyAllWindows()`.
+
+!!! Example "Multithreaded tracking"
+
+    ```python
+    import threading
+    import cv2
+    from ultralytics import YOLO
+
+
+    def run_tracker_in_thread(filename, model, file_index):
+        """
+        Runs a video file or webcam stream concurrently with the YOLOv8 model using threading.
+
+        This function captures video frames from a given file or camera source and utilizes the YOLOv8 model for object
+        tracking. The function runs in its own thread for concurrent processing.
+
+        Args:
+            filename (str): The path to the video file or the identifier for the webcam/external camera source.
+            model (obj): The YOLOv8 model object.
+            file_index (int): An index to uniquely identify the file being processed, used for display purposes.
+
+        Note:
+            Press 'q' to quit the video display window.
+        """
+        video = cv2.VideoCapture(filename)  # Open the video source
+
+        while True:
+            ret, frame = video.read()  # Read the video frames
+
+            # Exit the loop if no more frames in either video
+            if not ret:
+                break
+
+            # Track objects in frames if available
+            results = model.track(frame, persist=True)
+            res_plotted = results[0].plot()
+            cv2.imshow(f"Tracking_Stream_{file_index}", res_plotted)
+
+            key = cv2.waitKey(1)
+            if key == ord('q'):
+                break
+
+        # Release video sources
+        video.release()
+
+
+    # Load the models
+    model1 = YOLO('yolov8n.pt')
+    model2 = YOLO('yolov8n-seg.pt')
+
+    # Define the video files for the trackers
+    video_file1 = "path/to/video1.mp4"  # Path to video file, 0 for webcam
+    video_file2 = 0  # Path to video file, 0 for webcam, 1 for external camera
+
+    # Create the tracker threads
+    tracker_thread1 = threading.Thread(target=run_tracker_in_thread, args=(video_file1, model1, 1), daemon=True)
+    tracker_thread2 = threading.Thread(target=run_tracker_in_thread, args=(video_file2, model2, 2), daemon=True)
+
+    # Start the tracker threads
+    tracker_thread1.start()
+    tracker_thread2.start()
+
+    # Wait for the tracker threads to finish
+    tracker_thread1.join()
+    tracker_thread2.join()
+
+    # Clean up and close windows
+    cv2.destroyAllWindows()
+    ```
+
+This example can easily be extended to handle more video files and models by creating more threads and applying the same methodology.
+
+## Contribute New Trackers
+
+Are you proficient in multi-object tracking and have successfully implemented or adapted a tracking algorithm with Ultralytics YOLO? We invite you to contribute to our Trackers section in [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! Your real-world applications and solutions could be invaluable for users working on tracking tasks.
+
+By contributing to this section, you help expand the scope of tracking solutions available within the Ultralytics YOLO framework, adding another layer of functionality and utility for the community.
+
+To initiate your contribution, please refer to our [Contributing Guide](https://docs.ultralytics.com/help/contributing) for comprehensive instructions on submitting a Pull Request (PR) 🛠️. We are excited to see what you bring to the table!
+
+Together, let's enhance the tracking capabilities of the Ultralytics YOLO ecosystem 🙏!
+ +[vehicle track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab + +[people track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527 + +[fish track]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142 diff --git a/ultralytics/docs/en/modes/track.md:Zone.Identifier b/ultralytics/docs/en/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/train.md b/ultralytics/docs/en/modes/train.md new file mode 100755 index 0000000..331d7cd --- /dev/null +++ b/ultralytics/docs/en/modes/train.md @@ -0,0 +1,296 @@ +--- +comments: true +description: Step-by-step guide to train YOLOv8 models with Ultralytics YOLO including examples of single-GPU and multi-GPU training +keywords: Ultralytics, YOLOv8, YOLO, object detection, train mode, custom dataset, GPU training, multi-GPU, hyperparameters, CLI examples, Python examples +--- + +# Model Training with Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +Training a deep learning model involves feeding it data and adjusting its parameters so that it can make accurate predictions. Train mode in Ultralytics YOLOv8 is engineered for effective and efficient training of object detection models, fully utilizing modern hardware capabilities. This guide aims to cover all the details you need to get started with training your own models using YOLOv8's robust set of features. + +
+
+**Watch:** How to Train a YOLOv8 model on Your Custom Dataset in Google Colab.
+
+## Why Choose Ultralytics YOLO for Training?
+
+Here are some compelling reasons to opt for YOLOv8's Train mode:
+
+- **Efficiency:** Make the most out of your hardware, whether you're on a single-GPU setup or scaling across multiple GPUs.
+- **Versatility:** Train on custom datasets in addition to readily available ones like COCO, VOC, and ImageNet.
+- **User-Friendly:** Simple yet powerful CLI and Python interfaces for a straightforward training experience.
+- **Hyperparameter Flexibility:** A broad range of customizable hyperparameters to fine-tune model performance.
+
+### Key Features of Train Mode
+
+The following are some notable features of YOLOv8's Train mode:
+
+- **Automatic Dataset Download:** Standard datasets like COCO, VOC, and ImageNet are downloaded automatically on first use.
+- **Multi-GPU Support:** Scale your training efforts seamlessly across multiple GPUs to expedite the process.
+- **Hyperparameter Configuration:** The option to modify hyperparameters through YAML configuration files or CLI arguments.
+- **Visualization and Monitoring:** Real-time tracking of training metrics and visualization of the learning process for better insights.
+
+!!! Tip "Tip"
+
+    * YOLOv8 datasets like COCO, VOC, ImageNet and many others automatically download on first use, i.e. `yolo train data=coco.yaml`
+
+## Usage Examples
+
+Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. The training device can be specified using the `device` argument. If no argument is passed, GPU `device=0` will be used if available; otherwise `device=cpu` will be used. See the Arguments section below for a full list of training arguments.
+
+!!! Example "Single-GPU and CPU Training Example"
+
+    The device is determined automatically. If a GPU is available then it will be used, otherwise training will start on CPU.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Start training from a pretrained *.pt model
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### Multi-GPU Training
+
+Multi-GPU training allows for more efficient utilization of available hardware resources by distributing the training load across multiple GPUs. This feature is available through both the Python API and the command-line interface. To enable multi-GPU training, specify the GPU device IDs you wish to use.
+
+!!! Example "Multi-GPU Training Example"
+
+    To train with 2 GPUs (CUDA devices 0 and 1), use the following commands. Expand to additional GPUs as required.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with 2 GPUs
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1])
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using GPUs 0 and 1
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1
+        ```
+
+### Apple M1 and M2 MPS Training
+
+With the support for Apple M1 and M2 chips integrated in the Ultralytics YOLO models, it's now possible to train your models on devices utilizing the powerful Metal Performance Shaders (MPS) framework. MPS offers a high-performance way of executing computation and image processing tasks on Apple's custom silicon.
+
+To enable training on Apple M1 and M2 chips, you should specify 'mps' as your device when initiating the training process. Below is an example of how you could do this in Python and via the command line:
+
+!!! Example "MPS Training Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with MPS
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using MPS
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps
+        ```
+
+This leverages the computational power of the M1/M2 chips, enabling more efficient processing of the training tasks. For more detailed guidance and advanced configuration options, please refer to the [PyTorch MPS documentation](https://pytorch.org/docs/stable/notes/mps.html).
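+
+Before launching a long run on Apple silicon, it can be worth confirming that the MPS backend is actually available in your PyTorch build. A minimal sketch using the standard PyTorch check:
+
+```python
+import torch
+
+# Check that PyTorch was built with MPS support and that the backend is usable
+if torch.backends.mps.is_available():
+    print('MPS is available; training can use device="mps"')
+else:
+    print('MPS is not available; training will fall back to CPU')
+```
+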
+### Resuming Interrupted Trainings
+
+Resuming training from a previously saved state is a crucial feature when working with deep learning models. This can come in handy in various scenarios, like when the training process has been unexpectedly interrupted, or when you wish to continue training a model with new data or for more epochs.
+
+When training is resumed, Ultralytics YOLO loads the weights from the last saved model and also restores the optimizer state, learning rate scheduler, and the epoch number. This allows you to continue the training process seamlessly from where it was left off.
+
+You can easily resume training in Ultralytics YOLO by setting the `resume` argument to `True` when calling the `train` method, and specifying the path to the `.pt` file containing the partially trained model weights.
+
+Below is an example of how to resume an interrupted training using Python and via the command line:
+
+!!! Example "Resume Training Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('path/to/last.pt')  # load a partially trained model
+
+        # Resume training
+        results = model.train(resume=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Resume an interrupted training
+        yolo train resume model=path/to/last.pt
+        ```
+
+By setting `resume=True`, the `train` function will continue training from where it left off, using the state stored in the 'path/to/last.pt' file. If the `resume` argument is omitted or set to `False`, the `train` function will start a new training session.
+
+Remember that checkpoints are saved at the end of every epoch by default, or at a fixed interval using the `save_period` argument, so you must complete at least 1 epoch to resume a training run.
+
+## Arguments
+
+Training settings for YOLO models refer to the various hyperparameters and configurations used to train the model on a dataset. These settings can affect the model's performance, speed, and accuracy. Some common YOLO training settings include the batch size, learning rate, momentum, and weight decay. Other factors that may affect the training process include the choice of optimizer, the choice of loss function, and the size and composition of the training dataset. It is important to carefully tune and experiment with these settings to achieve the best possible performance for a given task.
+
+| Key               | Value    | Description                                                                                      |
+|-------------------|----------|--------------------------------------------------------------------------------------------------|
+| `model`           | `None`   | path to model file, i.e. yolov8n.pt, yolov8n.yaml                                                |
+| `data`            | `None`   | path to data file, i.e. coco128.yaml                                                             |
+| `epochs`          | `100`    | number of epochs to train for                                                                    |
+| `time`            | `None`   | number of hours to train for, overrides epochs if supplied                                       |
+| `patience`        | `50`     | epochs to wait for no observable improvement for early stopping of training                      |
+| `batch`           | `16`     | number of images per batch (-1 for AutoBatch)                                                    |
+| `imgsz`           | `640`    | size of input images as integer                                                                  |
+| `save`            | `True`   | save train checkpoints and predict results                                                       |
+| `save_period`     | `-1`     | Save checkpoint every x epochs (disabled if < 1)                                                 |
+| `cache`           | `False`  | True/ram, disk or False. Use cache for data loading                                              |
+| `device`          | `None`   | device to run on, i.e. cuda device=0 or device=0,1,2,3 or device=cpu                             |
+| `workers`         | `8`      | number of worker threads for data loading (per RANK if DDP)                                      |
+| `project`         | `None`   | project name                                                                                     |
+| `name`            | `None`   | experiment name                                                                                  |
+| `exist_ok`        | `False`  | whether to overwrite existing experiment                                                         |
+| `pretrained`      | `True`   | (bool or str) whether to use a pretrained model (bool) or a model to load weights from (str)     |
+| `optimizer`       | `'auto'` | optimizer to use, choices=[SGD, Adam, Adamax, AdamW, NAdam, RAdam, RMSProp, auto]                |
+| `verbose`         | `False`  | whether to print verbose output                                                                  |
+| `seed`            | `0`      | random seed for reproducibility                                                                  |
+| `deterministic`   | `True`   | whether to enable deterministic mode                                                             |
+| `single_cls`      | `False`  | train multi-class data as single-class                                                           |
+| `rect`            | `False`  | rectangular training with each batch collated for minimum padding                                |
+| `cos_lr`          | `False`  | use cosine learning rate scheduler                                                               |
+| `close_mosaic`    | `10`     | (int) disable mosaic augmentation for final epochs (0 to disable)                                |
+| `resume`          | `False`  | resume training from last checkpoint                                                             |
+| `amp`             | `True`   | Automatic Mixed Precision (AMP) training, choices=[True, False]                                  |
+| `fraction`        | `1.0`    | dataset fraction to train on (default is 1.0, all images in train set)                           |
+| `profile`         | `False`  | profile ONNX and TensorRT speeds during training for loggers                                     |
+| `freeze`          | `None`   | (int or list, optional) freeze first n layers, or freeze list of layer indices during training   |
+| `lr0`             | `0.01`   | initial learning rate (i.e. SGD=1E-2, Adam=1E-3)                                                 |
+| `lrf`             | `0.01`   | final learning rate (lr0 * lrf)                                                                  |
+| `momentum`        | `0.937`  | SGD momentum/Adam beta1                                                                          |
+| `weight_decay`    | `0.0005` | optimizer weight decay 5e-4                                                                      |
+| `warmup_epochs`   | `3.0`    | warmup epochs (fractions ok)                                                                     |
+| `warmup_momentum` | `0.8`    | warmup initial momentum                                                                          |
+| `warmup_bias_lr`  | `0.1`    | warmup initial bias lr                                                                           |
+| `box`             | `7.5`    | box loss gain                                                                                    |
+| `cls`             | `0.5`    | cls loss gain (scale with pixels)                                                                |
+| `dfl`             | `1.5`    | dfl loss gain                                                                                    |
+| `pose`            | `12.0`   | pose loss gain (pose-only)                                                                       |
+| `kobj`            | `2.0`    | keypoint obj loss gain (pose-only)                                                               |
+| `label_smoothing` | `0.0`    | label smoothing (fraction)                                                                       |
+| `nbs`             | `64`     | nominal batch size                                                                               |
+| `overlap_mask`    | `True`   | masks should overlap during training (segment train only)                                        |
+| `mask_ratio`      | `4`      | mask downsample ratio (segment train only)                                                       |
+| `dropout`         | `0.0`    | use dropout regularization (classify train only)                                                 |
+| `val`             | `True`   | validate/test during training                                                                    |
+| `plots`           | `False`  | save plots and images during train/val                                                           |
+
+## Logging
+
+In training a YOLOv8 model, you might find it valuable to keep track of the model's performance over time. This is where logging comes into play. Ultralytics' YOLO provides support for three types of loggers - Comet, ClearML, and TensorBoard.
+
+To use a logger, select one from the sections below and run the corresponding snippet. The chosen logger will be installed and initialized.
+
+### Comet
+
+[Comet](../integrations/comet.md) is a platform that allows data scientists and developers to track, compare, explain and optimize experiments and models. It provides functionalities such as real-time metrics, code diffs, and hyperparameter tracking.
+
+To use Comet:
+
+!!! Example
+
+    === "Python"
+        ```python
+        # pip install comet_ml
+        import comet_ml
+
+        comet_ml.init()
+        ```
+
+Remember to sign in to your Comet account on their website and get your API key. You will need to add this to your environment variables or your script to log your experiments.
+
+### ClearML
+
+[ClearML](https://www.clear.ml/) is an open-source platform that automates tracking of experiments and helps with efficient sharing of resources. It is designed to help teams manage, execute, and reproduce their ML work more efficiently.
+
+To use ClearML:
+
+!!! Example
+
+    === "Python"
+        ```python
+        # pip install clearml
+        import clearml
+
+        clearml.browser_login()
+        ```
+
+After running this script, you will need to sign in to your ClearML account in the browser and authenticate your session.
+
+### TensorBoard
+
+[TensorBoard](https://www.tensorflow.org/tensorboard) is a visualization toolkit for TensorFlow. It allows you to visualize your TensorFlow graph, plot quantitative metrics about the execution of your graph, and show additional data like images that pass through it.
+
+To use TensorBoard in [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb):
+
+!!! Example
+
+    === "CLI"
+        ```bash
+        %load_ext tensorboard
+        %tensorboard --logdir ultralytics/runs  # replace with 'runs' directory
+        ```
+
+To use TensorBoard locally, run the command below and view the results at http://localhost:6006/.
+
+!!! Example
+
+    === "CLI"
+        ```bash
+        tensorboard --logdir ultralytics/runs  # replace with 'runs' directory
+        ```
+
+This will load TensorBoard and direct it to the directory where your training logs are saved.
+
+After setting up your logger, you can then proceed with your model training.
All training metrics will be automatically logged in your chosen platform, and you can access these logs to monitor your model's performance over time, compare different models, and identify areas for improvement. diff --git a/ultralytics/docs/en/modes/train.md:Zone.Identifier b/ultralytics/docs/en/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/modes/val.md b/ultralytics/docs/en/modes/val.md new file mode 100755 index 0000000..d9e2c96 --- /dev/null +++ b/ultralytics/docs/en/modes/val.md @@ -0,0 +1,96 @@ +--- +comments: true +description: Guide for Validating YOLOv8 Models. Learn how to evaluate the performance of your YOLO models using validation settings and metrics with Python and CLI examples. +keywords: Ultralytics, YOLO Docs, YOLOv8, validation, model evaluation, hyperparameters, accuracy, metrics, Python, CLI +--- + +# Model Validation with Ultralytics YOLO + +Ultralytics YOLO ecosystem and integrations + +## Introduction + +Validation is a critical step in the machine learning pipeline, allowing you to assess the quality of your trained models. Val mode in Ultralytics YOLOv8 provides a robust suite of tools and metrics for evaluating the performance of your object detection models. This guide serves as a complete resource for understanding how to effectively use the Val mode to ensure that your models are both accurate and reliable. + +
+
+**Watch:** Ultralytics Modes Tutorial: Validation
+
+## Why Validate with Ultralytics YOLO?
+
+Here's why using YOLOv8's Val mode is advantageous:
+
+- **Precision:** Get accurate metrics like mAP50, mAP75, and mAP50-95 to comprehensively evaluate your model.
+- **Convenience:** Utilize built-in features that remember training settings, simplifying the validation process.
+- **Flexibility:** Validate your model with the same or different datasets and image sizes.
+- **Hyperparameter Tuning:** Use validation metrics to fine-tune your model for better performance.
+
+### Key Features of Val Mode
+
+These are the notable functionalities offered by YOLOv8's Val mode:
+
+- **Automated Settings:** Models remember their training configurations for straightforward validation.
+- **Multi-Metric Support:** Evaluate your model based on a range of accuracy metrics.
+- **CLI and Python API:** Choose from command-line interface or Python API based on your preference for validation.
+- **Data Compatibility:** Works seamlessly with datasets used during the training phase as well as custom datasets.
+
+!!! Tip "Tip"
+
+    * YOLOv8 models automatically remember their training settings, so you can validate a model at the same image size and on the original dataset easily with just `yolo val model=yolov8n.pt` or `model('yolov8n.pt').val()`
+
+## Usage Examples
+
+Validate trained YOLOv8n model accuracy on the COCO128 dataset. No arguments need to be passed, as the `model` retains its training `data` and arguments as model attributes. See the Arguments section below for a full list of validation arguments.
+
+!!! Example
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Validate the model
+        metrics = model.val()  # no arguments needed, dataset and settings remembered
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # a list containing mAP50-95 for each category
+        ```
+
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # val official model
+        yolo detect val model=path/to/best.pt  # val custom model
+        ```
+
+## Arguments
+
+Validation settings for YOLO models refer to the various hyperparameters and configurations used to evaluate the model's performance on a validation dataset. These settings can affect the model's performance, speed, and accuracy. Some common YOLO validation settings include the batch size, the frequency with which validation is performed during training, and the metrics used to evaluate the model's performance. Other factors that may affect the validation process include the size and composition of the validation dataset and the specific task the model is being used for. It is important to carefully tune and experiment with these settings to ensure that the model is performing well on the validation dataset and to detect and prevent overfitting.
+
+| Key           | Value   | Description                                                        |
+|---------------|---------|---------------------------------------------------------------------|
+| `data`        | `None`  | path to data file, i.e. coco128.yaml                               |
+| `imgsz`       | `640`   | size of input images as integer                                    |
+| `batch`       | `16`    | number of images per batch (-1 for AutoBatch)                      |
+| `save_json`   | `False` | save results to JSON file                                          |
+| `save_hybrid` | `False` | save hybrid version of labels (labels + additional predictions)    |
+| `conf`        | `0.001` | object confidence threshold for detection                          |
+| `iou`         | `0.6`   | intersection over union (IoU) threshold for NMS                    |
+| `max_det`     | `300`   | maximum number of detections per image                             |
+| `half`        | `True`  | use half precision (FP16)                                          |
+| `device`      | `None`  | device to run on, i.e. cuda device=0/1/2/3 or device=cpu           |
+| `dnn`         | `False` | use OpenCV DNN for ONNX inference                                  |
+| `plots`       | `False` | save plots and images during train/val                             |
+| `rect`        | `False` | rectangular val with each batch collated for minimum padding       |
+| `split`       | `val`   | dataset split to use for validation, i.e. 'val', 'test' or 'train' |
diff --git a/ultralytics/docs/en/modes/val.md:Zone.Identifier b/ultralytics/docs/en/modes/val.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/en/modes/val.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/quickstart.md b/ultralytics/docs/en/quickstart.md
new file mode 100755
index 0000000..4dd0eae
--- /dev/null
+++ b/ultralytics/docs/en/quickstart.md
@@ -0,0 +1,327 @@
+---
+comments: true
+description: Explore various methods to install Ultralytics using pip, conda, git and Docker. Learn how to use Ultralytics with command line interface or within your Python projects.
+keywords: Ultralytics installation, pip install Ultralytics, Docker install Ultralytics, Ultralytics command line interface, Ultralytics Python interface
+---
+
+## Install Ultralytics
+
+Ultralytics provides various installation methods including pip, conda, and Docker. Install YOLOv8 via the `ultralytics` pip package for the latest stable release or by cloning the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics) for the most up-to-date version. Docker can be used to execute the package in an isolated container, avoiding local installation.
+
+**Watch:** Ultralytics YOLO Quick Start Guide
+
+!!! Example "Install"
+
+    === "Pip install (recommended)"
+        Install the `ultralytics` package using pip, or update an existing installation by running `pip install -U ultralytics`. Visit the Python Package Index (PyPI) for more details on the `ultralytics` package: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/).
+
+        [![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics)
+
+        ```bash
+        # Install the ultralytics package from PyPI
+        pip install ultralytics
+        ```
+
+        You can also install the `ultralytics` package directly from the GitHub [repository](https://github.com/ultralytics/ultralytics). This might be useful if you want the latest development version. Make sure to have the Git command-line tool installed on your system. The `@main` suffix installs the `main` branch and may be modified to another branch, i.e. `@my-branch`, or removed entirely to default to the `main` branch.
+
+        ```bash
+        # Install the ultralytics package from GitHub
+        pip install git+https://github.com/ultralytics/ultralytics.git@main
+        ```
+
+
+    === "Conda install"
+        Conda is an alternative package manager to pip which may also be used for installation. Visit Anaconda for more details at [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). The Ultralytics feedstock repository for updating the conda package is at [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/).
+
+
+        [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics)
+
+        ```bash
+        # Install the ultralytics package using conda
+        conda install -c conda-forge ultralytics
+        ```
+
+        !!! Note
+
+            If you are installing in a CUDA environment, best practice is to install `ultralytics`, `pytorch` and `pytorch-cuda` in the same command to allow the conda package manager to resolve any conflicts, or else to install `pytorch-cuda` last to allow it to override the CPU-specific `pytorch` package if necessary.
+            ```bash
+            # Install all packages together using conda
+            conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics
+            ```
+
+        ### Conda Docker Image
+
+        Ultralytics Conda Docker images are also available from [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). These images are based on [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) and are a simple way to start using `ultralytics` in a Conda environment.
+
+        ```bash
+        # Set image name as a variable
+        t=ultralytics/ultralytics:latest-conda
+
+        # Pull the latest ultralytics image from Docker Hub
+        sudo docker pull $t
+
+        # Run the ultralytics image in a container with GPU support
+        sudo docker run -it --ipc=host --gpus all $t  # all GPUs
+        sudo docker run -it --ipc=host --gpus '"device=2,3"' $t  # specify GPUs
+        ```
+
+    === "Git clone"
+        Clone the `ultralytics` repository if you are interested in contributing to the development or wish to experiment with the latest source code. After cloning, navigate into the directory and install the package in editable mode `-e` using pip.
+        ```bash
+        # Clone the ultralytics repository
+        git clone https://github.com/ultralytics/ultralytics
+
+        # Navigate to the cloned directory
+        cd ultralytics
+
+        # Install the package in editable mode for development
+        pip install -e .
+        ```
+
+    === "Docker"
+
+        Utilize Docker to effortlessly execute the `ultralytics` package in an isolated container, ensuring consistent and smooth performance across various environments. By choosing one of the official `ultralytics` images from [Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics), you not only avoid the complexity of local installation but also benefit from access to a verified working environment. Ultralytics offers six main supported Docker images, each designed to provide high compatibility and efficiency for different platforms and use cases:
+
+        Docker Pulls
+
+        - **Dockerfile:** GPU image recommended for training.
+        - **Dockerfile-arm64:** Optimized for ARM64 architecture, allowing deployment on devices like Raspberry Pi and other ARM64-based platforms.
+        - **Dockerfile-cpu:** Ubuntu-based CPU-only version suitable for inference and environments without GPUs.
+        - **Dockerfile-jetson:** Tailored for NVIDIA Jetson devices, integrating GPU support optimized for these platforms.
+        - **Dockerfile-python:** Minimal image with just Python and necessary dependencies, ideal for lightweight applications and development.
+        - **Dockerfile-conda:** Based on Miniconda3 with conda installation of the ultralytics package.
+
+        Below are the commands to get the latest image and execute it:
+
+        ```bash
+        # Set image name as a variable
+        t=ultralytics/ultralytics:latest
+
+        # Pull the latest ultralytics image from Docker Hub
+        sudo docker pull $t
+
+        # Run the ultralytics image in a container with GPU support
+        sudo docker run -it --ipc=host --gpus all $t  # all GPUs
+        sudo docker run -it --ipc=host --gpus '"device=2,3"' $t  # specify GPUs
+        ```
+
+        The above command initializes a Docker container with the latest `ultralytics` image. The `-it` flag assigns a pseudo-TTY and keeps stdin open, enabling you to interact with the container. The `--ipc=host` flag sets the IPC (Inter-Process Communication) namespace to the host, which is essential for sharing memory between processes. The `--gpus all` flag enables access to all available GPUs inside the container, which is crucial for tasks that require GPU computation.
+
+        Note: To work with files on your local machine within the container, use Docker volumes for mounting a local directory into the container:
+
+        ```bash
+        # Mount local directory to a directory inside the container
+        sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t
+        ```
+
+        Replace `/path/on/host` with the directory path on your local machine, and `/path/in/container` with the desired path inside the Docker container for accessibility.
+ + For advanced Docker usage, feel free to explore the [Ultralytics Docker Guide](https://docs.ultralytics.com/guides/docker-quickstart/). + +See the `ultralytics` [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) file for a list of dependencies. Note that all examples above install all required dependencies. + +!!! Tip "Tip" + + PyTorch requirements vary by operating system and CUDA requirements, so it's recommended to install PyTorch first following instructions at [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally). + + + PyTorch Installation Instructions + + +## Use Ultralytics with CLI + +The Ultralytics command line interface (CLI) allows for simple single-line commands without the need for a Python environment. CLI requires no customization or Python code. You can simply run all tasks from the terminal with the `yolo` command. Check out the [CLI Guide](usage/cli.md) to learn more about using YOLOv8 from the command line. + +!!! Example + + === "Syntax" + + Ultralytics `yolo` commands use the following syntax: + ```bash + yolo TASK MODE ARGS + ``` + + - `TASK` (optional) is one of ([detect](tasks/detect.md), [segment](tasks/segment.md), [classify](tasks/classify.md), [pose](tasks/pose.md)) + - `MODE` (required) is one of ([train](modes/train.md), [val](modes/val.md), [predict](modes/predict.md), [export](modes/export.md), [track](modes/track.md)) + - `ARGS` (optional) are `arg=value` pairs like `imgsz=640` that override defaults. + + See all `ARGS` in the full [Configuration Guide](usage/cfg.md) or with the `yolo cfg` CLI command. + + === "Train" + + Train a detection model for 10 epochs with an initial learning_rate of 0.01 + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "Predict" + + Predict a YouTube video using a pretrained segmentation model at image size 320: + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "Val" + + Val a pretrained detection model at batch-size 1 and image size 640: + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "Export" + + Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required) + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "Special" + + Run special commands to see version, view settings, run checks and more: + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "Warning" + + Arguments must be passed as `arg=val` pairs, split by an equals `=` sign and delimited by spaces between pairs. Do not use `--` argument prefixes or commas `,` between arguments. + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25` โŒ (missing `=`) + - `yolo predict model=yolov8n.pt, imgsz=640, conf=0.25` โŒ (do not use `,`) + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` โŒ (do not use `--`) + +[CLI Guide](usage/cli.md){ .md-button } + +## Use Ultralytics with Python + +YOLOv8's Python interface allows for seamless integration into your Python projects, making it easy to load, run, and process the model's output. Designed with simplicity and ease of use in mind, the Python interface enables users to quickly implement object detection, segmentation, and classification in their projects. 
This makes YOLOv8's Python interface an invaluable tool for anyone looking to incorporate these functionalities into their Python projects. + +For example, users can load a model, train it, evaluate its performance on a validation set, and even export it to ONNX format with just a few lines of code. Check out the [Python Guide](usage/python.md) to learn more about using YOLOv8 within your Python projects. + +!!! Example + + ```python + from ultralytics import YOLO + + # Create a new YOLO model from scratch + model = YOLO('yolov8n.yaml') + + # Load a pretrained YOLO model (recommended for training) + model = YOLO('yolov8n.pt') + + # Train the model using the 'coco128.yaml' dataset for 3 epochs + results = model.train(data='coco128.yaml', epochs=3) + + # Evaluate the model's performance on the validation set + results = model.val() + + # Perform object detection on an image using the model + results = model('https://ultralytics.com/images/bus.jpg') + + # Export the model to ONNX format + success = model.export(format='onnx') + ``` + +[Python Guide](usage/python.md){.md-button .md-button--primary} + +## Ultralytics Settings + +The Ultralytics library provides a powerful settings management system to enable fine-grained control over your experiments. By making use of the `SettingsManager` housed within the `ultralytics.utils` module, users can readily access and alter their settings. These are stored in a YAML file and can be viewed or modified either directly within the Python environment or via the Command-Line Interface (CLI). + +### Inspecting Settings + +To gain insight into the current configuration of your settings, you can view them directly: + +!!! Example "View settings" + + === "Python" + You can use Python to view your settings. Start by importing the `settings` object from the `ultralytics` module. Print and return settings using the following commands: + ```python + from ultralytics import settings + + # View all settings + print(settings) + + # Return a specific setting + value = settings['runs_dir'] + ``` + + === "CLI" + Alternatively, the command-line interface allows you to check your settings with a simple command: + ```bash + yolo settings + ``` + +### Modifying Settings + +Ultralytics allows users to easily modify their settings. Changes can be performed in the following ways: + +!!! Example "Update settings" + + === "Python" + Within the Python environment, call the `update` method on the `settings` object to change your settings: + ```python + from ultralytics import settings + + # Update a setting + settings.update({'runs_dir': '/path/to/runs'}) + + # Update multiple settings + settings.update({'runs_dir': '/path/to/runs', 'tensorboard': False}) + + # Reset settings to default values + settings.reset() + ``` + + === "CLI" + If you prefer using the command-line interface, the following commands will allow you to modify your settings: + ```bash + # Update a setting + yolo settings runs_dir='/path/to/runs' + + # Update multiple settings + yolo settings runs_dir='/path/to/runs' tensorboard=False + + # Reset settings to default values + yolo settings reset + ``` + +### Understanding Settings + +The table below provides an overview of the settings available for adjustment within Ultralytics. Each setting is outlined along with an example value, the data type, and a brief description. 
+ +| Name | Example Value | Data Type | Description | +|--------------------|-----------------------|-----------|------------------------------------------------------------------------------------------------------------------| +| `settings_version` | `'0.0.4'` | `str` | Ultralytics _settings_ version (different from Ultralytics [pip](https://pypi.org/project/ultralytics/) version) | +| `datasets_dir` | `'/path/to/datasets'` | `str` | The directory where the datasets are stored | +| `weights_dir` | `'/path/to/weights'` | `str` | The directory where the model weights are stored | +| `runs_dir` | `'/path/to/runs'` | `str` | The directory where the experiment runs are stored | +| `uuid` | `'a1b2c3d4'` | `str` | The unique identifier for the current settings | +| `sync` | `True` | `bool` | Whether to sync analytics and crashes to HUB | +| `api_key` | `''` | `str` | Ultralytics HUB [API Key](https://hub.ultralytics.com/settings?tab=api+keys) | +| `clearml` | `True` | `bool` | Whether to use ClearML logging | +| `comet` | `True` | `bool` | Whether to use [Comet ML](https://bit.ly/yolov8-readme-comet) for experiment tracking and visualization | +| `dvc` | `True` | `bool` | Whether to use [DVC for experiment tracking](https://dvc.org/doc/dvclive/ml-frameworks/yolo) and version control | +| `hub` | `True` | `bool` | Whether to use [Ultralytics HUB](https://hub.ultralytics.com) integration | +| `mlflow` | `True` | `bool` | Whether to use MLFlow for experiment tracking | +| `neptune` | `True` | `bool` | Whether to use Neptune for experiment tracking | +| `raytune` | `True` | `bool` | Whether to use Ray Tune for hyperparameter tuning | +| `tensorboard` | `True` | `bool` | Whether to use TensorBoard for visualization | +| `wandb` | `True` | `bool` | Whether to use Weights & Biases logging | + +As you navigate through your projects or experiments, be sure to revisit these settings to ensure that they are optimally configured for your needs. diff --git a/ultralytics/docs/en/quickstart.md:Zone.Identifier b/ultralytics/docs/en/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/cfg/__init__.md b/ultralytics/docs/en/reference/cfg/__init__.md new file mode 100755 index 0000000..d73a9f2 --- /dev/null +++ b/ultralytics/docs/en/reference/cfg/__init__.md @@ -0,0 +1,60 @@ +--- +description: Explore Ultralytics cfg functions like cfg2dict, handle_deprecation, merge_equal_args & more to handle YOLO settings and configurations efficiently. +keywords: Ultralytics, YOLO, Configuration, cfg2dict, handle_deprecation, merge_equals_args, handle_yolo_settings, copy_default_cfg, Image Detection +--- + +# Reference for `ultralytics/cfg/__init__.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/__init__.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/__init__.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/cfg/__init__.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.cfg.cfg2dict + +

+ +## ::: ultralytics.cfg.get_cfg + +

+ +## ::: ultralytics.cfg.get_save_dir + +

+ +## ::: ultralytics.cfg._handle_deprecation + +

+ +## ::: ultralytics.cfg.check_dict_alignment + +

+ +## ::: ultralytics.cfg.merge_equals_args + +

+ +## ::: ultralytics.cfg.handle_yolo_hub + +

+ +## ::: ultralytics.cfg.handle_yolo_settings + +

+ +## ::: ultralytics.cfg.parse_key_value_pair + +

+ +## ::: ultralytics.cfg.smart_value + +

+ +## ::: ultralytics.cfg.entrypoint + +

+ +## ::: ultralytics.cfg.copy_default_cfg + +
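For quick orientation, here is a minimal sketch of `get_cfg`, the central helper above that merges user overrides into the packaged default configuration; the override values shown are arbitrary illustrations, not recommended settings.

```python
from ultralytics.cfg import get_cfg

# Merge two illustrative overrides into the default config namespace
cfg = get_cfg(overrides={'imgsz': 320, 'epochs': 5})

print(cfg.imgsz)   # 320
print(cfg.epochs)  # 5
```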

diff --git a/ultralytics/docs/en/reference/cfg/__init__.md:Zone.Identifier b/ultralytics/docs/en/reference/cfg/__init__.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/cfg/__init__.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/annotator.md b/ultralytics/docs/en/reference/data/annotator.md new file mode 100755 index 0000000..ab685b2 --- /dev/null +++ b/ultralytics/docs/en/reference/data/annotator.md @@ -0,0 +1,16 @@ +--- +description: Enhance your machine learning model with Ultralytics’ auto_annotate function. Simplify data annotation for improved model training. +keywords: Ultralytics, Auto-Annotate, Machine Learning, AI, Annotation, Data Processing, Model Training +--- + +# Reference for `ultralytics/data/annotator.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/annotator.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/annotator.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/annotator.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.annotator.auto_annotate + +
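For orientation, a minimal sketch of calling `auto_annotate`, which detects objects with a YOLOv8 model and then segments them with SAM to produce labels; the image directory is a placeholder path, and default model names may differ between releases.

```python
from ultralytics.data.annotator import auto_annotate

# Auto-label a folder of images: detect with YOLOv8, then segment with SAM
auto_annotate(data='path/to/images', det_model='yolov8n.pt', sam_model='sam_b.pt')
```

By default the generated label files should land in an auto-created folder next to the image directory.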

diff --git a/ultralytics/docs/en/reference/data/annotator.md:Zone.Identifier b/ultralytics/docs/en/reference/data/annotator.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/annotator.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/augment.md b/ultralytics/docs/en/reference/data/augment.md new file mode 100755 index 0000000..3b16090 --- /dev/null +++ b/ultralytics/docs/en/reference/data/augment.md @@ -0,0 +1,88 @@ +--- +description: Detailed exploration into Ultralytics data augmentation methods including BaseTransform, MixUp, LetterBox, ToTensor, and more for enhancing model performance. +keywords: Ultralytics, Data Augmentation, BaseTransform, MixUp, RandomHSV, LetterBox, Albumentations, classify_transforms, classify_albumentations +--- + +# Reference for `ultralytics/data/augment.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/augment.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/augment.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/augment.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.augment.BaseTransform + +

+ +## ::: ultralytics.data.augment.Compose + +

+ +## ::: ultralytics.data.augment.BaseMixTransform + +

+ +## ::: ultralytics.data.augment.Mosaic + +

+ +## ::: ultralytics.data.augment.MixUp + +

+ +## ::: ultralytics.data.augment.RandomPerspective + +

+ +## ::: ultralytics.data.augment.RandomHSV + +

+ +## ::: ultralytics.data.augment.RandomFlip + +

+ +## ::: ultralytics.data.augment.LetterBox + +

+ +## ::: ultralytics.data.augment.CopyPaste + +

+ +## ::: ultralytics.data.augment.Albumentations + +

+ +## ::: ultralytics.data.augment.Format + +

+ +## ::: ultralytics.data.augment.ClassifyLetterBox + +

+ +## ::: ultralytics.data.augment.CenterCrop + +

+ +## ::: ultralytics.data.augment.ToTensor + +

+ +## ::: ultralytics.data.augment.v8_transforms + +

+ +## ::: ultralytics.data.augment.classify_transforms + +

+ +## ::: ultralytics.data.augment.hsv2colorjitter + +

+ +## ::: ultralytics.data.augment.classify_albumentations + +
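Most of these transforms are composed internally by `v8_transforms`, but they can also be used standalone. A minimal sketch with `LetterBox`, assuming a local image file `bus.jpg` exists (placeholder path):

```python
import cv2
from ultralytics.data.augment import LetterBox

im = cv2.imread('bus.jpg')                   # HWC BGR image (placeholder path)
letterbox = LetterBox(new_shape=(640, 640))  # resize and pad, preserving aspect ratio

im_lb = letterbox(image=im)  # returns just the image when no labels dict is passed
print(im_lb.shape)           # e.g. (640, 640, 3)
```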

diff --git a/ultralytics/docs/en/reference/data/augment.md:Zone.Identifier b/ultralytics/docs/en/reference/data/augment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/augment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/base.md b/ultralytics/docs/en/reference/data/base.md new file mode 100755 index 0000000..c98a7c3 --- /dev/null +++ b/ultralytics/docs/en/reference/data/base.md @@ -0,0 +1,16 @@ +--- +description: Explore BaseDataset in Ultralytics docs. Learn how this implementation simplifies dataset creation and manipulation. +keywords: Ultralytics, docs, BaseDataset, data manipulation, dataset creation +--- + +# Reference for `ultralytics/data/base.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/base.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/base.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/base.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.base.BaseDataset + +

diff --git a/ultralytics/docs/en/reference/data/base.md:Zone.Identifier b/ultralytics/docs/en/reference/data/base.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/base.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/build.md b/ultralytics/docs/en/reference/data/build.md new file mode 100755 index 0000000..811c11d --- /dev/null +++ b/ultralytics/docs/en/reference/data/build.md @@ -0,0 +1,40 @@ +--- +description: Explore the Ultralytics YOLO v3 data build procedures, including the InfiniteDataLoader, seed_worker, build_dataloader, and load_inference_source. +keywords: Ultralytics, YOLO v3, Data build, DataLoader, InfiniteDataLoader, seed_worker, build_dataloader, load_inference_source +--- + +# Reference for `ultralytics/data/build.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/build.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/build.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/build.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.build.InfiniteDataLoader + +

+ +## ::: ultralytics.data.build._RepeatSampler + +

+ +## ::: ultralytics.data.build.seed_worker + +

+ +## ::: ultralytics.data.build.build_yolo_dataset + +

+ +## ::: ultralytics.data.build.build_dataloader + +

+ +## ::: ultralytics.data.build.check_source + +

+ +## ::: ultralytics.data.build.load_inference_source + +
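A minimal sketch of `load_inference_source`, which resolves an arbitrary source (file path, URL, webcam index, PIL/NumPy image, tensor) into one of the loaders above; the exact loader class returned and the function's optional arguments may vary between versions.

```python
from ultralytics.data.build import load_inference_source

# Resolve a URL source into a loader object ready for iteration
dataset = load_inference_source(source='https://ultralytics.com/images/bus.jpg')
print(type(dataset).__name__)  # likely LoadImages for a single image file
```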

diff --git a/ultralytics/docs/en/reference/data/build.md:Zone.Identifier b/ultralytics/docs/en/reference/data/build.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/build.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/converter.md b/ultralytics/docs/en/reference/data/converter.md new file mode 100755 index 0000000..bbad9d3 --- /dev/null +++ b/ultralytics/docs/en/reference/data/converter.md @@ -0,0 +1,36 @@ +--- +description: Explore Ultralytics data converter functions like coco91_to_coco80_class, merge_multi_segment, rle2polygon for efficient data handling. +keywords: Ultralytics, Data Converter, coco91_to_coco80_class, merge_multi_segment, rle2polygon +--- + +# Reference for `ultralytics/data/converter.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/converter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/converter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/converter.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.converter.coco91_to_coco80_class + +

+ +## ::: ultralytics.data.converter.coco80_to_coco91_class + +

+ +## ::: ultralytics.data.converter.convert_coco + +

+ +## ::: ultralytics.data.converter.convert_dota_to_yolo_obb + +

+ +## ::: ultralytics.data.converter.min_index + +

+ +## ::: ultralytics.data.converter.merge_multi_segment + +
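For orientation, a minimal sketch of `convert_coco`, which rewrites COCO JSON annotations as YOLO-format label files; the annotations directory is a placeholder path.

```python
from ultralytics.data.converter import convert_coco

# Convert COCO detection annotations to YOLO txt labels
convert_coco(labels_dir='path/to/coco/annotations/')

# Keep segmentation polygons instead of plain boxes
convert_coco(labels_dir='path/to/coco/annotations/', use_segments=True)
```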

diff --git a/ultralytics/docs/en/reference/data/converter.md:Zone.Identifier b/ultralytics/docs/en/reference/data/converter.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/converter.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/dataset.md b/ultralytics/docs/en/reference/data/dataset.md new file mode 100755 index 0000000..242a054 --- /dev/null +++ b/ultralytics/docs/en/reference/data/dataset.md @@ -0,0 +1,32 @@ +--- +description: Explore the YOLODataset and SemanticDataset classes in YOLO data. Learn how to efficiently handle and manipulate your data with Ultralytics. +keywords: Ultralytics, YOLO, YOLODataset, SemanticDataset, data handling, data manipulation +--- + +# Reference for `ultralytics/data/dataset.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/dataset.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/dataset.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/dataset.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.dataset.YOLODataset + +

+ +## ::: ultralytics.data.dataset.ClassificationDataset + +

+ +## ::: ultralytics.data.dataset.SemanticDataset + +

+ +## ::: ultralytics.data.dataset.load_dataset_cache_file + +

+ +## ::: ultralytics.data.dataset.save_dataset_cache_file + +

diff --git a/ultralytics/docs/en/reference/data/dataset.md:Zone.Identifier b/ultralytics/docs/en/reference/data/dataset.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/dataset.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/loaders.md b/ultralytics/docs/en/reference/data/loaders.md new file mode 100755 index 0000000..3ba4c16 --- /dev/null +++ b/ultralytics/docs/en/reference/data/loaders.md @@ -0,0 +1,44 @@ +--- +description: Find detailed guides on Ultralytics YOLO data loaders, including LoadStreams, LoadImages and LoadTensor. Learn how to get the best YouTube URLs. +keywords: Ultralytics, data loaders, LoadStreams, LoadImages, LoadTensor, YOLO, YouTube URLs +--- + +# Reference for `ultralytics/data/loaders.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/loaders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/loaders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/loaders.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.loaders.SourceTypes + +

+ +## ::: ultralytics.data.loaders.LoadStreams + +

+ +## ::: ultralytics.data.loaders.LoadScreenshots + +

+ +## ::: ultralytics.data.loaders.LoadImages + +

+ +## ::: ultralytics.data.loaders.LoadPilAndNumpy + +

+ +## ::: ultralytics.data.loaders.LoadTensor + +

+ +## ::: ultralytics.data.loaders.autocast_list + +

+ +## ::: ultralytics.data.loaders.get_best_youtube_url + +

diff --git a/ultralytics/docs/en/reference/data/loaders.md:Zone.Identifier b/ultralytics/docs/en/reference/data/loaders.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/loaders.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/data/utils.md b/ultralytics/docs/en/reference/data/utils.md new file mode 100755 index 0000000..af06ce8 --- /dev/null +++ b/ultralytics/docs/en/reference/data/utils.md @@ -0,0 +1,68 @@ +--- +description: Uncover a detailed guide to Ultralytics data utilities. Learn functions from img2label_paths to autosplit, all boosting your YOLO model’s efficiency. +keywords: Ultralytics, data utils, YOLO, img2label_paths, exif_size, polygon2mask, polygons2masks_overlap, check_cls_dataset, delete_dsstore, autosplit +--- + +# Reference for `ultralytics/data/utils.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/data/utils.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.data.utils.HUBDatasetStats + +

+ +## ::: ultralytics.data.utils.img2label_paths + +

+ +## ::: ultralytics.data.utils.get_hash + +

+ +## ::: ultralytics.data.utils.exif_size + +

+ +## ::: ultralytics.data.utils.verify_image + +

+ +## ::: ultralytics.data.utils.verify_image_label + +

+ +## ::: ultralytics.data.utils.polygon2mask + +

+ +## ::: ultralytics.data.utils.polygons2masks + +

+ +## ::: ultralytics.data.utils.polygons2masks_overlap + +

+ +## ::: ultralytics.data.utils.find_dataset_yaml + +

+ +## ::: ultralytics.data.utils.check_det_dataset + +

+ +## ::: ultralytics.data.utils.check_cls_dataset + +

+ +## ::: ultralytics.data.utils.compress_one_image + +

+ +## ::: ultralytics.data.utils.autosplit + +
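As one concrete example from this module, `autosplit` writes train/val/test image lists for a folder of images; a minimal sketch with a placeholder path (the weights shown give a 90/10/0 split):

```python
from ultralytics.data.utils import autosplit

# Creates autosplit_train.txt / autosplit_val.txt next to the image folder
autosplit(path='path/to/images', weights=(0.9, 0.1, 0.0), annotated_only=False)
```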

diff --git a/ultralytics/docs/en/reference/data/utils.md:Zone.Identifier b/ultralytics/docs/en/reference/data/utils.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/data/utils.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/exporter.md b/ultralytics/docs/en/reference/engine/exporter.md new file mode 100755 index 0000000..e8daeac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/exporter.md @@ -0,0 +1,32 @@ +--- +description: Explore the exporter functionality of Ultralytics. Learn about exporting formats, IOSDetectModel, and try exporting with examples. +keywords: Ultralytics, Exporter, IOSDetectModel, Export Formats, Try export +--- + +# Reference for `ultralytics/engine/exporter.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/exporter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/exporter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/exporter.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.exporter.Exporter + +

+ +## ::: ultralytics.engine.exporter.IOSDetectModel + +

+ +## ::: ultralytics.engine.exporter.export_formats + +

+ +## ::: ultralytics.engine.exporter.gd_outputs + +

+ +## ::: ultralytics.engine.exporter.try_export + +
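The `Exporter` is normally driven through the high-level model API rather than instantiated directly. A minimal sketch, assuming the `yolov8n.pt` checkpoint (downloaded automatically if missing):

```python
from ultralytics import YOLO
from ultralytics.engine.exporter import export_formats

print(export_formats())  # table of supported export formats

# Export through the model API, which delegates to Exporter internally
model = YOLO('yolov8n.pt')
model.export(format='onnx', imgsz=640)
```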

diff --git a/ultralytics/docs/en/reference/engine/exporter.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/exporter.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/exporter.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/model.md b/ultralytics/docs/en/reference/engine/model.md new file mode 100755 index 0000000..5579d75 --- /dev/null +++ b/ultralytics/docs/en/reference/engine/model.md @@ -0,0 +1,16 @@ +--- +description: Explore the detailed guide on using the Ultralytics YOLO Engine Model. Learn better ways to implement, train and evaluate YOLO models. +keywords: Ultralytics, YOLO, engine model, documentation, guide, implementation, training, evaluation +--- + +# Reference for `ultralytics/engine/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.model.Model + +

diff --git a/ultralytics/docs/en/reference/engine/model.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/predictor.md b/ultralytics/docs/en/reference/engine/predictor.md new file mode 100755 index 0000000..ad50352 --- /dev/null +++ b/ultralytics/docs/en/reference/engine/predictor.md @@ -0,0 +1,16 @@ +--- +description: Learn about Ultralytics BasePredictor, an essential component of our engine that serves as the foundation for all prediction operations. +keywords: Ultralytics, BasePredictor, YOLO, prediction, engine +--- + +# Reference for `ultralytics/engine/predictor.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/predictor.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/predictor.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/predictor.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.predictor.BasePredictor + +

diff --git a/ultralytics/docs/en/reference/engine/predictor.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/predictor.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/predictor.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/results.md b/ultralytics/docs/en/reference/engine/results.md new file mode 100755 index 0000000..b4b709e --- /dev/null +++ b/ultralytics/docs/en/reference/engine/results.md @@ -0,0 +1,36 @@ +--- +description: Master Ultralytics engine results including base tensors, boxes, and keypoints with our thorough documentation. +keywords: Ultralytics, engine, results, base tensor, boxes, keypoints +--- + +# Reference for `ultralytics/engine/results.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/results.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/results.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/results.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.results.BaseTensor + +

+ +## ::: ultralytics.engine.results.Results + +

+ +## ::: ultralytics.engine.results.Boxes + +

+ +## ::: ultralytics.engine.results.Masks + +

+ +## ::: ultralytics.engine.results.Keypoints + +

+ +## ::: ultralytics.engine.results.Probs + +
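For orientation, a minimal sketch showing how a `Results` object and its `Boxes` attribute are typically consumed after prediction:

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
results = model('https://ultralytics.com/images/bus.jpg')

# Each Results object bundles the boxes/masks/keypoints/probs for one image
for r in results:
    print(r.boxes.xyxy)  # box coordinates as an (N, 4) tensor
    print(r.boxes.conf)  # confidence scores
    print(r.boxes.cls)   # class indices
```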

diff --git a/ultralytics/docs/en/reference/engine/results.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/results.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/results.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/trainer.md b/ultralytics/docs/en/reference/engine/trainer.md new file mode 100755 index 0000000..7c74b04 --- /dev/null +++ b/ultralytics/docs/en/reference/engine/trainer.md @@ -0,0 +1,16 @@ +--- +description: Learn about the BaseTrainer class in the Ultralytics library. From training control, customization to advanced usage. +keywords: Ultralytics, BaseTrainer, Machine Learning, Training Control, Python library +--- + +# Reference for `ultralytics/engine/trainer.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/trainer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/trainer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/trainer.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.trainer.BaseTrainer + +

diff --git a/ultralytics/docs/en/reference/engine/trainer.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/trainer.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/trainer.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/tuner.md b/ultralytics/docs/en/reference/engine/tuner.md new file mode 100755 index 0000000..7882b8c --- /dev/null +++ b/ultralytics/docs/en/reference/engine/tuner.md @@ -0,0 +1,16 @@ +--- +description: Explore the Ultralytics Tuner, a powerful tool designed for hyperparameter tuning of YOLO models to optimize performance across various tasks like object detection, image classification, and more. +keywords: Ultralytics, Tuner, YOLO, hyperparameter tuning, optimization, object detection, image classification, instance segmentation, pose estimation, multi-object tracking +--- + +# Reference for `ultralytics/engine/tuner.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/tuner.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/tuner.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/tuner.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.tuner.Tuner + +
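The `Tuner` is usually invoked through `model.tune`. A minimal sketch; the argument set shown is illustrative (it mirrors `train()` arguments) and support may vary by version:

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')

# Evolve hyperparameters over many short training runs
model.tune(data='coco128.yaml', epochs=10, iterations=300, optimizer='AdamW', plots=False, save=False, val=False)
```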

diff --git a/ultralytics/docs/en/reference/engine/tuner.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/tuner.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/tuner.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/engine/validator.md b/ultralytics/docs/en/reference/engine/validator.md new file mode 100755 index 0000000..5c0f4cd --- /dev/null +++ b/ultralytics/docs/en/reference/engine/validator.md @@ -0,0 +1,16 @@ +--- +description: Learn about the Ultralytics BaseValidator module. Understand its principles, uses, and how it interacts with other components. +keywords: Ultralytics, BaseValidator, Ultralytics engine, module, components +--- + +# Reference for `ultralytics/engine/validator.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/validator.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/engine/validator.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/engine/validator.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.engine.validator.BaseValidator + +

diff --git a/ultralytics/docs/en/reference/engine/validator.md:Zone.Identifier b/ultralytics/docs/en/reference/engine/validator.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/engine/validator.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/hub/__init__.md b/ultralytics/docs/en/reference/hub/__init__.md new file mode 100755 index 0000000..912de06 --- /dev/null +++ b/ultralytics/docs/en/reference/hub/__init__.md @@ -0,0 +1,40 @@ +--- +description: Explore Ultralytics hub functions for model resetting, checking datasets, model exporting and more. Easy-to-follow instructions provided. +keywords: Ultralytics, hub functions, model export, dataset check, reset model, YOLO Docs +--- + +# Reference for `ultralytics/hub/__init__.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/__init__.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/__init__.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/__init__.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.hub.login + +

+ +## ::: ultralytics.hub.logout + +

+ +## ::: ultralytics.hub.reset_model + +

+ +## ::: ultralytics.hub.export_fmts_hub + +

+ +## ::: ultralytics.hub.export_model + +

+ +## ::: ultralytics.hub.get_export + +

+ +## ::: ultralytics.hub.check_dataset + +
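A minimal sketch of the HUB helpers above; `API_KEY` and `MODEL_ID` are placeholders for your own values.

```python
from ultralytics import hub

hub.login('API_KEY')  # authenticate this machine with Ultralytics HUB (placeholder key)

# Request an export of a HUB-trained model to a supported format
hub.export_model(model_id='MODEL_ID', format='onnx')  # placeholder model id
```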

diff --git a/ultralytics/docs/en/reference/hub/__init__.md:Zone.Identifier b/ultralytics/docs/en/reference/hub/__init__.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/hub/__init__.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/hub/auth.md b/ultralytics/docs/en/reference/hub/auth.md new file mode 100755 index 0000000..964dc73 --- /dev/null +++ b/ultralytics/docs/en/reference/hub/auth.md @@ -0,0 +1,16 @@ +--- +description: Dive into the Ultralytics Auth API documentation & learn how to manage authentication in your AI & ML projects easily and effectively. +keywords: Ultralytics, Auth, API documentation, User Authentication, AI, Machine Learning +--- + +# Reference for `ultralytics/hub/auth.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/auth.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/auth.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/auth.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.hub.auth.Auth + +

diff --git a/ultralytics/docs/en/reference/hub/auth.md:Zone.Identifier b/ultralytics/docs/en/reference/hub/auth.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/hub/auth.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/hub/session.md b/ultralytics/docs/en/reference/hub/session.md new file mode 100755 index 0000000..fb30715 --- /dev/null +++ b/ultralytics/docs/en/reference/hub/session.md @@ -0,0 +1,16 @@ +--- +description: Explore details about the HUBTrainingSession in Ultralytics framework. Learn to utilize this functionality for effective model training. +keywords: Ultralytics, HUBTrainingSession, Documentation, Model Training, AI, Machine Learning, YOLO +--- + +# Reference for `ultralytics/hub/session.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/session.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/session.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/session.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.hub.session.HUBTrainingSession + +

diff --git a/ultralytics/docs/en/reference/hub/session.md:Zone.Identifier b/ultralytics/docs/en/reference/hub/session.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/hub/session.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/hub/utils.md b/ultralytics/docs/en/reference/hub/utils.md new file mode 100755 index 0000000..e310a01 --- /dev/null +++ b/ultralytics/docs/en/reference/hub/utils.md @@ -0,0 +1,28 @@ +--- +description: Explore Ultralytics docs for various Events, including "request_with_credentials" and "requests_with_progress". Also, understand the use of the "smart_request". +keywords: Ultralytics, Events, request_with_credentials, smart_request, Ultralytics hub utils, requests_with_progress +--- + +# Reference for `ultralytics/hub/utils.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/hub/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/hub/utils.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.hub.utils.Events + +

+ +## ::: ultralytics.hub.utils.request_with_credentials + +

+ +## ::: ultralytics.hub.utils.requests_with_progress + +

+ +## ::: ultralytics.hub.utils.smart_request + +

diff --git a/ultralytics/docs/en/reference/hub/utils.md:Zone.Identifier b/ultralytics/docs/en/reference/hub/utils.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/hub/utils.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/fastsam/model.md b/ultralytics/docs/en/reference/models/fastsam/model.md new file mode 100755 index 0000000..0e99a8c --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/model.md @@ -0,0 +1,16 @@ +--- +description: Learn all about Ultralytics FastSAM model. Dive into our comprehensive guide for seamless integration and efficient model training. +keywords: Ultralytics, FastSAM model, Model documentation, Efficient model training +--- + +# Reference for `ultralytics/models/fastsam/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.fastsam.model.FastSAM + +
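For orientation, a minimal sketch of running FastSAM in "segment everything" mode; the checkpoint name and thresholds are illustrative:

```python
from ultralytics import FastSAM

model = FastSAM('FastSAM-s.pt')

# Segment everything in an image (CPU here; use device=0 for GPU)
results = model('https://ultralytics.com/images/bus.jpg', device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
```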

diff --git a/ultralytics/docs/en/reference/models/fastsam/model.md:Zone.Identifier b/ultralytics/docs/en/reference/models/fastsam/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/fastsam/predict.md b/ultralytics/docs/en/reference/models/fastsam/predict.md new file mode 100755 index 0000000..7c7196e --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/predict.md @@ -0,0 +1,16 @@ +--- +description: Get detailed insights about Ultralytics FastSAMPredictor. Learn to predict and optimize your AI models with our properly documented guidelines. +keywords: Ultralytics, FastSAMPredictor, predictive modeling, AI optimization, machine learning, deep learning, Ultralytics documentation +--- + +# Reference for `ultralytics/models/fastsam/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.fastsam.predict.FastSAMPredictor + +

diff --git a/ultralytics/docs/en/reference/models/fastsam/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/fastsam/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/fastsam/prompt.md b/ultralytics/docs/en/reference/models/fastsam/prompt.md new file mode 100755 index 0000000..0a37023 --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/prompt.md @@ -0,0 +1,16 @@ +--- +description: Learn to effectively utilize FastSAMPrompt model from Ultralytics. Detailed guide to help you get the most out of your machine learning models. +keywords: Ultralytics, FastSAMPrompt, machine learning, model, guide, documentation +--- + +# Reference for `ultralytics/models/fastsam/prompt.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/prompt.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/prompt.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/prompt.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.fastsam.prompt.FastSAMPrompt + +
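A minimal sketch of prompt-based post-processing with `FastSAMPrompt`, assuming a local `bus.jpg` (placeholder path); method names and arguments may vary slightly between releases:

```python
from ultralytics import FastSAM
from ultralytics.models.fastsam import FastSAMPrompt

model = FastSAM('FastSAM-s.pt')
everything_results = model('bus.jpg', device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)

# Wrap the raw results, then filter them with prompts
prompt_process = FastSAMPrompt('bus.jpg', everything_results, device='cpu')
ann = prompt_process.everything_prompt()                    # keep everything
ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])  # or keep masks inside a box (illustrative coords)
prompt_process.plot(annotations=ann, output='./')           # save the annotated image
```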

diff --git a/ultralytics/docs/en/reference/models/fastsam/prompt.md:Zone.Identifier b/ultralytics/docs/en/reference/models/fastsam/prompt.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/prompt.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/fastsam/utils.md b/ultralytics/docs/en/reference/models/fastsam/utils.md new file mode 100755 index 0000000..0ba33ab --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/utils.md @@ -0,0 +1,20 @@ +--- +description: Learn how to adjust bounding boxes to image borders in Ultralytics models using the bbox_iou utility. Enhance your object detection performance. +keywords: Ultralytics, bounding boxes, Bboxes, image borders, object detection, bbox_iou, model utilities +--- + +# Reference for `ultralytics/models/fastsam/utils.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/utils.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.fastsam.utils.adjust_bboxes_to_image_border + +

+ +## ::: ultralytics.models.fastsam.utils.bbox_iou + +

diff --git a/ultralytics/docs/en/reference/models/fastsam/utils.md:Zone.Identifier b/ultralytics/docs/en/reference/models/fastsam/utils.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/utils.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/fastsam/val.md b/ultralytics/docs/en/reference/models/fastsam/val.md new file mode 100755 index 0000000..4064f05 --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/val.md @@ -0,0 +1,16 @@ +--- +description: Learn about FastSAMValidator in Ultralytics models. Comprehensive guide to enhancing AI capabilities with Ultralytics. +keywords: Ultralytics, FastSAMValidator, model, synthetic, AI, machine learning, validation +--- + +# Reference for `ultralytics/models/fastsam/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/fastsam/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/fastsam/val.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.fastsam.val.FastSAMValidator + +

diff --git a/ultralytics/docs/en/reference/models/fastsam/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/fastsam/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/fastsam/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/nas/model.md b/ultralytics/docs/en/reference/models/nas/model.md new file mode 100755 index 0000000..9536e54 --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/model.md @@ -0,0 +1,16 @@ +--- +description: Learn how our NAS model operates in Ultralytics. Comprehensive guide with detailed examples. Master the nuances of Ultralytics NAS model. +keywords: Ultralytics, NAS model, NAS guide, machine learning, model documentation +--- + +# Reference for `ultralytics/models/nas/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.nas.model.NAS + +
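For orientation, a minimal sketch of loading a pretrained YOLO-NAS checkpoint and running inference:

```python
from ultralytics import NAS

model = NAS('yolo_nas_s.pt')
model.info()  # print a model summary
results = model('https://ultralytics.com/images/bus.jpg')
```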

diff --git a/ultralytics/docs/en/reference/models/nas/model.md:Zone.Identifier b/ultralytics/docs/en/reference/models/nas/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/nas/predict.md b/ultralytics/docs/en/reference/models/nas/predict.md new file mode 100755 index 0000000..3dcc0fd --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/predict.md @@ -0,0 +1,16 @@ +--- +description: Explore Ultralytics NASPredictor. Understand high-level architecture of the model for effective implementation and efficient predictions. +keywords: NASPredictor, Ultralytics, Ultralytics model, model architecture, efficient predictions +--- + +# Reference for `ultralytics/models/nas/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.nas.predict.NASPredictor + +

diff --git a/ultralytics/docs/en/reference/models/nas/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/nas/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/nas/val.md b/ultralytics/docs/en/reference/models/nas/val.md new file mode 100755 index 0000000..5d0adf6 --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/val.md @@ -0,0 +1,16 @@ +--- +description: Explore the utilities and functions of the Ultralytics NASValidator. Find out how it benefits allocation and optimization in AI models. +keywords: Ultralytics, NASValidator, models.nas.val.NASValidator, AI models, allocation, optimization +--- + +# Reference for `ultralytics/models/nas/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/nas/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/nas/val.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.nas.val.NASValidator + +

diff --git a/ultralytics/docs/en/reference/models/nas/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/nas/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/nas/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/rtdetr/model.md b/ultralytics/docs/en/reference/models/rtdetr/model.md new file mode 100755 index 0000000..a4578b2 --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/model.md @@ -0,0 +1,16 @@ +--- +description: Explore the specifics of using the RTDETR model in Ultralytics. Detailed documentation layered with explanations and examples. +keywords: Ultralytics, RTDETR model, Ultralytics models, object detection, Ultralytics documentation +--- + +# Reference for `ultralytics/models/rtdetr/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.rtdetr.model.RTDETR + +
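For orientation, a minimal sketch of loading a pretrained RT-DETR checkpoint and running inference:

```python
from ultralytics import RTDETR

model = RTDETR('rtdetr-l.pt')
model.info()  # print a model summary
results = model('https://ultralytics.com/images/bus.jpg')
```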

diff --git a/ultralytics/docs/en/reference/models/rtdetr/model.md:Zone.Identifier b/ultralytics/docs/en/reference/models/rtdetr/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/rtdetr/predict.md b/ultralytics/docs/en/reference/models/rtdetr/predict.md new file mode 100755 index 0000000..4763381 --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/predict.md @@ -0,0 +1,16 @@ +--- +description: Learn how to use the RTDETRPredictor model of the Ultralytics package. Detailed documentation, usage instructions, and advice. +keywords: Ultralytics, RTDETRPredictor, model documentation, guide, real-time object detection +--- + +# Reference for `ultralytics/models/rtdetr/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.rtdetr.predict.RTDETRPredictor + +

diff --git a/ultralytics/docs/en/reference/models/rtdetr/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/rtdetr/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/rtdetr/train.md b/ultralytics/docs/en/reference/models/rtdetr/train.md new file mode 100755 index 0000000..4f9faaa --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/train.md @@ -0,0 +1,16 @@ +--- +description: Get insights into RTDETRTrainer, a crucial component of Ultralytics for effective model training. Explore detailed documentation at Ultralytics. +keywords: Ultralytics, RTDETRTrainer, model training, Ultralytics models, PyTorch models, neural networks, machine learning, deep learning +--- + +# Reference for `ultralytics/models/rtdetr/train.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/train.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.rtdetr.train.RTDETRTrainer + +

diff --git a/ultralytics/docs/en/reference/models/rtdetr/train.md:Zone.Identifier b/ultralytics/docs/en/reference/models/rtdetr/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/rtdetr/val.md b/ultralytics/docs/en/reference/models/rtdetr/val.md new file mode 100755 index 0000000..d31bc86 --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/val.md @@ -0,0 +1,20 @@ +--- +description: Explore RTDETRDataset in Ultralytics Models. Learn about the RTDETRValidator function, understand its usage in real-time object detection. +keywords: Ultralytics, RTDETRDataset, RTDETRValidator, real-time object detection, models documentation +--- + +# Reference for `ultralytics/models/rtdetr/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/rtdetr/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/rtdetr/val.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.rtdetr.val.RTDETRDataset + +

+ +## ::: ultralytics.models.rtdetr.val.RTDETRValidator + +

diff --git a/ultralytics/docs/en/reference/models/rtdetr/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/rtdetr/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/rtdetr/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/amg.md b/ultralytics/docs/en/reference/models/sam/amg.md new file mode 100755 index 0000000..dea35e4 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/amg.md @@ -0,0 +1,56 @@ +--- +description: Explore Ultralytics methods for mask data processing, transformation and encoding. Deepen your understanding of RLE encoding, image cropping and more. +keywords: Ultralytics, Mask Data, Transformation, Encoding, RLE encoding, Image cropping, Pytorch, SAM, AMG, Ultralytics model +--- + +# Reference for `ultralytics/models/sam/amg.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/amg.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/amg.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/amg.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.amg.is_box_near_crop_edge + +

+ +## ::: ultralytics.models.sam.amg.batch_iterator + +

+ +## ::: ultralytics.models.sam.amg.calculate_stability_score + +

+ +## ::: ultralytics.models.sam.amg.build_point_grid + +

+ +## ::: ultralytics.models.sam.amg.build_all_layer_point_grids + +

+ +## ::: ultralytics.models.sam.amg.generate_crop_boxes + +

+ +## ::: ultralytics.models.sam.amg.uncrop_boxes_xyxy + +

+ +## ::: ultralytics.models.sam.amg.uncrop_points + +

+ +## ::: ultralytics.models.sam.amg.uncrop_masks + +

+ +## ::: ultralytics.models.sam.amg.remove_small_regions + +

+ +## ::: ultralytics.models.sam.amg.batched_mask_to_box + +

diff --git a/ultralytics/docs/en/reference/models/sam/amg.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/amg.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/amg.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/build.md b/ultralytics/docs/en/reference/models/sam/build.md new file mode 100755 index 0000000..9adeeb4 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/build.md @@ -0,0 +1,36 @@ +--- +description: Master building SAM ViT models with Ultralytics. Discover steps to leverage the power of SAM and Vision Transformer sessions. +keywords: Ultralytics, SAM, build sam, vision transformer, vits, build_sam_vit_l, build_sam_vit_b, build_sam +--- + +# Reference for `ultralytics/models/sam/build.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/build.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/build.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/build.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.build.build_sam_vit_h + +

+ +## ::: ultralytics.models.sam.build.build_sam_vit_l + +

+ +## ::: ultralytics.models.sam.build.build_sam_vit_b + +

+ +## ::: ultralytics.models.sam.build.build_mobile_sam + +

+ +## ::: ultralytics.models.sam.build._build_sam + +

+ +## ::: ultralytics.models.sam.build.build_sam + +

diff --git a/ultralytics/docs/en/reference/models/sam/build.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/build.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/build.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/model.md b/ultralytics/docs/en/reference/models/sam/model.md new file mode 100755 index 0000000..32444bc --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/model.md @@ -0,0 +1,16 @@ +--- +description: Dive into the SAM model details in the Ultralytics YOLO documentation. Understand, implement, and optimize your model use. +keywords: Ultralytics, YOLO, SAM Model, Documentations, Machine Learning, AI, Convolutional neural network +--- + +# Reference for `ultralytics/models/sam/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.model.SAM + +
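For orientation, a minimal sketch of prompting SAM with a box or a point; the prompt coordinates are arbitrary illustrations:

```python
from ultralytics import SAM

model = SAM('sam_b.pt')

# Segment with a bounding-box prompt
results = model('https://ultralytics.com/images/bus.jpg', bboxes=[439, 437, 524, 709])

# Segment with a point prompt (label 1 = foreground)
results = model('https://ultralytics.com/images/bus.jpg', points=[900, 370], labels=[1])
```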

diff --git a/ultralytics/docs/en/reference/models/sam/model.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/modules/decoders.md b/ultralytics/docs/en/reference/models/sam/modules/decoders.md new file mode 100755 index 0000000..215b832 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/decoders.md @@ -0,0 +1,20 @@ +--- +description: Explore MaskDecoder, a part of the Ultralytics models. Gain insights on how to utilize it effectively in the SAM modules decoders MLP. +keywords: Ultralytics, MaskDecoder, SAM modules, decoders, MLP, YOLO, machine learning, image recognition +--- + +# Reference for `ultralytics/models/sam/modules/decoders.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/decoders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/decoders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/decoders.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.modules.decoders.MaskDecoder + +

+ +## ::: ultralytics.models.sam.modules.decoders.MLP + +

diff --git a/ultralytics/docs/en/reference/models/sam/modules/decoders.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/modules/decoders.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/decoders.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/modules/encoders.md b/ultralytics/docs/en/reference/models/sam/modules/encoders.md new file mode 100755 index 0000000..ffcbf44 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/encoders.md @@ -0,0 +1,52 @@ +--- +description: Discover detailed information on ImageEncoderViT, PositionEmbeddingRandom, Attention, window_partition, get_rel_pos and more in Ultralytics models encoders documentation. +keywords: Ultralytics, Encoders, Modules, Documentation, ImageEncoderViT, PositionEmbeddingRandom, Attention, window_partition, get_rel_pos +--- + +# Reference for `ultralytics/models/sam/modules/encoders.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/encoders.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/encoders.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/encoders.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.modules.encoders.ImageEncoderViT + +

+ +## ::: ultralytics.models.sam.modules.encoders.PromptEncoder + +

+ +## ::: ultralytics.models.sam.modules.encoders.PositionEmbeddingRandom + +

+ +## ::: ultralytics.models.sam.modules.encoders.Block + +

+ +## ::: ultralytics.models.sam.modules.encoders.Attention + +

+ +## ::: ultralytics.models.sam.modules.encoders.PatchEmbed + +

+ +## ::: ultralytics.models.sam.modules.encoders.window_partition + +

+ +## ::: ultralytics.models.sam.modules.encoders.window_unpartition + +

+ +## ::: ultralytics.models.sam.modules.encoders.get_rel_pos + +

+ +## ::: ultralytics.models.sam.modules.encoders.add_decomposed_rel_pos + +

diff --git a/ultralytics/docs/en/reference/models/sam/modules/encoders.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/modules/encoders.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/encoders.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/modules/sam.md b/ultralytics/docs/en/reference/models/sam/modules/sam.md new file mode 100755 index 0000000..d309e24 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/sam.md @@ -0,0 +1,16 @@ +--- +description: Explore the Sam module of Ultralytics. Discover detailed methods, classes, and information for efficient deep-learning model training!. +keywords: Ultralytics, Sam module, deep learning, model training, Ultralytics documentation +--- + +# Reference for `ultralytics/models/sam/modules/sam.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/sam.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/sam.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/sam.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.modules.sam.Sam + +

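`Sam` bundles the image encoder, prompt encoder and mask decoder into one end-to-end model. It is rarely constructed directly; a minimal sketch of the documented entry point (the image path is a placeholder):

```python
from ultralytics import SAM

model = SAM("sam_b.pt")  # builds a Sam instance internally from the released checkpoint
results = model("path/to/image.jpg")  # segment everything in the image
```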
diff --git a/ultralytics/docs/en/reference/models/sam/modules/sam.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/modules/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md b/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md new file mode 100755 index 0000000..1d945be --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md @@ -0,0 +1,56 @@ +--- +description: Get in-depth insights about Ultralytics Tiny Encoder Modules such as Conv2d_BN, MBConv, ConvLayer, Attention, BasicLayer, and TinyViT. Improve your understanding of machine learning model components. +keywords: Ultralytics, Tiny Encoder, Conv2d_BN, MBConv, ConvLayer, Attention, BasicLayer, TinyViT, Machine learning modules, Ultralytics models +--- + +# Reference for `ultralytics/models/sam/modules/tiny_encoder.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/tiny_encoder.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/tiny_encoder.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/tiny_encoder.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.Conv2d_BN + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.PatchEmbed + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.MBConv + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.PatchMerging + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.ConvLayer + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.Mlp + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.Attention + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.TinyViTBlock + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.BasicLayer + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.LayerNorm2d + +

+ +## ::: ultralytics.models.sam.modules.tiny_encoder.TinyViT + +

diff --git a/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/tiny_encoder.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/modules/transformer.md b/ultralytics/docs/en/reference/models/sam/modules/transformer.md new file mode 100755 index 0000000..d36a31e --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/transformer.md @@ -0,0 +1,24 @@ +--- +description: Learn about TwoWayTransformer and Attention modules in Ultralytics. Leverage these tools to enhance your AI models. +keywords: Ultralytics, TwoWayTransformer, Attention, AI models, transformers +--- + +# Reference for `ultralytics/models/sam/modules/transformer.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/transformer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/modules/transformer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/modules/transformer.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.modules.transformer.TwoWayTransformer + +

+ +## ::: ultralytics.models.sam.modules.transformer.TwoWayAttentionBlock + +

+ +## ::: ultralytics.models.sam.modules.transformer.Attention + +

diff --git a/ultralytics/docs/en/reference/models/sam/modules/transformer.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/modules/transformer.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/modules/transformer.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/sam/predict.md b/ultralytics/docs/en/reference/models/sam/predict.md new file mode 100755 index 0000000..ae4c788 --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/predict.md @@ -0,0 +1,16 @@ +--- +description: Master the ultralytics.models.sam.predict.Predictor class with our comprehensive guide. Discover techniques to enhance your model predictions. +keywords: Ultralytics, predictor, models, sam.predict.Predictor, AI, machine learning, predictive models +--- + +# Reference for `ultralytics/models/sam/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/sam/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/sam/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.sam.predict.Predictor + +

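The `Predictor` powers promptable inference through the `SAM` model class. A minimal sketch of prompted prediction with a box or point prompt (paths are placeholders):

```python
from ultralytics import SAM

model = SAM("mobile_sam.pt")
# Prompt with a bounding box in xyxy pixel coordinates...
results = model("path/to/image.jpg", bboxes=[439, 437, 524, 709])
# ...or with a point and a foreground label
results = model("path/to/image.jpg", points=[900, 370], labels=[1])
```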
diff --git a/ultralytics/docs/en/reference/models/sam/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/sam/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/sam/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/utils/loss.md b/ultralytics/docs/en/reference/models/utils/loss.md new file mode 100755 index 0000000..567308e --- /dev/null +++ b/ultralytics/docs/en/reference/models/utils/loss.md @@ -0,0 +1,20 @@ +--- +description: Learn to use the DETRLoss function provided by Ultralytics YOLO. Understand how to utilize loss in RTDETR detection models to improve accuracy. +keywords: Ultralytics, YOLO, Documentation, DETRLoss, Detection Loss, Loss function, DETR, RTDETR Detection Models +--- + +# Reference for `ultralytics/models/utils/loss.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/loss.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/loss.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/utils/loss.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.utils.loss.DETRLoss + +

+ +## ::: ultralytics.models.utils.loss.RTDETRDetectionLoss + +

diff --git a/ultralytics/docs/en/reference/models/utils/loss.md:Zone.Identifier b/ultralytics/docs/en/reference/models/utils/loss.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/utils/loss.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/utils/ops.md b/ultralytics/docs/en/reference/models/utils/ops.md new file mode 100755 index 0000000..4ca7614 --- /dev/null +++ b/ultralytics/docs/en/reference/models/utils/ops.md @@ -0,0 +1,20 @@ +--- +description: Discover details for "HungarianMatcher" & "inverse_sigmoid" functions in Ultralytics YOLO, advanced tools supporting detection models. +keywords: Ultralytics, YOLO, HungarianMatcher, inverse_sigmoid, detection models, model utilities, ops +--- + +# Reference for `ultralytics/models/utils/ops.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/ops.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/utils/ops.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/utils/ops.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.utils.ops.HungarianMatcher + +

+ +## ::: ultralytics.models.utils.ops.get_cdn_group + +

diff --git a/ultralytics/docs/en/reference/models/utils/ops.md:Zone.Identifier b/ultralytics/docs/en/reference/models/utils/ops.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/utils/ops.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/classify/predict.md b/ultralytics/docs/en/reference/models/yolo/classify/predict.md new file mode 100755 index 0000000..581107b --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/predict.md @@ -0,0 +1,16 @@ +--- +description: Explore the Ultralytics ClassificationPredictor guide for model prediction and visualization. Build powerful AI models with YOLO. +keywords: Ultralytics, classification predictor, predict, YOLO, AI models, model visualization +--- + +# Reference for `ultralytics/models/yolo/classify/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.classify.predict.ClassificationPredictor + +

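A minimal usage sketch via the `YOLO` model class (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolov8n-cls.pt")
results = model("path/to/image.jpg")
probs = results[0].probs  # classification output
print(probs.top1, probs.top1conf)  # top-1 class index and its confidence
```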
diff --git a/ultralytics/docs/en/reference/models/yolo/classify/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/classify/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/classify/train.md b/ultralytics/docs/en/reference/models/yolo/classify/train.md new file mode 100755 index 0000000..e4b9ea8 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/train.md @@ -0,0 +1,16 @@ +--- +description: Delve into the Classification Trainer at Ultralytics YOLO docs and optimize your model's training process with insights from the masters! +keywords: Ultralytics, YOLO, Classification Trainer, deep learning, training process, AI models, documentation +--- + +# Reference for `ultralytics/models/yolo/classify/train.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/train.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.models.yolo.classify.train.ClassificationTrainer + +

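A minimal training sketch using the small MNIST subset from the Ultralytics examples:

```python
from ultralytics import YOLO

model = YOLO("yolov8n-cls.pt")  # start from pretrained classification weights
model.train(data="mnist160", epochs=3, imgsz=64)
```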
diff --git a/ultralytics/docs/en/reference/models/yolo/classify/train.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/classify/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/classify/val.md b/ultralytics/docs/en/reference/models/yolo/classify/val.md new file mode 100755 index 0000000..27f64ff --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/val.md @@ -0,0 +1,16 @@ +--- +description: Explore YOLO ClassificationValidator, a key element of Ultralytics YOLO models. Learn how it validates and fine-tunes model outputs. +keywords: Ultralytics, YOLO, ClassificationValidator, model validation, model fine-tuning, deep learning, computer vision +--- + +# Reference for `ultralytics/models/yolo/classify/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/classify/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/classify/val.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.classify.val.ClassificationValidator + +

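A minimal validation sketch; the returned classification metrics expose top-1 and top-5 accuracy:

```python
from ultralytics import YOLO

model = YOLO("yolov8n-cls.pt")
metrics = model.val(data="mnist160")  # dataset name reused from the training example
print(metrics.top1, metrics.top5)
```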
diff --git a/ultralytics/docs/en/reference/models/yolo/classify/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/classify/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/classify/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/detect/predict.md b/ultralytics/docs/en/reference/models/yolo/detect/predict.md new file mode 100755 index 0000000..4b6cafd --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/predict.md @@ -0,0 +1,16 @@ +--- +description: Explore the guide to using the DetectionPredictor in Ultralytics YOLO. Learn how to predict, detect and analyze objects accurately. +keywords: Ultralytics, YOLO, DetectionPredictor, detect, predict, object detection, analysis +--- + +# Reference for `ultralytics/models/yolo/detect/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.detect.predict.DetectionPredictor + +

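A minimal prediction sketch; each `Results` object carries a `boxes` attribute with coordinates, confidences and class ids:

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
results = model("https://ultralytics.com/images/bus.jpg")
for box in results[0].boxes:
    print(box.xyxy, box.conf, box.cls)
```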
diff --git a/ultralytics/docs/en/reference/models/yolo/detect/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/detect/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/detect/train.md b/ultralytics/docs/en/reference/models/yolo/detect/train.md new file mode 100755 index 0000000..bca6faf --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/train.md @@ -0,0 +1,16 @@ +--- +description: Maximize your model's potential with Ultralytics YOLO Detection Trainer. Learn advanced techniques, tips, and tricks for training. +keywords: Ultralytics YOLO, YOLO, Detection Trainer, Model Training, Machine Learning, Deep Learning, Computer Vision +--- + +# Reference for `ultralytics/models/yolo/detect/train.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/train.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.detect.train.DetectionTrainer + +

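A minimal training sketch on the COCO128 sample dataset:

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
model.train(data="coco128.yaml", epochs=100, imgsz=640)
```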
diff --git a/ultralytics/docs/en/reference/models/yolo/detect/train.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/detect/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/detect/val.md b/ultralytics/docs/en/reference/models/yolo/detect/val.md new file mode 100755 index 0000000..c5333d0 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/val.md @@ -0,0 +1,16 @@ +--- +description: Discover how to validate your YOLO models with the Ultralytics Detection Validator. Enhance precision and recall rates today. +keywords: Ultralytics, YOLO, Detection Validator, model validation, precision, recall +--- + +# Reference for `ultralytics/models/yolo/detect/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/detect/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/detect/val.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.models.yolo.detect.val.DetectionValidator + +

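A minimal validation sketch; the detection metrics expose the mAP family through `metrics.box`:

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
metrics = model.val(data="coco128.yaml")
print(metrics.box.map, metrics.box.map50)  # mAP50-95 and mAP50
```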
diff --git a/ultralytics/docs/en/reference/models/yolo/detect/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/detect/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/detect/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/model.md b/ultralytics/docs/en/reference/models/yolo/model.md new file mode 100755 index 0000000..742685f --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/model.md @@ -0,0 +1,16 @@ +--- +description: Discover the Ultralytics YOLO model class. Learn advanced techniques, tips, and tricks for training. +keywords: Ultralytics YOLO, YOLO, YOLO model, Model Training, Machine Learning, Deep Learning, Computer Vision +--- + +# Reference for `ultralytics/models/yolo/model.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/model.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/model.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/model.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.model.YOLO + +

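The `YOLO` class is the single entry point that routes to the task-specific trainers, validators and predictors documented in the surrounding pages. A minimal sketch of the core workflow (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolov8n.yaml")  # build a new model from a config
model = YOLO("yolov8n.pt")    # or load pretrained weights
model.info()                  # print a model summary
results = model.predict("path/to/image.jpg")
path = model.export(format="onnx")  # export returns the output file path
```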
diff --git a/ultralytics/docs/en/reference/models/yolo/model.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/model.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/model.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/pose/predict.md b/ultralytics/docs/en/reference/models/yolo/pose/predict.md new file mode 100755 index 0000000..9eb4af4 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/predict.md @@ -0,0 +1,16 @@ +--- +description: Discover how to use PosePredictor in the Ultralytics YOLO model. Includes detailed guides, code examples, and explanations. +keywords: Ultralytics, YOLO, PosePredictor, machine learning, AI, predictive models +--- + +# Reference for `ultralytics/models/yolo/pose/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/predict.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.pose.predict.PosePredictor + +

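A minimal sketch; pose results add a `keypoints` attribute alongside `boxes` (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolov8n-pose.pt")
results = model("path/to/image.jpg")
print(results[0].keypoints.xy)  # (num_persons, num_keypoints, 2) pixel coordinates
```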
diff --git a/ultralytics/docs/en/reference/models/yolo/pose/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/pose/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/pose/train.md b/ultralytics/docs/en/reference/models/yolo/pose/train.md new file mode 100755 index 0000000..1d02bf2 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/train.md @@ -0,0 +1,16 @@ +--- +description: Explore Ultralytics PoseTrainer for YOLO models. Get a step-by-step guide on how to train on custom pose data for more accurate AI modeling. +keywords: Ultralytics, YOLO, PoseTrainer, pose training, AI modeling, custom data training +--- + +# Reference for `ultralytics/models/yolo/pose/train.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/train.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.pose.train.PoseTrainer + +

diff --git a/ultralytics/docs/en/reference/models/yolo/pose/train.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/pose/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/pose/val.md b/ultralytics/docs/en/reference/models/yolo/pose/val.md new file mode 100755 index 0000000..77207be --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/val.md @@ -0,0 +1,16 @@ +--- +description: Explore the PoseValidator: review how Ultralytics YOLO validates poses for object detection. Improve your understanding of YOLO. +keywords: PoseValidator, Ultralytics, YOLO, Object detection, Pose validation +--- + +# Reference for `ultralytics/models/yolo/pose/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/pose/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/pose/val.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.models.yolo.pose.val.PoseValidator + +

diff --git a/ultralytics/docs/en/reference/models/yolo/pose/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/pose/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/pose/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/segment/predict.md b/ultralytics/docs/en/reference/models/yolo/segment/predict.md new file mode 100755 index 0000000..a92b6f6 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/predict.md @@ -0,0 +1,16 @@ +--- +description: Discover how to utilize the YOLO Segmentation Predictor in Ultralytics. Enhance your object detection skills with us. +keywords: YOLO, Ultralytics, object detection, segmentation predictor +--- + +# Reference for `ultralytics/models/yolo/segment/predict.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/predict.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/predict.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/predict.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.models.yolo.segment.predict.SegmentationPredictor + +

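A minimal sketch; segmentation results add a `masks` attribute (the image path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolov8n-seg.pt")
results = model("path/to/image.jpg")
masks = results[0].masks
print(masks.data.shape)  # (N, H, W) binary masks; masks.xy gives polygon outlines
```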
diff --git a/ultralytics/docs/en/reference/models/yolo/segment/predict.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/segment/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/segment/train.md b/ultralytics/docs/en/reference/models/yolo/segment/train.md new file mode 100755 index 0000000..1b5c3d5 --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/train.md @@ -0,0 +1,16 @@ +--- +description: Maximize your YOLO model's performance with our SegmentationTrainer. Explore comprehensive guides and tutorials on ultralytics.com. +keywords: Ultralytics, YOLO, SegmentationTrainer, image segmentation, object detection, model training, YOLO model +--- + +# Reference for `ultralytics/models/yolo/segment/train.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/train.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/train.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/train.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.segment.train.SegmentationTrainer + +

diff --git a/ultralytics/docs/en/reference/models/yolo/segment/train.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/segment/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/models/yolo/segment/val.md b/ultralytics/docs/en/reference/models/yolo/segment/val.md new file mode 100755 index 0000000..745b22c --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/val.md @@ -0,0 +1,16 @@ +--- +description: Get practical insights about our SegmentationValidator in YOLO Ultralytics models. Discover functionality details, methods, inputs, and outputs. +keywords: Ultralytics, YOLO, SegmentationValidator, model segmentation, image classification, object detection +--- + +# Reference for `ultralytics/models/yolo/segment/val.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/val.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/models/yolo/segment/val.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/models/yolo/segment/val.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.models.yolo.segment.val.SegmentationValidator + +

diff --git a/ultralytics/docs/en/reference/models/yolo/segment/val.md:Zone.Identifier b/ultralytics/docs/en/reference/models/yolo/segment/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/models/yolo/segment/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/autobackend.md b/ultralytics/docs/en/reference/nn/autobackend.md new file mode 100755 index 0000000..3e8c2f7 --- /dev/null +++ b/ultralytics/docs/en/reference/nn/autobackend.md @@ -0,0 +1,24 @@ +--- +description: Get to know more about Ultralytics nn.autobackend.check_class_names functionality. Optimize your YOLO models seamlessly. +keywords: Ultralytics, AutoBackend, check_class_names, YOLO, YOLO models, optimization +--- + +# Reference for `ultralytics/nn/autobackend.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/autobackend.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/autobackend.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/autobackend.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.autobackend.AutoBackend + +

+ +## ::: ultralytics.nn.autobackend.check_class_names + +

+ +## ::: ultralytics.nn.autobackend.default_class_names + +

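`AutoBackend` gives every supported export format (PyTorch, ONNX, TensorRT, OpenVINO, ...) the same forward interface. A hedged sketch, with constructor argument names assumed from the source:

```python
import torch

from ultralytics.nn.autobackend import AutoBackend

backend = AutoBackend("yolov8n.pt", device=torch.device("cpu"), fp16=False, fuse=True)
im = torch.zeros(1, 3, 640, 640)  # BCHW input tensor
y = backend(im)  # raw predictions; the call looks identical for any backend format
```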
diff --git a/ultralytics/docs/en/reference/nn/autobackend.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/autobackend.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/autobackend.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/modules/block.md b/ultralytics/docs/en/reference/nn/modules/block.md new file mode 100755 index 0000000..f8b1e58 --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/block.md @@ -0,0 +1,88 @@ +--- +description: Explore Ultralytics YOLO neural network modules, Proto to BottleneckCSP. Detailed explanation of each module with easy-to-follow code examples. +keywords: YOLO, Ultralytics, neural network, nn.modules.block, Proto, HGBlock, SPPF, C2, C3, RepC3, C3Ghost, Bottleneck, BottleneckCSP +--- + +# Reference for `ultralytics/nn/modules/block.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/block.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/block.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/block.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.modules.block.DFL + +

+ +## ::: ultralytics.nn.modules.block.Proto + +

+ +## ::: ultralytics.nn.modules.block.HGStem + +

+ +## ::: ultralytics.nn.modules.block.HGBlock + +

+ +## ::: ultralytics.nn.modules.block.SPP + +

+ +## ::: ultralytics.nn.modules.block.SPPF + +

+ +## ::: ultralytics.nn.modules.block.C1 + +

+ +## ::: ultralytics.nn.modules.block.C2 + +

+ +## ::: ultralytics.nn.modules.block.C2f + +

+ +## ::: ultralytics.nn.modules.block.C3 + +

+ +## ::: ultralytics.nn.modules.block.C3x + +

+ +## ::: ultralytics.nn.modules.block.RepC3 + +

+ +## ::: ultralytics.nn.modules.block.C3TR + +

+ +## ::: ultralytics.nn.modules.block.C3Ghost + +

+ +## ::: ultralytics.nn.modules.block.GhostBottleneck + +

+ +## ::: ultralytics.nn.modules.block.Bottleneck + +

+ +## ::: ultralytics.nn.modules.block.BottleneckCSP + +

+ +## ::: ultralytics.nn.modules.block.ResNetBlock + +

+ +## ::: ultralytics.nn.modules.block.ResNetLayer + +

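These blocks are the vocabulary that model YAMLs are parsed into. A quick shape-check sketch for two common ones, with argument names (`c1` in-channels, `c2` out-channels) taken from the source:

```python
import torch

from ultralytics.nn.modules.block import C2f, SPPF

x = torch.randn(1, 64, 40, 40)
m = C2f(c1=64, c2=64, n=2, shortcut=True)  # CSP bottleneck with 2 inner blocks
print(m(x).shape)  # torch.Size([1, 64, 40, 40]) - channels and resolution preserved

sppf = SPPF(c1=64, c2=64, k=5)  # Spatial Pyramid Pooling - Fast
print(sppf(x).shape)
```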
diff --git a/ultralytics/docs/en/reference/nn/modules/block.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/modules/block.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/block.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/modules/conv.md b/ultralytics/docs/en/reference/nn/modules/conv.md new file mode 100755 index 0000000..6c9fb35 --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/conv.md @@ -0,0 +1,68 @@ +--- +description: Explore various Ultralytics convolution modules including Conv2, DWConv, ConvTranspose, GhostConv, Channel Attention and more. +keywords: Ultralytics, Convolution Modules, Conv2, DWConv, ConvTranspose, GhostConv, ChannelAttention, CBAM, autopad +--- + +# Reference for `ultralytics/nn/modules/conv.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/conv.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/conv.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/conv.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.modules.conv.Conv + +

+ +## ::: ultralytics.nn.modules.conv.Conv2 + +

+ +## ::: ultralytics.nn.modules.conv.LightConv + +

+ +## ::: ultralytics.nn.modules.conv.DWConv + +

+ +## ::: ultralytics.nn.modules.conv.DWConvTranspose2d + +

+ +## ::: ultralytics.nn.modules.conv.ConvTranspose + +

+ +## ::: ultralytics.nn.modules.conv.Focus + +

+ +## ::: ultralytics.nn.modules.conv.GhostConv + +

+ +## ::: ultralytics.nn.modules.conv.RepConv + +

+ +## ::: ultralytics.nn.modules.conv.ChannelAttention + +

+ +## ::: ultralytics.nn.modules.conv.SpatialAttention + +

+ +## ::: ultralytics.nn.modules.conv.CBAM + +

+ +## ::: ultralytics.nn.modules.conv.Concat + +

+ +## ::: ultralytics.nn.modules.conv.autopad + +

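A quick sketch of the base `Conv` block (Conv2d + BatchNorm2d + SiLU) and the `autopad` helper it relies on for 'same'-style padding, with argument names taken from the source:

```python
import torch

from ultralytics.nn.modules.conv import Conv, autopad

print(autopad(3))  # 1 -> padding that preserves spatial size for a 3x3 kernel at stride 1
m = Conv(c1=3, c2=16, k=3, s=2)
print(m(torch.randn(1, 3, 64, 64)).shape)  # torch.Size([1, 16, 32, 32])
```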
diff --git a/ultralytics/docs/en/reference/nn/modules/conv.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/modules/conv.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/conv.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/modules/head.md b/ultralytics/docs/en/reference/nn/modules/head.md new file mode 100755 index 0000000..40ffb25 --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/head.md @@ -0,0 +1,32 @@ +--- +description: Explore docs covering Ultralytics YOLO detection, pose & RTDETRDecoder. Comprehensive guides to help you understand Ultralytics nn modules. +keywords: Ultralytics, YOLO, Detection, Pose, RTDETRDecoder, nn modules, guides +--- + +# Reference for `ultralytics/nn/modules/head.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/head.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/head.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/head.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.modules.head.Detect + +

+ +## ::: ultralytics.nn.modules.head.Segment + +

+ +## ::: ultralytics.nn.modules.head.Pose + +

+ +## ::: ultralytics.nn.modules.head.Classify + +

+ +## ::: ultralytics.nn.modules.head.RTDETRDecoder + +

diff --git a/ultralytics/docs/en/reference/nn/modules/head.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/modules/head.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/head.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/modules/transformer.md b/ultralytics/docs/en/reference/nn/modules/transformer.md new file mode 100755 index 0000000..91904be --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/transformer.md @@ -0,0 +1,52 @@ +--- +description: Learn about Ultralytics transformer encoder, layer, MLP block, LayerNorm2d and the deformable transformer decoder layer. Expand your understanding of these crucial AI modules. +keywords: Ultralytics, Ultralytics documentation, TransformerEncoderLayer, TransformerLayer, MLPBlock, LayerNorm2d, DeformableTransformerDecoderLayer +--- + +# Reference for `ultralytics/nn/modules/transformer.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/transformer.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/transformer.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/transformer.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.modules.transformer.TransformerEncoderLayer + +

+ +## ::: ultralytics.nn.modules.transformer.AIFI + +

+ +## ::: ultralytics.nn.modules.transformer.TransformerLayer + +

+ +## ::: ultralytics.nn.modules.transformer.TransformerBlock + +

+ +## ::: ultralytics.nn.modules.transformer.MLPBlock + +

+ +## ::: ultralytics.nn.modules.transformer.MLP + +

+ +## ::: ultralytics.nn.modules.transformer.LayerNorm2d + +

+ +## ::: ultralytics.nn.modules.transformer.MSDeformAttn + +

+ +## ::: ultralytics.nn.modules.transformer.DeformableTransformerDecoderLayer + +

+ +## ::: ultralytics.nn.modules.transformer.DeformableTransformerDecoder + +

diff --git a/ultralytics/docs/en/reference/nn/modules/transformer.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/modules/transformer.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/transformer.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/modules/utils.md b/ultralytics/docs/en/reference/nn/modules/utils.md new file mode 100755 index 0000000..25b0b1c --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/utils.md @@ -0,0 +1,32 @@ +--- +description: Explore Ultralytics neural network utils, such as bias_init_with_prob, inverse_sigmoid and multi_scale_deformable_attn_pytorch functions. +keywords: Ultralytics, neural network, nn.modules.utils, bias_init_with_prob, inverse_sigmoid, multi_scale_deformable_attn_pytorch +--- + +# Reference for `ultralytics/nn/modules/utils.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/modules/utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/modules/utils.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.modules.utils._get_clones + +

+ +## ::: ultralytics.nn.modules.utils.bias_init_with_prob + +

+ +## ::: ultralytics.nn.modules.utils.linear_init_ + +

+ +## ::: ultralytics.nn.modules.utils.inverse_sigmoid + +

+ +## ::: ultralytics.nn.modules.utils.multi_scale_deformable_attn_pytorch + +

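A small sketch of two of these helpers: `inverse_sigmoid` is the clamped logit used when refining reference points in the deformable decoder, and `bias_init_with_prob` computes a classification bias for a given prior probability. Signatures assumed from the source:

```python
import torch

from ultralytics.nn.modules.utils import bias_init_with_prob, inverse_sigmoid

x = torch.tensor([0.25, 0.50, 0.75])
logits = inverse_sigmoid(x)       # logit(x), clamped for numerical stability
print(torch.sigmoid(logits))      # ~tensor([0.2500, 0.5000, 0.7500]) round trip

print(bias_init_with_prob(0.01))  # ~-4.6, so sigmoid(bias) ~= 0.01 at initialization
```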
diff --git a/ultralytics/docs/en/reference/nn/modules/utils.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/modules/utils.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/modules/utils.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/nn/tasks.md b/ultralytics/docs/en/reference/nn/tasks.md new file mode 100755 index 0000000..aa84d88 --- /dev/null +++ b/ultralytics/docs/en/reference/nn/tasks.md @@ -0,0 +1,72 @@ +--- +description: Dive into the intricacies of YOLO tasks.py. Learn about DetectionModel, PoseModel and more for powerful AI development. +keywords: Ultralytics, YOLO, nn tasks, DetectionModel, PoseModel, RTDETRDetectionModel, model weights, parse model, AI development +--- + +# Reference for `ultralytics/nn/tasks.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/tasks.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/nn/tasks.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/nn/tasks.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.nn.tasks.BaseModel + +

+ +## ::: ultralytics.nn.tasks.DetectionModel + +

+ +## ::: ultralytics.nn.tasks.SegmentationModel + +

+ +## ::: ultralytics.nn.tasks.PoseModel + +

+ +## ::: ultralytics.nn.tasks.ClassificationModel + +

+ +## ::: ultralytics.nn.tasks.RTDETRDetectionModel + +

+ +## ::: ultralytics.nn.tasks.Ensemble + +

+ +## ::: ultralytics.nn.tasks.temporary_modules + +

+ +## ::: ultralytics.nn.tasks.torch_safe_load + +

+ +## ::: ultralytics.nn.tasks.attempt_load_weights + +

+ +## ::: ultralytics.nn.tasks.attempt_load_one_weight + +

+ +## ::: ultralytics.nn.tasks.parse_model + +

+ +## ::: ultralytics.nn.tasks.yaml_model_load + +

+ +## ::: ultralytics.nn.tasks.guess_model_scale + +

+ +## ::: ultralytics.nn.tasks.guess_model_task + +

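A hedged sketch tying a few of these together; signatures are assumed from the source:

```python
from ultralytics.nn.tasks import DetectionModel, attempt_load_one_weight, guess_model_task

model = DetectionModel(cfg="yolov8n.yaml", ch=3, nc=80)  # build a detector from YAML
model, ckpt = attempt_load_one_weight("yolov8n.pt")      # or load a single checkpoint
print(guess_model_task("yolov8n-seg.pt"))                # 'segment', inferred from the name
```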
diff --git a/ultralytics/docs/en/reference/nn/tasks.md:Zone.Identifier b/ultralytics/docs/en/reference/nn/tasks.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/nn/tasks.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/solutions/ai_gym.md b/ultralytics/docs/en/reference/solutions/ai_gym.md new file mode 100755 index 0000000..eb3e634 --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/ai_gym.md @@ -0,0 +1,16 @@ +--- +description: Explore Ultralytics YOLO's advanced AI Gym feature for real-time pose estimation and gym exercise tracking using cutting-edge machine learning technology. +keywords: Ultralytics, YOLO, AI Gym, pose estimation, real-time tracking, machine learning, exercise counting, AI fitness, computer vision, gym workout analysis, YOLOv8, artificial intelligence, fitness technology +--- + +# Reference for `ultralytics/solutions/ai_gym.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/ai_gym.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/ai_gym.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/ai_gym.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.solutions.ai_gym.AIGym + +

diff --git a/ultralytics/docs/en/reference/solutions/ai_gym.md:Zone.Identifier b/ultralytics/docs/en/reference/solutions/ai_gym.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/ai_gym.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/solutions/heatmap.md b/ultralytics/docs/en/reference/solutions/heatmap.md new file mode 100755 index 0000000..133125c --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/heatmap.md @@ -0,0 +1,16 @@ +--- +description: Explore Ultralytics YOLO's advanced Heatmaps feature designed to highlight areas of interest, providing an immediate, impactful way to interpret spatial information. +keywords: Ultralytics, YOLO, heatmaps, object tracking, data visualization, real-time tracking, machine learning, object counting, computer vision, retail analytics, YOLOv8, artificial intelligence +--- + +# Reference for `ultralytics/solutions/heatmap.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/heatmap.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/heatmap.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/heatmap.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.solutions.heatmap.Heatmap + +

diff --git a/ultralytics/docs/en/reference/solutions/heatmap.md:Zone.Identifier b/ultralytics/docs/en/reference/solutions/heatmap.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/heatmap.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/solutions/object_counter.md b/ultralytics/docs/en/reference/solutions/object_counter.md new file mode 100755 index 0000000..6cd3e00 --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/object_counter.md @@ -0,0 +1,16 @@ +--- +description: Transform object tracking with Ultralytics YOLO Object Counter featuring cutting-edge technology for precise real-time counting in video streams. +keywords: Ultralytics YOLO, object tracking software, real-time counting solutions, video stream analysis, YOLOv8 object detection, AI surveillance, smart counting technology, computer vision, AI-powered tracking, object counting accuracy, video analytics tools, automated monitoring +--- + +# Reference for `ultralytics/solutions/object_counter.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_counter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/solutions/object_counter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/solutions/object_counter.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.solutions.object_counter.ObjectCounter + +

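A sketch of the typical wiring: detect-and-track each frame, then hand the tracks to the counter. Argument names (`reg_pts`, `classes_names`, `start_counting`) are assumed from the solutions examples of this release; the video path is a placeholder:

```python
import cv2

from ultralytics import YOLO
from ultralytics.solutions import object_counter

model = YOLO("yolov8n.pt")
counter = object_counter.ObjectCounter()
counter.set_args(view_img=True, reg_pts=[(20, 400), (1080, 400)], classes_names=model.names)

cap = cv2.VideoCapture("path/to/video.mp4")
while cap.isOpened():
    ok, im0 = cap.read()
    if not ok:
        break
    tracks = model.track(im0, persist=True, show=False)  # persist keeps track ids across frames
    im0 = counter.start_counting(im0, tracks)
cap.release()
```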
diff --git a/ultralytics/docs/en/reference/solutions/object_counter.md:Zone.Identifier b/ultralytics/docs/en/reference/solutions/object_counter.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/solutions/object_counter.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/basetrack.md b/ultralytics/docs/en/reference/trackers/basetrack.md new file mode 100755 index 0000000..97f5c4a --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/basetrack.md @@ -0,0 +1,20 @@ +--- +description: Get familiar with TrackState in Ultralytics. Learn how it is used in the BaseTrack of the Ultralytics tracker for enhanced functionality. +keywords: Ultralytics, TrackState, BaseTrack, Ultralytics tracker, Ultralytics documentation +--- + +# Reference for `ultralytics/trackers/basetrack.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/basetrack.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/basetrack.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/basetrack.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.basetrack.TrackState + +

+ +## ::: ultralytics.trackers.basetrack.BaseTrack + +

diff --git a/ultralytics/docs/en/reference/trackers/basetrack.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/basetrack.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/basetrack.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/bot_sort.md b/ultralytics/docs/en/reference/trackers/bot_sort.md new file mode 100755 index 0000000..85dfeee --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/bot_sort.md @@ -0,0 +1,20 @@ +--- +description: Master the use of Ultralytics BOTrack, a key component of the powerful Ultralytics tracking system. Learn to integrate and use BOTSORT in your projects. +keywords: Ultralytics, BOTSORT, BOTrack, tracking system, official documentation, machine learning, AI tracking +--- + +# Reference for `ultralytics/trackers/bot_sort.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/bot_sort.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/bot_sort.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/bot_sort.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.bot_sort.BOTrack + +

+ +## ::: ultralytics.trackers.bot_sort.BOTSORT + +

diff --git a/ultralytics/docs/en/reference/trackers/bot_sort.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/bot_sort.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/bot_sort.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/byte_tracker.md b/ultralytics/docs/en/reference/trackers/byte_tracker.md new file mode 100755 index 0000000..e240d44 --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/byte_tracker.md @@ -0,0 +1,20 @@ +--- +description: Step in to explore in depth the functionalities of Ultralytics BYTETracker under STrack. Gain advanced feature insights to streamline your operations. +keywords: STrack, Ultralytics, BYTETracker, documentation, Ultralytics tracker, object tracking, YOLO +--- + +# Reference for `ultralytics/trackers/byte_tracker.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/byte_tracker.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/byte_tracker.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/byte_tracker.py) 🛠️. Thank you 🙏! +

+ +## ::: ultralytics.trackers.byte_tracker.STrack + +

+ +## ::: ultralytics.trackers.byte_tracker.BYTETracker + +

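Both trackers are normally selected by config file rather than instantiated directly. A minimal sketch (the video path is a placeholder):

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
# tracker="bytetrack.yaml" selects BYTETracker; "botsort.yaml" (the default) selects BOTSORT
results = model.track("path/to/video.mp4", tracker="bytetrack.yaml")
```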
diff --git a/ultralytics/docs/en/reference/trackers/byte_tracker.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/byte_tracker.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/byte_tracker.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/track.md b/ultralytics/docs/en/reference/trackers/track.md new file mode 100755 index 0000000..7f15130 --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/track.md @@ -0,0 +1,24 @@ +--- +description: Explore Ultralytics documentation on prediction function starters & register trackers. Understand our code & its applications better. +keywords: Ultralytics, YOLO, on predict start, register tracker, prediction functions, documentation +--- + +# Reference for `ultralytics/trackers/track.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/track.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/track.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/track.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.track.on_predict_start + +

+ +## ::: ultralytics.trackers.track.on_predict_postprocess_end + +

+ +## ::: ultralytics.trackers.track.register_tracker + +

diff --git a/ultralytics/docs/en/reference/trackers/track.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/utils/gmc.md b/ultralytics/docs/en/reference/trackers/utils/gmc.md new file mode 100755 index 0000000..fa51e8e --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/gmc.md @@ -0,0 +1,16 @@ +--- +description: Explore the Ultralytics GMC tool in our comprehensive documentation. Learn how it works, best practices, and implementation advice. +keywords: Ultralytics, GMC utility, Ultralytics documentation, Ultralytics tracker, machine learning tools +--- + +# Reference for `ultralytics/trackers/utils/gmc.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/gmc.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/gmc.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/gmc.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.utils.gmc.GMC + +

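A hedged sketch of global motion compensation on consecutive frames; the method names and `apply` signature are assumed from the source, and the frame paths are placeholders:

```python
import cv2

from ultralytics.trackers.utils.gmc import GMC

gmc = GMC(method="sparseOptFlow", downscale=2)  # also: 'orb', 'sift', 'ecc', 'none'
for path in ("frame_000.jpg", "frame_001.jpg"):
    frame = cv2.imread(path)
    warp = gmc.apply(frame)  # 2x3 matrix estimating camera motion vs. the previous frame
    print(warp)
```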
diff --git a/ultralytics/docs/en/reference/trackers/utils/gmc.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/utils/gmc.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/gmc.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md b/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md new file mode 100755 index 0000000..11063ef --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md @@ -0,0 +1,20 @@ +--- +description: Explore KalmanFilterXYAH, a key component of Ultralytics trackers. Understand its utilities and learn to leverage it in your own projects. +keywords: Ultralytics, KalmanFilterXYAH, tracker, documentation, guide +--- + +# Reference for `ultralytics/trackers/utils/kalman_filter.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/kalman_filter.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/kalman_filter.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/kalman_filter.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.utils.kalman_filter.KalmanFilterXYAH + +

+ +## ::: ultralytics.trackers.utils.kalman_filter.KalmanFilterXYWH + +

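A minimal predict/update cycle with the XYAH variant (the state is center x, center y, aspect ratio and height, plus their velocities); the API mirrors the DeepSORT-style filter this module ports:

```python
import numpy as np

from ultralytics.trackers.utils.kalman_filter import KalmanFilterXYAH

kf = KalmanFilterXYAH()
z = np.array([320.0, 240.0, 0.5, 120.0])  # measurement: (x, y, aspect ratio, height)
mean, cov = kf.initiate(z)                # 8-dim state from the first detection
mean, cov = kf.predict(mean, cov)         # propagate one frame ahead
mean, cov = kf.update(mean, cov, z)       # fuse the next matched detection
```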
diff --git a/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/kalman_filter.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/trackers/utils/matching.md b/ultralytics/docs/en/reference/trackers/utils/matching.md new file mode 100755 index 0000000..fb7d326 --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/matching.md @@ -0,0 +1,28 @@ +--- +description: Explore in-depth guidance for using Ultralytics trackers utils matching, including merge_matches, linear_assignment, iou_distance, embedding_distance, fuse_motion, and fuse_score. +keywords: Ultralytics, Trackers Utils, Matching, merge_matches, linear_assignment, iou_distance, embedding_distance, fuse_motion, fuse_score, documentation +--- + +# Reference for `ultralytics/trackers/utils/matching.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/matching.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/trackers/utils/matching.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/trackers/utils/matching.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.trackers.utils.matching.linear_assignment + +

+ +## ::: ultralytics.trackers.utils.matching.iou_distance + +

+ +## ::: ultralytics.trackers.utils.matching.embedding_distance + +

+ +## ::: ultralytics.trackers.utils.matching.fuse_score + +

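A toy sketch of `linear_assignment`, which both trackers use to match detections to tracks; the threshold semantics (pairs whose cost exceeds `thresh` are rejected) are assumed from the source:

```python
import numpy as np

from ultralytics.trackers.utils.matching import linear_assignment

cost = np.array([[0.1, 0.9], [0.8, 0.2]], dtype=np.float32)
matches, unmatched_a, unmatched_b = linear_assignment(cost, thresh=0.7)
print(matches)  # [[0 0] [1 1]]: row-column pairs whose cost clears the threshold
```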
diff --git a/ultralytics/docs/en/reference/trackers/utils/matching.md:Zone.Identifier b/ultralytics/docs/en/reference/trackers/utils/matching.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/trackers/utils/matching.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/__init__.md b/ultralytics/docs/en/reference/utils/__init__.md new file mode 100755 index 0000000..43b245f --- /dev/null +++ b/ultralytics/docs/en/reference/utils/__init__.md @@ -0,0 +1,156 @@ +--- +description: Explore the Ultralytics Utils package, with handy functions like colorstr, yaml_save, set_logging & more, designed to enhance your coding experience. +keywords: Ultralytics, Utils, utilitarian functions, colorstr, yaml_save, set_logging, is_kaggle, is_docker, clean_url +--- + +# Reference for `ultralytics/utils/__init__.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/__init__.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/__init__.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/__init__.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.TQDM + +

+ +## ::: ultralytics.utils.SimpleClass + +

+ +## ::: ultralytics.utils.IterableSimpleNamespace + +

+ +## ::: ultralytics.utils.ThreadingLocked + +

+ +## ::: ultralytics.utils.TryExcept + +

+ +## ::: ultralytics.utils.SettingsManager + +

+ +## ::: ultralytics.utils.plt_settings + +

+ +## ::: ultralytics.utils.set_logging + +

+ +## ::: ultralytics.utils.emojis + +

+ +## ::: ultralytics.utils.yaml_save + +

+ +## ::: ultralytics.utils.yaml_load + +

+ +## ::: ultralytics.utils.yaml_print + +

+ +## ::: ultralytics.utils.is_ubuntu + +

+ +## ::: ultralytics.utils.is_colab + +

+ +## ::: ultralytics.utils.is_kaggle + +

+ +## ::: ultralytics.utils.is_jupyter + +

+ +## ::: ultralytics.utils.is_docker + +

+ +## ::: ultralytics.utils.is_online + +

+ +## ::: ultralytics.utils.is_pip_package + +

+ +## ::: ultralytics.utils.is_dir_writeable + +

+ +## ::: ultralytics.utils.is_pytest_running + +

+ +## ::: ultralytics.utils.is_github_action_running + +

+ +## ::: ultralytics.utils.is_git_dir + +

+ +## ::: ultralytics.utils.get_git_dir + +

+ +## ::: ultralytics.utils.get_git_origin_url + +

+ +## ::: ultralytics.utils.get_git_branch + +

+ +## ::: ultralytics.utils.get_default_args + +

+ +## ::: ultralytics.utils.get_ubuntu_version + +

+ +## ::: ultralytics.utils.get_user_config_dir + +

+ +## ::: ultralytics.utils.colorstr + +

+ +## ::: ultralytics.utils.remove_colorstr + +

+ +## ::: ultralytics.utils.threaded + +

+ +## ::: ultralytics.utils.set_sentry + +

+ +## ::: ultralytics.utils.deprecation_warn + +

+ +## ::: ultralytics.utils.clean_url + +

+ +## ::: ultralytics.utils.url2file + +

diff --git a/ultralytics/docs/en/reference/utils/__init__.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/__init__.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/__init__.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/autobatch.md b/ultralytics/docs/en/reference/utils/autobatch.md new file mode 100755 index 0000000..4dc132b --- /dev/null +++ b/ultralytics/docs/en/reference/utils/autobatch.md @@ -0,0 +1,20 @@ +--- +description: Explore Ultralytics documentation for check_train_batch_size utility in the autobatch module. Understand how it could improve your machine learning process. +keywords: Ultralytics, check_train_batch_size, autobatch, utility, machine learning, documentation +--- + +# Reference for `ultralytics/utils/autobatch.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/autobatch.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/autobatch.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/autobatch.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.autobatch.check_train_batch_size + +

+ +## ::: ultralytics.utils.autobatch.autobatch + +

diff --git a/ultralytics/docs/en/reference/utils/autobatch.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/autobatch.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/autobatch.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/benchmarks.md b/ultralytics/docs/en/reference/utils/benchmarks.md new file mode 100755 index 0000000..76d4885 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/benchmarks.md @@ -0,0 +1,20 @@ +--- +description: Discover how to profile your models using Ultralytics utilities. Enhance performance, optimize your benchmarks, and learn best practices. +keywords: Ultralytics, ProfileModels, benchmarks, model profiling, performance optimization +--- + +# Reference for `ultralytics/utils/benchmarks.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/benchmarks.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/benchmarks.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/benchmarks.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.benchmarks.ProfileModels + +

+ +## ::: ultralytics.utils.benchmarks.benchmark + +

diff --git a/ultralytics/docs/en/reference/utils/benchmarks.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/benchmarks.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/benchmarks.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/base.md b/ultralytics/docs/en/reference/utils/callbacks/base.md new file mode 100755 index 0000000..609e34b --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/base.md @@ -0,0 +1,120 @@ +--- +description: Explore how to use the on-train, on-validation, on-pretrain, and on-predict callbacks in Ultralytics. Learn to update params, save models, and add integration callbacks. +keywords: Ultralytics, Callbacks, On-train, On-validation, On-pretrain, On-predict, Parameters update, Model saving, Integration callbacks +--- + +# Reference for `ultralytics/utils/callbacks/base.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/base.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/base.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/base.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.base.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_pretrain_routine_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_epoch_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_batch_start + +

+ +## ::: ultralytics.utils.callbacks.base.optimizer_step + +

+ +## ::: ultralytics.utils.callbacks.base.on_before_zero_grad + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_batch_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_model_save + +

+ +## ::: ultralytics.utils.callbacks.base.on_train_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_params_update + +

+ +## ::: ultralytics.utils.callbacks.base.teardown + +

+ +## ::: ultralytics.utils.callbacks.base.on_val_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_val_batch_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_val_batch_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_val_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_predict_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_predict_batch_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_predict_batch_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_predict_postprocess_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_predict_end + +

+ +## ::: ultralytics.utils.callbacks.base.on_export_start + +

+ +## ::: ultralytics.utils.callbacks.base.on_export_end + +

+ +## ::: ultralytics.utils.callbacks.base.get_default_callbacks + +

+ +## ::: ultralytics.utils.callbacks.base.add_integration_callbacks + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/base.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/base.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/base.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/clearml.md b/ultralytics/docs/en/reference/utils/callbacks/clearml.md new file mode 100755 index 0000000..a05b08c --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/clearml.md @@ -0,0 +1,40 @@ +--- +description: Uncover the specifics of Ultralytics ClearML callbacks, from pretrain routine start to training end. Boost your ML model performance. +keywords: Ultralytics, clearML, callbacks, pretrain routine start, validation end, train epoch end, training end +--- + +# Reference for `ultralytics/utils/callbacks/clearml.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/clearml.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/clearml.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/clearml.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.clearml._log_debug_samples + +

+ +## ::: ultralytics.utils.callbacks.clearml._log_plot + +

+ +## ::: ultralytics.utils.callbacks.clearml.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.clearml.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.clearml.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.clearml.on_val_end + +

+ +## ::: ultralytics.utils.callbacks.clearml.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/clearml.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/clearml.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/clearml.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/comet.md b/ultralytics/docs/en/reference/utils/callbacks/comet.md new file mode 100755 index 0000000..2ba62fa --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/comet.md @@ -0,0 +1,108 @@ +--- +description: Explore comprehensive documentation for utilising Comet Callbacks in Ultralytics. Learn to optimise training, logging, and experiment workflows. +keywords: Ultralytics, Comet Callbacks, Training optimisation, Logging, Experiment Workflows +--- + +# Reference for `ultralytics/utils/callbacks/comet.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/comet.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/comet.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/comet.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.comet._get_comet_mode + +

+ +## ::: ultralytics.utils.callbacks.comet._get_comet_model_name + +

+ +## ::: ultralytics.utils.callbacks.comet._get_eval_batch_logging_interval + +

+ +## ::: ultralytics.utils.callbacks.comet._get_max_image_predictions_to_log + +

+ +## ::: ultralytics.utils.callbacks.comet._scale_confidence_score + +

+ +## ::: ultralytics.utils.callbacks.comet._should_log_confusion_matrix + +

+ +## ::: ultralytics.utils.callbacks.comet._should_log_image_predictions + +

+ +## ::: ultralytics.utils.callbacks.comet._get_experiment_type + +

+ +## ::: ultralytics.utils.callbacks.comet._create_experiment + +

+ +## ::: ultralytics.utils.callbacks.comet._fetch_trainer_metadata + +

+ +## ::: ultralytics.utils.callbacks.comet._scale_bounding_box_to_original_image_shape + +

+ +## ::: ultralytics.utils.callbacks.comet._format_ground_truth_annotations_for_detection + +

+ +## ::: ultralytics.utils.callbacks.comet._format_prediction_annotations_for_detection + +

+ +## ::: ultralytics.utils.callbacks.comet._fetch_annotations + +

+ +## ::: ultralytics.utils.callbacks.comet._create_prediction_metadata_map + +

+ +## ::: ultralytics.utils.callbacks.comet._log_confusion_matrix + +

+ +## ::: ultralytics.utils.callbacks.comet._log_images + +

+ +## ::: ultralytics.utils.callbacks.comet._log_image_predictions + +

+ +## ::: ultralytics.utils.callbacks.comet._log_plots + +

+ +## ::: ultralytics.utils.callbacks.comet._log_model + +

+ +## ::: ultralytics.utils.callbacks.comet.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.comet.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.comet.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.comet.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/comet.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/comet.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/comet.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/dvc.md b/ultralytics/docs/en/reference/utils/callbacks/dvc.md new file mode 100755 index 0000000..a325989 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/dvc.md @@ -0,0 +1,48 @@ +--- +description: Browse through Ultralytics YOLO docs to learn about important logging and callback functions used in training and pretraining models. +keywords: Ultralytics, YOLO, callbacks, logger, training, pretraining, machine learning, models +--- + +# Reference for `ultralytics/utils/callbacks/dvc.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/dvc.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/dvc.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/dvc.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.dvc._log_images + +

+ +## ::: ultralytics.utils.callbacks.dvc._log_plots + +

+ +## ::: ultralytics.utils.callbacks.dvc._log_confusion_matrix + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_pretrain_routine_end + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_train_start + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_train_epoch_start + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.dvc.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/dvc.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/dvc.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/dvc.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/hub.md b/ultralytics/docs/en/reference/utils/callbacks/hub.md new file mode 100755 index 0000000..3735e90 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/hub.md @@ -0,0 +1,44 @@ +--- +description: Explore the detailed information on key Ultralytics callbacks such as on_pretrain_routine_end, on_model_save, on_train_start, and on_predict_start. +keywords: Ultralytics, callbacks, on_pretrain_routine_end, on_model_save, on_train_start, on_predict_start, hub, training +--- + +# Reference for `ultralytics/utils/callbacks/hub.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/hub.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/hub.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/hub.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.hub.on_pretrain_routine_end + +

+ +## ::: ultralytics.utils.callbacks.hub.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.hub.on_model_save + +

+ +## ::: ultralytics.utils.callbacks.hub.on_train_end + +

+ +## ::: ultralytics.utils.callbacks.hub.on_train_start + +

+ +## ::: ultralytics.utils.callbacks.hub.on_val_start + +

+ +## ::: ultralytics.utils.callbacks.hub.on_predict_start + +

+ +## ::: ultralytics.utils.callbacks.hub.on_export_start + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/hub.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/hub.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/hub.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/mlflow.md b/ultralytics/docs/en/reference/utils/callbacks/mlflow.md new file mode 100755 index 0000000..e3fab6c --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/mlflow.md @@ -0,0 +1,28 @@ +--- +description: Understand routines at the end of pre-training and training in Ultralytics. Elevate your MLflow callbacks expertise. +keywords: Ultralytics, MLflow, Callbacks, on_pretrain_routine_end, on_train_end, Machine Learning, Training +--- + +# Reference for `ultralytics/utils/callbacks/mlflow.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/mlflow.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/mlflow.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/mlflow.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.mlflow.on_pretrain_routine_end + +

+ +## ::: ultralytics.utils.callbacks.mlflow.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.mlflow.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.mlflow.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/mlflow.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/mlflow.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/mlflow.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/neptune.md b/ultralytics/docs/en/reference/utils/callbacks/neptune.md new file mode 100755 index 0000000..82906ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/neptune.md @@ -0,0 +1,44 @@ +--- +description: Explore exhaustive details about Ultralytics callbacks in Neptune, with specifics about scalar logging, routine start, and more. +keywords: Ultralytics, Neptune callbacks, on_train_epoch_end, on_val_end, _log_plot, _log_images, on_pretrain_routine_start, on_fit_epoch_end, on_train_end +--- + +# Reference for `ultralytics/utils/callbacks/neptune.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/neptune.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/neptune.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/neptune.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.neptune._log_scalars + +

+ +## ::: ultralytics.utils.callbacks.neptune._log_images + +

+ +## ::: ultralytics.utils.callbacks.neptune._log_plot + +

+ +## ::: ultralytics.utils.callbacks.neptune.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.neptune.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.neptune.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.neptune.on_val_end + +

+ +## ::: ultralytics.utils.callbacks.neptune.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/neptune.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/neptune.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/neptune.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/raytune.md b/ultralytics/docs/en/reference/utils/callbacks/raytune.md new file mode 100755 index 0000000..2ed6191 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/raytune.md @@ -0,0 +1,16 @@ +--- +description: Discover the functionality of the on_fit_epoch_end callback in the Ultralytics YOLO framework. Learn how to end an epoch in your deep learning projects. +keywords: Ultralytics, YOLO, on_fit_epoch_end, callbacks, documentation, deep learning, YOLO framework +--- + +# Reference for `ultralytics/utils/callbacks/raytune.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/raytune.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/raytune.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/raytune.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.raytune.on_fit_epoch_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/raytune.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/raytune.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/raytune.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md b/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md new file mode 100755 index 0000000..74fb3b1 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md @@ -0,0 +1,36 @@ +--- +description: Explore Ultralytics YOLO Docs for a deep understanding of log_scalars, on_batch_end & other callback utilities embedded in the tensorboard module. +keywords: Ultralytics, YOLO, documentation, callback utilities, log_scalars, on_batch_end, tensorboard +--- + +# Reference for `ultralytics/utils/callbacks/tensorboard.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/tensorboard.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/tensorboard.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/tensorboard.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.tensorboard._log_scalars + +

+ +## ::: ultralytics.utils.callbacks.tensorboard._log_tensorboard_graph + +

+ +## ::: ultralytics.utils.callbacks.tensorboard.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.tensorboard.on_train_start + +

+ +## ::: ultralytics.utils.callbacks.tensorboard.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.tensorboard.on_fit_epoch_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/tensorboard.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/callbacks/wb.md b/ultralytics/docs/en/reference/utils/callbacks/wb.md new file mode 100755 index 0000000..59a2088 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/wb.md @@ -0,0 +1,40 @@ +--- +description: Deep dive into Ultralytics callbacks. Learn how to use the _log_plots, on_fit_epoch_end, and on_train_end functions effectively. +keywords: Ultralytics, callbacks, _log_plots, on_fit_epoch_end, on_train_end +--- + +# Reference for `ultralytics/utils/callbacks/wb.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/wb.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/wb.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/callbacks/wb.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.callbacks.wb._custom_table + +

+ +## ::: ultralytics.utils.callbacks.wb._plot_curve + +

+ +## ::: ultralytics.utils.callbacks.wb._log_plots + +

+ +## ::: ultralytics.utils.callbacks.wb.on_pretrain_routine_start + +

+ +## ::: ultralytics.utils.callbacks.wb.on_fit_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.wb.on_train_epoch_end + +

+ +## ::: ultralytics.utils.callbacks.wb.on_train_end + +

diff --git a/ultralytics/docs/en/reference/utils/callbacks/wb.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/callbacks/wb.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/callbacks/wb.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/checks.md b/ultralytics/docs/en/reference/utils/checks.md new file mode 100755 index 0000000..f2e4ed9 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/checks.md @@ -0,0 +1,112 @@ +--- +description: Learn about our routine checks that safeguard Ultralytics operations including ASCII, font, YOLO file, YAML, Python and torchvision checks. +keywords: Ultralytics, utility checks, ASCII, check_version, pip_update, check_python, check_torchvision, check_yaml, YOLO filename +--- + +# Reference for `ultralytics/utils/checks.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/checks.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/checks.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/checks.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.checks.parse_requirements + +

+ +## ::: ultralytics.utils.checks.parse_version + +

+ +## ::: ultralytics.utils.checks.is_ascii + +

+ +## ::: ultralytics.utils.checks.check_imgsz + +

+ +## ::: ultralytics.utils.checks.check_version + +

+ +## ::: ultralytics.utils.checks.check_latest_pypi_version + +

+ +## ::: ultralytics.utils.checks.check_pip_update_available + +

+ +## ::: ultralytics.utils.checks.check_font + +

+ +## ::: ultralytics.utils.checks.check_python + +

+ +## ::: ultralytics.utils.checks.check_requirements + +

+ +## ::: ultralytics.utils.checks.check_torchvision + +

+ +## ::: ultralytics.utils.checks.check_suffix + +

+ +## ::: ultralytics.utils.checks.check_yolov5u_filename + +

+ +## ::: ultralytics.utils.checks.check_model_file_from_stem + +

+ +## ::: ultralytics.utils.checks.check_file + +

+ +## ::: ultralytics.utils.checks.check_yaml + +

+ +## ::: ultralytics.utils.checks.check_is_path_safe + +

+ +## ::: ultralytics.utils.checks.check_imshow + +

+ +## ::: ultralytics.utils.checks.check_yolo + +

+ +## ::: ultralytics.utils.checks.collect_system_info + +

+ +## ::: ultralytics.utils.checks.check_amp + +

+ +## ::: ultralytics.utils.checks.git_describe + +

+ +## ::: ultralytics.utils.checks.print_args + +

+ +## ::: ultralytics.utils.checks.cuda_device_count + +

+ +## ::: ultralytics.utils.checks.cuda_is_available + +

diff --git a/ultralytics/docs/en/reference/utils/checks.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/checks.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/checks.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/dist.md b/ultralytics/docs/en/reference/utils/dist.md new file mode 100755 index 0000000..1ca18e3 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/dist.md @@ -0,0 +1,28 @@ +--- +description: Discover the role of dist.find_free_network_port & dist.generate_ddp_command in Ultralytics DDP utilities. Use our guide for efficient deployment. +keywords: Ultralytics, DDP, DDP utility functions, Distributed Data Processing, find free network port, generate DDP command +--- + +# Reference for `ultralytics/utils/dist.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/dist.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/dist.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/dist.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.dist.find_free_network_port + +

+ +## ::: ultralytics.utils.dist.generate_ddp_file + +

+ +## ::: ultralytics.utils.dist.generate_ddp_command + +

+ +## ::: ultralytics.utils.dist.ddp_cleanup + +

diff --git a/ultralytics/docs/en/reference/utils/dist.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/dist.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/dist.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/downloads.md b/ultralytics/docs/en/reference/utils/downloads.md new file mode 100755 index 0000000..aeef4c9 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/downloads.md @@ -0,0 +1,52 @@ +--- +description: Learn about the download utilities in Ultralytics YOLO, featuring functions like is_url, check_disk_space, get_github_assets, and download. +keywords: Ultralytics, YOLO, download utilities, is_url, check_disk_space, get_github_assets, download, documentation +--- + +# Reference for `ultralytics/utils/downloads.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/downloads.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/downloads.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/downloads.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.downloads.is_url + +

+ +## ::: ultralytics.utils.downloads.delete_dsstore + +

+ +## ::: ultralytics.utils.downloads.zip_directory + +

+ +## ::: ultralytics.utils.downloads.unzip_file + +

+ +## ::: ultralytics.utils.downloads.check_disk_space + +

+ +## ::: ultralytics.utils.downloads.get_google_drive_file_info + +

+ +## ::: ultralytics.utils.downloads.safe_download + +

+ +## ::: ultralytics.utils.downloads.get_github_assets + +

+ +## ::: ultralytics.utils.downloads.attempt_download_asset + +

+ +## ::: ultralytics.utils.downloads.download + +

diff --git a/ultralytics/docs/en/reference/utils/downloads.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/downloads.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/downloads.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/errors.md b/ultralytics/docs/en/reference/utils/errors.md new file mode 100755 index 0000000..816328a --- /dev/null +++ b/ultralytics/docs/en/reference/utils/errors.md @@ -0,0 +1,16 @@ +--- +description: Learn about the HUBModelError in Ultralytics. Enhance your understanding, troubleshoot errors and optimize your machine learning projects. +keywords: Ultralytics, HUBModelError, Machine Learning, Error troubleshooting, Ultralytics documentation +--- + +# Reference for `ultralytics/utils/errors.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/errors.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/errors.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/errors.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.errors.HUBModelError + +

diff --git a/ultralytics/docs/en/reference/utils/errors.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/errors.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/errors.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/files.md b/ultralytics/docs/en/reference/utils/files.md new file mode 100755 index 0000000..586373b --- /dev/null +++ b/ultralytics/docs/en/reference/utils/files.md @@ -0,0 +1,40 @@ +--- +description: Discover how to use Ultralytics utility functions for file-related operations including incrementing paths, finding file age, checking file size and creating directories. +keywords: Ultralytics, utility functions, file operations, working directory, file age, file size, create directories +--- + +# Reference for `ultralytics/utils/files.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/files.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/files.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/files.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.files.WorkingDirectory + +

+ +## ::: ultralytics.utils.files.spaces_in_path + +

+ +## ::: ultralytics.utils.files.increment_path + +

+ +## ::: ultralytics.utils.files.file_age + +

+ +## ::: ultralytics.utils.files.file_date + +

+ +## ::: ultralytics.utils.files.file_size + +

+ +## ::: ultralytics.utils.files.get_latest_run + +

diff --git a/ultralytics/docs/en/reference/utils/files.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/files.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/files.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/instance.md b/ultralytics/docs/en/reference/utils/instance.md new file mode 100755 index 0000000..6771cd8 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/instance.md @@ -0,0 +1,24 @@ +--- +description: Dive into Ultralytics detailed utility guide. Learn about Bboxes, _ntuple and more from Ultralytics utils.instance module. +keywords: Ultralytics, Bboxes, _ntuple, utility, ultralytics utils.instance +--- + +# Reference for `ultralytics/utils/instance.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/instance.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/instance.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/instance.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.instance.Bboxes + +

+ +## ::: ultralytics.utils.instance.Instances + +

+ +## ::: ultralytics.utils.instance._ntuple + +

diff --git a/ultralytics/docs/en/reference/utils/instance.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/instance.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/instance.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/loss.md b/ultralytics/docs/en/reference/utils/loss.md new file mode 100755 index 0000000..922ad6c --- /dev/null +++ b/ultralytics/docs/en/reference/utils/loss.md @@ -0,0 +1,44 @@ +--- +description: Explore Ultralytics' versatile loss functions - VarifocalLoss, BboxLoss, v8DetectionLoss, v8PoseLoss. Improve your accuracy on YOLO implementations. +keywords: Ultralytics, Loss functions, VarifocalLoss, BboxLoss, v8DetectionLoss, v8PoseLoss, YOLO, Ultralytics Documentation +--- + +# Reference for `ultralytics/utils/loss.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/loss.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/loss.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/loss.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.loss.VarifocalLoss + +

+ +## ::: ultralytics.utils.loss.FocalLoss + +

+ +## ::: ultralytics.utils.loss.BboxLoss + +

+ +## ::: ultralytics.utils.loss.KeypointLoss + +

+ +## ::: ultralytics.utils.loss.v8DetectionLoss + +

+ +## ::: ultralytics.utils.loss.v8SegmentationLoss + +

+ +## ::: ultralytics.utils.loss.v8PoseLoss + +

+ +## ::: ultralytics.utils.loss.v8ClassificationLoss + +

diff --git a/ultralytics/docs/en/reference/utils/loss.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/loss.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/loss.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/metrics.md b/ultralytics/docs/en/reference/utils/metrics.md new file mode 100755 index 0000000..3154a73 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/metrics.md @@ -0,0 +1,80 @@ +--- +description: Explore Ultralytics YOLO metrics tools - from confusion matrix, detection metrics, pose metrics to box IOU. Learn how to compute and plot precision-recall curves. +keywords: Ultralytics, YOLO, YOLOv3, YOLOv4, metrics, confusion matrix, detection metrics, pose metrics, box IOU, mask IOU, plot precision-recall curves, compute average precision +--- + +# Reference for `ultralytics/utils/metrics.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/metrics.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/metrics.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/metrics.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.metrics.ConfusionMatrix + +

+ +## ::: ultralytics.utils.metrics.Metric + +

+ +## ::: ultralytics.utils.metrics.DetMetrics + +

+ +## ::: ultralytics.utils.metrics.SegmentMetrics + +

+ +## ::: ultralytics.utils.metrics.PoseMetrics + +

+ +## ::: ultralytics.utils.metrics.ClassifyMetrics + +

+ +## ::: ultralytics.utils.metrics.bbox_ioa + +

+ +## ::: ultralytics.utils.metrics.box_iou + +

+ +## ::: ultralytics.utils.metrics.bbox_iou + +

+ +## ::: ultralytics.utils.metrics.mask_iou + +

+ +## ::: ultralytics.utils.metrics.kpt_iou + +

+ +## ::: ultralytics.utils.metrics.smooth_BCE + +

+ +## ::: ultralytics.utils.metrics.smooth + +

+ +## ::: ultralytics.utils.metrics.plot_pr_curve + +

+ +## ::: ultralytics.utils.metrics.plot_mc_curve + +

+ +## ::: ultralytics.utils.metrics.compute_ap + +

+ +## ::: ultralytics.utils.metrics.ap_per_class + +

diff --git a/ultralytics/docs/en/reference/utils/metrics.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/metrics.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/metrics.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/ops.md b/ultralytics/docs/en/reference/utils/ops.md new file mode 100755 index 0000000..c366fd9 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/ops.md @@ -0,0 +1,128 @@ +--- +description: Explore detailed documentation for Ultralytics utility operations. Learn about methods like segment2box, make_divisible, clip_boxes, and many more. +keywords: Ultralytics YOLO, Utility Operations, segment2box, make_divisible, clip_boxes, scale_image, xywh2xyxy, xyxy2xywhn, xywh2ltwh, ltwh2xywh, segments2boxes, crop_mask, process_mask, scale_masks, masks2segments +--- + +# Reference for `ultralytics/utils/ops.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/ops.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/ops.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/ops.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.ops.Profile + +

+ +## ::: ultralytics.utils.ops.segment2box + +

+ +## ::: ultralytics.utils.ops.scale_boxes + +

+ +## ::: ultralytics.utils.ops.make_divisible + +

+ +## ::: ultralytics.utils.ops.non_max_suppression + +

+ +## ::: ultralytics.utils.ops.clip_boxes + +

+ +## ::: ultralytics.utils.ops.clip_coords + +

+ +## ::: ultralytics.utils.ops.scale_image + +

+ +## ::: ultralytics.utils.ops.xyxy2xywh + +

+ +## ::: ultralytics.utils.ops.xywh2xyxy + +

+ +## ::: ultralytics.utils.ops.xywhn2xyxy + +

+ +## ::: ultralytics.utils.ops.xyxy2xywhn + +

+ +## ::: ultralytics.utils.ops.xywh2ltwh + +

+ +## ::: ultralytics.utils.ops.xyxy2ltwh + +

+ +## ::: ultralytics.utils.ops.ltwh2xywh + +

+ +## ::: ultralytics.utils.ops.xyxyxyxy2xywhr + +

+ +## ::: ultralytics.utils.ops.xywhr2xyxyxyxy + +

+ +## ::: ultralytics.utils.ops.ltwh2xyxy + +

+ +## ::: ultralytics.utils.ops.segments2boxes + +

+ +## ::: ultralytics.utils.ops.resample_segments + +

+ +## ::: ultralytics.utils.ops.crop_mask + +

+ +## ::: ultralytics.utils.ops.process_mask_upsample + +

+ +## ::: ultralytics.utils.ops.process_mask + +

+ +## ::: ultralytics.utils.ops.process_mask_native + +

+ +## ::: ultralytics.utils.ops.scale_masks + +

+ +## ::: ultralytics.utils.ops.scale_coords + +

+ +## ::: ultralytics.utils.ops.masks2segments + +

+ +## ::: ultralytics.utils.ops.convert_torch2numpy_batch + +

+ +## ::: ultralytics.utils.ops.clean_str + +

diff --git a/ultralytics/docs/en/reference/utils/ops.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/ops.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/ops.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/patches.md b/ultralytics/docs/en/reference/utils/patches.md new file mode 100755 index 0000000..fdcf394 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/patches.md @@ -0,0 +1,28 @@ +--- +description: Learn about Ultralytics utils patches including imread, imshow and torch_save. Enhance your image processing skills. +keywords: Ultralytics, Utils, Patches, imread, imshow, torch_save, image processing +--- + +# Reference for `ultralytics/utils/patches.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/patches.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/patches.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/patches.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.patches.imread + +

+ +## ::: ultralytics.utils.patches.imwrite + +

+ +## ::: ultralytics.utils.patches.imshow + +

+ +## ::: ultralytics.utils.patches.torch_save + +

diff --git a/ultralytics/docs/en/reference/utils/patches.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/patches.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/patches.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/plotting.md b/ultralytics/docs/en/reference/utils/plotting.md new file mode 100755 index 0000000..b465af4 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/plotting.md @@ -0,0 +1,52 @@ +--- +description: Master advanced plotting utils from Ultralytics including color annotations, label and image plotting, and feature visualization. +keywords: Ultralytics, plotting, utils, color annotation, label plotting, image plotting, feature visualization +--- + +# Reference for `ultralytics/utils/plotting.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/plotting.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/plotting.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/plotting.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.plotting.Colors + +

+ +## ::: ultralytics.utils.plotting.Annotator + +

+ +## ::: ultralytics.utils.plotting.plot_labels + +

+ +## ::: ultralytics.utils.plotting.save_one_box + +

+ +## ::: ultralytics.utils.plotting.plot_images + +

+ +## ::: ultralytics.utils.plotting.plot_results + +

+ +## ::: ultralytics.utils.plotting.plt_color_scatter + +

+ +## ::: ultralytics.utils.plotting.plot_tune_results + +

+ +## ::: ultralytics.utils.plotting.output_to_target + +

+ +## ::: ultralytics.utils.plotting.feature_visualization + +

diff --git a/ultralytics/docs/en/reference/utils/plotting.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/plotting.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/plotting.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/tal.md b/ultralytics/docs/en/reference/utils/tal.md new file mode 100755 index 0000000..6519d35 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/tal.md @@ -0,0 +1,36 @@ +--- +description: Explore Ultralytics utilities for optimized task assignment, bounding box creation, and distance calculation. Learn more about algorithm implementations. +keywords: Ultralytics, task aligned assigner, select highest overlaps, make anchors, dist2bbox, bbox2dist, utilities, algorithm +--- + +# Reference for `ultralytics/utils/tal.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tal.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tal.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/tal.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.tal.TaskAlignedAssigner + +

+ +## ::: ultralytics.utils.tal.select_candidates_in_gts + +

+ +## ::: ultralytics.utils.tal.select_highest_overlaps + +

+ +## ::: ultralytics.utils.tal.make_anchors + +

+ +## ::: ultralytics.utils.tal.dist2bbox + +

+ +## ::: ultralytics.utils.tal.bbox2dist + +

diff --git a/ultralytics/docs/en/reference/utils/tal.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/tal.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/tal.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/torch_utils.md b/ultralytics/docs/en/reference/utils/torch_utils.md new file mode 100755 index 0000000..5c88f29 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/torch_utils.md @@ -0,0 +1,120 @@ +--- +description: Explore Ultralytics-tailored torch utility features like Model EMA, early stopping, smart inference, image scaling, get_flops, and many more. +keywords: Ultralytics, Torch Utils, Model EMA, Early Stopping, Smart Inference, Get CPU Info, Time Sync, Fuse Deconv and bn, Get num params, Get FLOPs, Scale img, Copy attr, Intersect dicts, De_parallel, Init seeds, Profile +--- + +# Reference for `ultralytics/utils/torch_utils.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/torch_utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/torch_utils.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/torch_utils.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.torch_utils.ModelEMA + +

+ +## ::: ultralytics.utils.torch_utils.EarlyStopping + +

+ +## ::: ultralytics.utils.torch_utils.torch_distributed_zero_first + +

+ +## ::: ultralytics.utils.torch_utils.smart_inference_mode + +

+ +## ::: ultralytics.utils.torch_utils.get_cpu_info + +

+ +## ::: ultralytics.utils.torch_utils.select_device + +

+ +## ::: ultralytics.utils.torch_utils.time_sync + +

+ +## ::: ultralytics.utils.torch_utils.fuse_conv_and_bn + +

+ +## ::: ultralytics.utils.torch_utils.fuse_deconv_and_bn + +

+ +## ::: ultralytics.utils.torch_utils.model_info + +

+ +## ::: ultralytics.utils.torch_utils.get_num_params + +

+ +## ::: ultralytics.utils.torch_utils.get_num_gradients + +

+ +## ::: ultralytics.utils.torch_utils.model_info_for_loggers + +

+ +## ::: ultralytics.utils.torch_utils.get_flops + +

+ +## ::: ultralytics.utils.torch_utils.get_flops_with_torch_profiler + +

+ +## ::: ultralytics.utils.torch_utils.initialize_weights + +

+ +## ::: ultralytics.utils.torch_utils.scale_img + +

+ +## ::: ultralytics.utils.torch_utils.make_divisible + +

+ +## ::: ultralytics.utils.torch_utils.copy_attr + +

+ +## ::: ultralytics.utils.torch_utils.get_latest_opset + +

+ +## ::: ultralytics.utils.torch_utils.intersect_dicts + +

+ +## ::: ultralytics.utils.torch_utils.is_parallel + +

+ +## ::: ultralytics.utils.torch_utils.de_parallel + +

+ +## ::: ultralytics.utils.torch_utils.one_cycle + +

+ +## ::: ultralytics.utils.torch_utils.init_seeds + +

+ +## ::: ultralytics.utils.torch_utils.strip_optimizer + +

+ +## ::: ultralytics.utils.torch_utils.profile + +

diff --git a/ultralytics/docs/en/reference/utils/torch_utils.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/torch_utils.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/torch_utils.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/triton.md b/ultralytics/docs/en/reference/utils/triton.md new file mode 100755 index 0000000..6a86de6 --- /dev/null +++ b/ultralytics/docs/en/reference/utils/triton.md @@ -0,0 +1,16 @@ +--- +description: Deploy ML models effortlessly with Ultralytics TritonRemoteModel. Simplify serving with our comprehensive utils guide. +keywords: Ultralytics, YOLO, TritonRemoteModel, machine learning, model serving, deployment, utils, documentation +--- + +# Reference for `ultralytics/utils/triton.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/triton.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/triton.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/triton.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.triton.TritonRemoteModel + +

diff --git a/ultralytics/docs/en/reference/utils/triton.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/triton.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/triton.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/reference/utils/tuner.md b/ultralytics/docs/en/reference/utils/tuner.md new file mode 100755 index 0000000..d05f83d --- /dev/null +++ b/ultralytics/docs/en/reference/utils/tuner.md @@ -0,0 +1,16 @@ +--- +description: Learn to utilize the run_ray_tune function with Ultralytics. Make your machine learning tuning process easier and more efficient. +keywords: Ultralytics, run_ray_tune, machine learning tuning, machine learning efficiency +--- + +# Reference for `ultralytics/utils/tuner.py` + +!!! Note + + This file is available at [https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tuner.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/tuner.py). If you spot a problem please help fix it by [contributing](https://docs.ultralytics.com/help/contributing/) a [Pull Request](https://github.com/ultralytics/ultralytics/edit/main/ultralytics/utils/tuner.py) ๐Ÿ› ๏ธ. Thank you ๐Ÿ™! + +

+ +## ::: ultralytics.utils.tuner.run_ray_tune + +

diff --git a/ultralytics/docs/en/reference/utils/tuner.md:Zone.Identifier b/ultralytics/docs/en/reference/utils/tuner.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/reference/utils/tuner.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/robots.txt b/ultralytics/docs/en/robots.txt new file mode 100755 index 0000000..6d80eae --- /dev/null +++ b/ultralytics/docs/en/robots.txt @@ -0,0 +1,12 @@ +User-agent: * +Sitemap: http://docs.ultralytics.com/sitemap.xml +Sitemap: http://docs.ultralytics.com/ar/sitemap.xml +Sitemap: http://docs.ultralytics.com/de/sitemap.xml +Sitemap: http://docs.ultralytics.com/es/sitemap.xml +Sitemap: http://docs.ultralytics.com/fr/sitemap.xml +Sitemap: http://docs.ultralytics.com/hi/sitemap.xml +Sitemap: http://docs.ultralytics.com/ja/sitemap.xml +Sitemap: http://docs.ultralytics.com/ko/sitemap.xml +Sitemap: http://docs.ultralytics.com/pt/sitemap.xml +Sitemap: http://docs.ultralytics.com/ru/sitemap.xml +Sitemap: http://docs.ultralytics.com/zh/sitemap.xml diff --git a/ultralytics/docs/en/robots.txt:Zone.Identifier b/ultralytics/docs/en/robots.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/robots.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/tasks/classify.md b/ultralytics/docs/en/tasks/classify.md new file mode 100755 index 0000000..fc5ec08 --- /dev/null +++ b/ultralytics/docs/en/tasks/classify.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Learn about YOLOv8 Classify models for image classification. Get detailed information on List of Pretrained Models & how to Train, Validate, Predict & Export models. +keywords: Ultralytics, YOLOv8, Image Classification, Pretrained Models, YOLOv8n-cls, Training, Validation, Prediction, Model Export +--- + +# Image Classification + +Image classification examples + +Image classification is the simplest of the three tasks and involves classifying an entire image into one of a set of predefined classes. + +The output of an image classifier is a single class label and a confidence score. Image classification is useful when you need to know only what class an image belongs to and don't need to know where objects of that class are located or what their exact shape is. + +
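+The single-label output just described can be read straight off a prediction result. Below is a minimal sketch, assuming the `ultralytics` package is installed; the image path is illustrative.
+
+```python
+from ultralytics import YOLO
+
+# Load a pretrained YOLOv8 classification model
+model = YOLO('yolov8n-cls.pt')
+
+# Run inference on a single image (path is illustrative)
+results = model('bus.jpg')
+
+# Classification results expose class probabilities via `probs`
+probs = results[0].probs
+print(model.names[probs.top1], float(probs.top1conf))  # predicted class name and its confidence
+```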

+ Watch: Explore Ultralytics YOLO Tasks: Image Classification (embedded video)
+ +!!! Tip "Tip" + + YOLOv8 Classify models use the `-cls` suffix, i.e. `yolov8n-cls.pt` and are pretrained on [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Classify models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 | +|-------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set.<br>
Reproduce by `yolo val classify data=path/to/ImageNet device=0` +- **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + instance. +
Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## Train + +Train YOLOv8n-cls on the MNIST160 dataset for 100 epochs at image size 64. For a full list of available arguments see the [Configuration](../usage/cfg.md) page. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-cls.yaml') # build a new model from YAML + model = YOLO('yolov8n-cls.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # Start training from a pretrained *.pt model + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### Dataset format + +YOLO classification dataset format can be found in detail in the [Dataset Guide](../datasets/classify/index.md). + +## Val + +Validate trained YOLOv8n-cls model accuracy on the MNIST160 dataset. No arguments need to be passed as the `model` retains its training `data` and arguments as model attributes. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-cls.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.top1 # top1 accuracy + metrics.top5 # top5 accuracy + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # val official model + yolo classify val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n-cls model to run predictions on images. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-cls.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page. + +## Export + +Export a YOLOv8n-cls model to a different format like ONNX, CoreML, etc. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-cls.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-cls export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-cls.onnx`.
Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|-------------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/en/tasks/classify.md:Zone.Identifier b/ultralytics/docs/en/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/tasks/detect.md b/ultralytics/docs/en/tasks/detect.md new file mode 100755 index 0000000..e4891f5 --- /dev/null +++ b/ultralytics/docs/en/tasks/detect.md @@ -0,0 +1,185 @@ +--- +comments: true +description: Official documentation for YOLOv8 by Ultralytics. Learn how to train, validate, predict and export models in various formats. Including detailed performance stats. +keywords: YOLOv8, Ultralytics, object detection, pretrained models, training, validation, prediction, export models, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# Object Detection + +Object detection examples + +Object detection is a task that involves identifying the location and class of objects in an image or video stream. + +The output of an object detector is a set of bounding boxes that enclose the objects in the image, along with class labels and confidence scores for each box. Object detection is a good choice when you need to identify objects of interest in a scene, but don't need to know exactly where the object is or its exact shape. + +

+ Watch: Object Detection with Pre-trained Ultralytics YOLOv8 Model.
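
To make that output concrete, here is a short sketch that reads class labels, confidences, and box corners from a prediction (the sample image URL is the one used elsewhere in these docs):

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
results = model('https://ultralytics.com/images/bus.jpg')

# Each Boxes row carries a class id, a confidence score, and xyxy corner coordinates
for box in results[0].boxes:
    cls_id = int(box.cls)
    conf = float(box.conf)
    x1, y1, x2, y2 = box.xyxy[0].tolist()
    print(f'{model.names[cls_id]} {conf:.2f} at ({x1:.0f}, {y1:.0f}, {x2:.0f}, {y2:.0f})')
```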

+ +!!! Tip "Tip" + + YOLOv8 Detect models are the default YOLOv8 models, i.e. `yolov8n.pt` and are pretrained on [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Detect models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size
(pixels) | mAPval
50-95 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | +|--------------------------------------------------------------------------------------|-----------------------|----------------------|--------------------------------|-------------------------------------|--------------------|-------------------| +| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | +| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | +| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | +| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | +| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + +- **mAPval** values are for single-model single-scale on [COCO val2017](http://cocodataset.org) dataset. +
Reproduce by `yolo val detect data=coco.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + instance. +
Reproduce by `yolo val detect data=coco128.yaml batch=1 device=0|cpu` + +## Train + +Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](../usage/cfg.md) page. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.yaml') # build a new model from YAML + model = YOLO('yolov8n.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # Start training from a pretrained *.pt model + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### Dataset format + +YOLO detection dataset format can be found in detail in the [Dataset Guide](../datasets/detect/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool by Ultralytics. + +## Val + +Validate trained YOLOv8n model accuracy on the COCO128 dataset. No arguments need to be passed as the `model` retains its training `data` and arguments as model attributes. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # a list containing map50-95 for each category + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # val official model + yolo detect val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n model to run predictions on images. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page. + +## Export + +Export a YOLOv8n model to a different format like ONNX, CoreML, etc. + +!!!
Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8 export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/en/tasks/detect.md:Zone.Identifier b/ultralytics/docs/en/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/tasks/index.md b/ultralytics/docs/en/tasks/index.md new file mode 100755 index 0000000..a5de275 --- /dev/null +++ b/ultralytics/docs/en/tasks/index.md @@ -0,0 +1,51 @@ +--- +comments: true +description: Learn about the cornerstone computer vision tasks YOLOv8 can perform including detection, segmentation, classification, and pose estimation. Understand their uses in your AI projects. +keywords: Ultralytics, YOLOv8, Detection, Segmentation, Classification, Pose Estimation, AI Framework, Computer Vision Tasks +--- + +# Ultralytics YOLOv8 Tasks + +
+Ultralytics YOLO supported tasks + +YOLOv8 is an AI framework that supports multiple computer vision **tasks**. The framework can be used to perform [detection](detect.md), [segmentation](segment.md), [classification](classify.md), and [pose](pose.md) estimation. Each of these tasks has a different objective and use case. + +

+ Watch: Explore Ultralytics YOLO Tasks: Object Detection, Segmentation, Tracking, and Pose Estimation.
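
A practical consequence of this unified design, shown in the sketch below, is that every task is driven through the same `YOLO` Python API; only the weights file changes, and the task is inferred from its suffix:

```python
from ultralytics import YOLO

# The checkpoint suffix selects the task: detect, segment, classify, or pose
for weights in ('yolov8n.pt', 'yolov8n-seg.pt', 'yolov8n-cls.pt', 'yolov8n-pose.pt'):
    model = YOLO(weights)
    model('https://ultralytics.com/images/bus.jpg')  # same call, task-specific results
```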

+ +## [Detection](detect.md) + +Detection is the primary task supported by YOLOv8. It involves detecting objects in an image or video frame and drawing bounding boxes around them. The detected objects are classified into different categories based on their features. YOLOv8 can detect multiple objects in a single image or video frame with high accuracy and speed. + +[Detection Examples](detect.md){ .md-button } + +## [Segmentation](segment.md) + +Segmentation is a task that involves segmenting an image into different regions based on the content of the image. Each region is assigned a label based on its content. This task is useful in applications such as medical imaging and scene understanding. YOLOv8 Segment models predict pixel-level instance masks in addition to bounding boxes. + +[Segmentation Examples](segment.md){ .md-button } + +## [Classification](classify.md) + +Classification is a task that involves classifying an image into different categories. YOLOv8 can be used to classify images based on their content, assigning a single class label and confidence score to the whole image. + +[Classification Examples](classify.md){ .md-button } + +## [Pose](pose.md) + +Pose/keypoint detection is a task that involves detecting specific points in an image or video frame. These points are referred to as keypoints and are used to track movement or estimate pose. YOLOv8 can detect keypoints in an image or video frame with high accuracy and speed. + +[Pose Examples](pose.md){ .md-button } + +## Conclusion + +YOLOv8 supports multiple tasks, including detection, segmentation, classification, and keypoint detection. Each of these tasks has different objectives and use cases. By understanding the differences between these tasks, you can choose the appropriate task for your computer vision application. diff --git a/ultralytics/docs/en/tasks/index.md:Zone.Identifier b/ultralytics/docs/en/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/tasks/pose.md b/ultralytics/docs/en/tasks/pose.md new file mode 100755 index 0000000..292c94b --- /dev/null +++ b/ultralytics/docs/en/tasks/pose.md @@ -0,0 +1,189 @@ +--- +comments: true +description: Learn how to use Ultralytics YOLOv8 for pose estimation tasks. Find pretrained models, learn how to train, validate, predict, and export your own. +keywords: Ultralytics, YOLO, YOLOv8, pose estimation, keypoints detection, object detection, pre-trained models, machine learning, artificial intelligence +--- + +# Pose Estimation + +Pose estimation examples + +Pose estimation is a task that involves identifying the location of specific points in an image, usually referred to as keypoints. The keypoints can represent various parts of the object such as joints, landmarks, or other distinctive features. The locations of the keypoints are usually represented as a set of 2D `[x, y]` or 3D `[x, y, visible]` +coordinates. + +The output of a pose estimation model is a set of points that represent the keypoints on an object in the image, usually along with the confidence scores for each point. Pose estimation is a good choice when you need to identify specific parts of an object in a scene, and their location in relation to each other. + +

+ Watch: Pose Estimation with Ultralytics YOLOv8.
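
A short sketch of how those keypoint coordinates and per-point confidences surface in the Python API:

```python
from ultralytics import YOLO

model = YOLO('yolov8n-pose.pt')
results = model('https://ultralytics.com/images/bus.jpg')

kpts = results[0].keypoints  # Keypoints object, one row per detected person
print(kpts.xy.shape)         # (num_persons, 17, 2) pixel [x, y] coordinates
print(kpts.conf)             # per-keypoint confidence scores
```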

+ +!!! Tip "Tip" + + YOLOv8 _pose_ models use the `-pose` suffix, i.e. `yolov8n-pose.pt`. These models are trained on the [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) dataset and are suitable for a variety of pose estimation tasks. + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Pose models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size
(pixels) | mAPpose
50-95 | mAPpose
50 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | +|------------------------------------------------------------------------------------------------------|-----------------------|-----------------------|--------------------|--------------------------------|-------------------------------------|--------------------|-------------------| +| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | +| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | +| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | +| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | +| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | +| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +- **mAPval** values are for single-model single-scale on [COCO Keypoints val2017](http://cocodataset.org) + dataset. +
Reproduce by `yolo val pose data=coco-pose.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + instance. +
Reproduce by `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## Train + +Train a YOLOv8-pose model on the COCO8-pose dataset. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.yaml') # build a new model from YAML + model = YOLO('yolov8n-pose.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Start training from a pretrained *.pt model + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Dataset format + +YOLO pose dataset format can be found in detail in the [Dataset Guide](../datasets/pose/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool by Ultralytics. + +## Val + +Validate trained YOLOv8n-pose model accuracy on the COCO8-pose dataset. No arguments need to be passed as the `model` +retains its training `data` and arguments as model attributes. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # a list containing map50-95 for each category + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # val official model + yolo pose val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n-pose model to run predictions on images. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page. + +## Export + +Export a YOLOv8n Pose model to a different format like ONNX, CoreML, etc. + +!!!
Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-pose export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-pose.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|--------------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/en/tasks/pose.md:Zone.Identifier b/ultralytics/docs/en/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/tasks/segment.md b/ultralytics/docs/en/tasks/segment.md new file mode 100755 index 0000000..7c3eaac --- /dev/null +++ b/ultralytics/docs/en/tasks/segment.md @@ -0,0 +1,190 @@ +--- +comments: true +description: Learn how to use instance segmentation models with Ultralytics YOLO. Instructions on training, validation, image prediction, and model export. 
+keywords: yolov8, instance segmentation, Ultralytics, COCO dataset, image segmentation, object detection, model training, model validation, image prediction, model export +--- + +# Instance Segmentation + +Instance segmentation examples + +Instance segmentation goes a step further than object detection and involves identifying individual objects in an image and segmenting them from the rest of the image. + +The output of an instance segmentation model is a set of masks or contours that outline each object in the image, along with class labels and confidence scores for each object. Instance segmentation is useful when you need to know not only where objects are in an image, but also what their exact shape is. + +

+ Watch: Run Segmentation with Pre-Trained Ultralytics YOLOv8 Model in Python.
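
As a quick illustration of that mask output, a sketch reading per-instance masks and polygon contours from a prediction:

```python
from ultralytics import YOLO

model = YOLO('yolov8n-seg.pt')
results = model('https://ultralytics.com/images/bus.jpg')

masks = results[0].masks  # Masks object, one entry per detected instance
print(masks.data.shape)   # (num_instances, H, W) binary mask tensor
print(len(masks.xy))      # per-instance polygon contours in pixel coordinates
```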

+ +!!! Tip "Tip" + + YOLOv8 Segment models use the `-seg` suffix, i.e. `yolov8n-seg.pt` and are pretrained on [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Segment models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size
(pixels) | mAPbox
50-95 | mAPmask
50-95 | Speed
CPU ONNX
(ms) | Speed
A100 TensorRT
(ms) | params
(M) | FLOPs
(B) | +|----------------------------------------------------------------------------------------------|-----------------------|----------------------|-----------------------|--------------------------------|-------------------------------------|--------------------|-------------------| +| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | +| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | +| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | +| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | +| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + +- **mAPval** values are for single-model single-scale on [COCO val2017](http://cocodataset.org) dataset. +
Reproduce by `yolo val segment data=coco.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + instance. +
Reproduce by `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Train + +Train YOLOv8n-seg on the COCO128-seg dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](../usage/cfg.md) page. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.yaml') # build a new model from YAML + model = YOLO('yolov8n-seg.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n-seg.yaml').load('yolov8n-seg.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Start training from a pretrained *.pt model + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Dataset format + +YOLO segmentation dataset format can be found in detail in the [Dataset Guide](../datasets/segment/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool by Ultralytics. + +## Val + +Validate trained YOLOv8n-seg model accuracy on the COCO128-seg dataset. No arguments need to be passed as the `model` +retains its training `data` and arguments as model attributes. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # a list containing map50-95(B) for each category + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # a list containing map50-95(M) for each category + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # val official model + yolo segment val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n-seg model to run predictions on images. + +!!! Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page. + +## Export + +Export a YOLOv8n-seg model to a different format like ONNX, CoreML, etc. + +!!!
Example + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-seg export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-seg.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------------------------------------------------------------------|-------------------|-------------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz`, `half`, `int8` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/en/tasks/segment.md:Zone.Identifier b/ultralytics/docs/en/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/usage/callbacks.md b/ultralytics/docs/en/usage/callbacks.md new file mode 100755 index 0000000..51ff0bd --- /dev/null +++ b/ultralytics/docs/en/usage/callbacks.md @@ -0,0 +1,101 @@ +--- +comments: true +description: Learn how to utilize callbacks in the Ultralytics framework during train, val, export, and predict modes for enhanced functionality. +keywords: Ultralytics, YOLO, callbacks guide, training callback, validation callback, export callback, prediction callback +--- + +## Callbacks + +Ultralytics framework supports callbacks as entry points in strategic stages of train, val, export, and predict modes. 
Each callback accepts a `Trainer`, `Validator`, or `Predictor` object depending on the operation type. All properties of these objects can be found in the Reference section of the docs. + +

+ Watch: Mastering Ultralytics YOLOv8: Callbacks
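
Trainer-side callbacks follow the same `add_callback` pattern as the prediction example below; here is a minimal sketch that logs progress at the end of every training epoch (the dataset and epoch count are illustrative):

```python
from ultralytics import YOLO


def on_train_epoch_end(trainer):
    # `trainer.epoch` is the 0-based index of the epoch that just finished
    print(f'Epoch {trainer.epoch + 1}/{trainer.epochs} finished')


model = YOLO('yolov8n.pt')
model.add_callback('on_train_epoch_end', on_train_epoch_end)
model.train(data='coco128.yaml', epochs=3, imgsz=640)
```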

+ +## Examples + +### Returning additional information with Prediction + +In this example, we want to return the original frame with each result object. Here's how we can do that: + +```python +from ultralytics import YOLO + + +def on_predict_batch_end(predictor): + # Retrieve the batch data + _, image, _, _ = predictor.batch + + # Ensure that image is a list + image = image if isinstance(image, list) else [image] + + # Combine the prediction results with the corresponding frames + predictor.results = zip(predictor.results, image) + + +# Create a YOLO model instance +model = YOLO('yolov8n.pt') + +# Add the custom callback to the model +model.add_callback("on_predict_batch_end", on_predict_batch_end) + +# Iterate through the results and frames +for result, frame in model.predict(): # or model.track() + pass +``` + +## All callbacks + +Here are all supported callbacks. See callbacks [source code](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/utils/callbacks/base.py) for additional details. + +### Trainer Callbacks + +| Callback | Description | +|-----------------------------|---------------------------------------------------------| +| `on_pretrain_routine_start` | Triggered at the beginning of pre-training routine | +| `on_pretrain_routine_end` | Triggered at the end of pre-training routine | +| `on_train_start` | Triggered when the training starts | +| `on_train_epoch_start` | Triggered at the start of each training epoch | +| `on_train_batch_start` | Triggered at the start of each training batch | +| `optimizer_step` | Triggered during the optimizer step | +| `on_before_zero_grad` | Triggered before gradients are zeroed | +| `on_train_batch_end` | Triggered at the end of each training batch | +| `on_train_epoch_end` | Triggered at the end of each training epoch | +| `on_fit_epoch_end` | Triggered at the end of each fit epoch | +| `on_model_save` | Triggered when the model is saved | +| `on_train_end` | Triggered when the training process ends | +| `on_params_update` | Triggered when model parameters are updated | +| `teardown` | Triggered when the training process is being cleaned up | + +### Validator Callbacks + +| Callback | Description | +|----------------------|-------------------------------------------------| +| `on_val_start` | Triggered when the validation starts | +| `on_val_batch_start` | Triggered at the start of each validation batch | +| `on_val_batch_end` | Triggered at the end of each validation batch | +| `on_val_end` | Triggered when the validation ends | + +### Predictor Callbacks + +| Callback | Description | +|------------------------------|---------------------------------------------------| +| `on_predict_start` | Triggered when the prediction process starts | +| `on_predict_batch_start` | Triggered at the start of each prediction batch | +| `on_predict_postprocess_end` | Triggered at the end of prediction postprocessing | +| `on_predict_batch_end` | Triggered at the end of each prediction batch | +| `on_predict_end` | Triggered when the prediction process ends | + +### Exporter Callbacks + +| Callback | Description | +|-------------------|------------------------------------------| +| `on_export_start` | Triggered when the export process starts | +| `on_export_end` | Triggered when the export process ends | diff --git a/ultralytics/docs/en/usage/callbacks.md:Zone.Identifier b/ultralytics/docs/en/usage/callbacks.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++
b/ultralytics/docs/en/usage/callbacks.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/usage/cfg.md b/ultralytics/docs/en/usage/cfg.md new file mode 100755 index 0000000..2e822ae --- /dev/null +++ b/ultralytics/docs/en/usage/cfg.md @@ -0,0 +1,260 @@ +--- +comments: true +description: Master YOLOv8 settings and hyperparameters for improved model performance. Learn to use YOLO CLI commands, adjust training settings, and optimize YOLO tasks & modes. +keywords: YOLOv8, settings, hyperparameters, YOLO CLI commands, YOLO tasks, YOLO modes, Ultralytics documentation, model optimization, YOLOv8 training +--- + +YOLO settings and hyperparameters play a critical role in the model's performance, speed, and accuracy. These settings and hyperparameters can affect the model's behavior at various stages of the model development process, including training, validation, and prediction. + +

+ Watch: Mastering Ultralytics YOLOv8: Configuration

+ +Ultralytics commands use the following syntax: + +!!! Example + + === "CLI" + + ```bash + yolo TASK MODE ARGS + ``` + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a YOLOv8 model from a pre-trained weights file + model = YOLO('yolov8n.pt') + + # Run MODE mode using the custom arguments ARGS (guess TASK) + model.MODE(ARGS) + ``` + +Where: + +- `TASK` (optional) is one of ([detect](../tasks/detect.md), [segment](../tasks/segment.md), [classify](../tasks/classify.md), [pose](../tasks/pose.md)) +- `MODE` (required) is one of ([train](../modes/train.md), [val](../modes/val.md), [predict](../modes/predict.md), [export](../modes/export.md), [track](../modes/track.md)) +- `ARGS` (optional) are `arg=value` pairs like `imgsz=640` that override defaults. + +Default `ARG` values are defined on this page from the `cfg/defaults.yaml` [file](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/default.yaml). + +#### Tasks + +YOLO models can be used for a variety of tasks, including detection, segmentation, classification and pose. These tasks differ in the type of output they produce and the specific problem they are designed to solve. + +**Detect**: For identifying and localizing objects or regions of interest in an image or video. +**Segment**: For dividing an image or video into regions or pixels that correspond to different objects or classes. +**Classify**: For predicting the class label of an input image. +**Pose**: For identifying objects and estimating their keypoints in an image or video. + +| Key | Value | Description | +|--------|------------|-------------------------------------------------| +| `task` | `'detect'` | YOLO task, i.e. detect, segment, classify, pose | + +[Tasks Guide](../tasks/index.md){ .md-button } + +#### Modes + +YOLO models can be used in different modes depending on the specific problem you are trying to solve. These modes include: + +**Train**: For training a YOLOv8 model on a custom dataset. +**Val**: For validating a YOLOv8 model after it has been trained. +**Predict**: For making predictions using a trained YOLOv8 model on new images or videos. +**Export**: For exporting a YOLOv8 model to a format that can be used for deployment. +**Track**: For tracking objects in real-time using a YOLOv8 model. +**Benchmark**: For benchmarking YOLOv8 exports (ONNX, TensorRT, etc.) speed and accuracy. + +| Key | Value | Description | +|--------|-----------|---------------------------------------------------------------| +| `mode` | `'train'` | YOLO mode, i.e. train, val, predict, export, track, benchmark | + +[Modes Guide](../modes/index.md){ .md-button } + +## Train + +The training settings for YOLO models encompass various hyperparameters and configurations used during the training process. These settings influence the model's performance, speed, and accuracy. Key training settings include batch size, learning rate, momentum, and weight decay. Additionally, the choice of optimizer, loss function, and training dataset composition can impact the training process. Careful tuning and experimentation with these settings are crucial for optimizing performance. + +| Key | Value | Description | +|-------------------|----------|------------------------------------------------------------------------------------------------| +| `model` | `None` | path to model file, i.e. yolov8n.pt, yolov8n.yaml | +| `data` | `None` | path to data file, i.e. 
coco128.yaml | +| `epochs` | `100` | number of epochs to train for | +| `time` | `None` | number of hours to train for, overrides epochs if supplied | +| `patience` | `50` | epochs to wait for no observable improvement for early stopping of training | +| `batch` | `16` | number of images per batch (-1 for AutoBatch) | +| `imgsz` | `640` | size of input images as integer | +| `save` | `True` | save train checkpoints and predict results | +| `save_period` | `-1` | Save checkpoint every x epochs (disabled if < 1) | +| `cache` | `False` | True/ram, disk or False. Use cache for data loading | +| `device` | `None` | device to run on, i.e. cuda device=0 or device=0,1,2,3 or device=cpu | +| `workers` | `8` | number of worker threads for data loading (per RANK if DDP) | +| `project` | `None` | project name | +| `name` | `None` | experiment name | +| `exist_ok` | `False` | whether to overwrite existing experiment | +| `pretrained` | `True` | (bool or str) whether to use a pretrained model (bool) or a model to load weights from (str) | +| `optimizer` | `'auto'` | optimizer to use, choices=[SGD, Adam, Adamax, AdamW, NAdam, RAdam, RMSProp, auto] | +| `verbose` | `False` | whether to print verbose output | +| `seed` | `0` | random seed for reproducibility | +| `deterministic` | `True` | whether to enable deterministic mode | +| `single_cls` | `False` | train multi-class data as single-class | +| `rect` | `False` | rectangular training with each batch collated for minimum padding | +| `cos_lr` | `False` | use cosine learning rate scheduler | +| `close_mosaic` | `10` | (int) disable mosaic augmentation for final epochs (0 to disable) | +| `resume` | `False` | resume training from last checkpoint | +| `amp` | `True` | Automatic Mixed Precision (AMP) training, choices=[True, False] | +| `fraction` | `1.0` | dataset fraction to train on (default is 1.0, all images in train set) | +| `profile` | `False` | profile ONNX and TensorRT speeds during training for loggers | +| `freeze` | `None` | (int or list, optional) freeze first n layers, or freeze list of layer indices during training | +| `lr0` | `0.01` | initial learning rate (i.e. SGD=1E-2, Adam=1E-3) | +| `lrf` | `0.01` | final learning rate (lr0 * lrf) | +| `momentum` | `0.937` | SGD momentum/Adam beta1 | +| `weight_decay` | `0.0005` | optimizer weight decay 5e-4 | +| `warmup_epochs` | `3.0` | warmup epochs (fractions ok) | +| `warmup_momentum` | `0.8` | warmup initial momentum | +| `warmup_bias_lr` | `0.1` | warmup initial bias lr | +| `box` | `7.5` | box loss gain | +| `cls` | `0.5` | cls loss gain (scale with pixels) | +| `dfl` | `1.5` | dfl loss gain | +| `pose` | `12.0` | pose loss gain (pose-only) | +| `kobj` | `2.0` | keypoint obj loss gain (pose-only) | +| `label_smoothing` | `0.0` | label smoothing (fraction) | +| `nbs` | `64` | nominal batch size | +| `overlap_mask` | `True` | masks should overlap during training (segment train only) | +| `mask_ratio` | `4` | mask downsample ratio (segment train only) | +| `dropout` | `0.0` | use dropout regularization (classify train only) | +| `val` | `True` | validate/test during training | +| `plots` | `False` | save plots and images during train/val | + +[Train Guide](../modes/train.md){ .md-button } + +## Predict + +The prediction settings for YOLO models encompass a range of hyperparameters and configurations that influence the model's performance, speed, and accuracy during inference on new data. 
Careful tuning and experimentation with these settings are essential to achieve optimal performance for a specific task. Key settings include the confidence threshold, Non-Maximum Suppression (NMS) threshold, and the number of classes considered. Additional factors affecting the prediction process are input data size and format, the presence of supplementary features such as masks or multiple labels per box, and the particular task the model is employed for. + +Inference arguments: + +| Name | Type | Default | Description | +|-----------------|----------------|------------------------|----------------------------------------------------------------------------| +| `source` | `str` | `'ultralytics/assets'` | source directory for images or videos | +| `conf` | `float` | `0.25` | object confidence threshold for detection | +| `iou` | `float` | `0.7` | intersection over union (IoU) threshold for NMS | +| `imgsz` | `int or tuple` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `half` | `bool` | `False` | use half precision (FP16) | +| `device` | `None or str` | `None` | device to run on, i.e. cuda device=0/1/2/3 or device=cpu | +| `max_det` | `int` | `300` | maximum number of detections per image | +| `vid_stride` | `bool` | `False` | video frame-rate stride | +| `stream_buffer` | `bool` | `False` | buffer all streaming frames (True) or return the most recent frame (False) | +| `visualize` | `bool` | `False` | visualize model features | +| `augment` | `bool` | `False` | apply image augmentation to prediction sources | +| `agnostic_nms` | `bool` | `False` | class-agnostic NMS | +| `classes` | `list[int]` | `None` | filter results by class, i.e. classes=0, or classes=[0,2,3] | +| `retina_masks` | `bool` | `False` | use high-resolution segmentation masks | +| `embed` | `list[int]` | `None` | return feature vectors/embeddings from given layers | + +Visualization arguments: + +| Name | Type | Default | Description | +|---------------|---------------|---------|-----------------------------------------------------------------| +| `show` | `bool` | `False` | show predicted images and videos if environment allows | +| `save` | `bool` | `False` | save predicted images and videos | +| `save_frames` | `bool` | `False` | save predicted individual video frames | +| `save_txt` | `bool` | `False` | save results as `.txt` file | +| `save_conf` | `bool` | `False` | save results with confidence scores | +| `save_crop` | `bool` | `False` | save cropped images with results | +| `show_labels` | `bool` | `True` | show prediction labels, i.e. 'person' | +| `show_conf` | `bool` | `True` | show prediction confidence, i.e. '0.99' | +| `show_boxes` | `bool` | `True` | show prediction boxes | +| `line_width` | `None or int` | `None` | line width of the bounding boxes. Scaled to image size if None. | + +[Predict Guide](../modes/predict.md){ .md-button } + +## Val + +The val (validation) settings for YOLO models involve various hyperparameters and configurations used to evaluate the model's performance on a validation dataset. These settings influence the model's performance, speed, and accuracy. Common YOLO validation settings include batch size, validation frequency during training, and performance evaluation metrics. Other factors affecting the validation process include the validation dataset's size and composition, as well as the specific task the model is employed for. 
Careful tuning and experimentation with these settings are crucial to ensure optimal performance on the validation dataset and detect and prevent overfitting. + +| Key | Value | Description | +|---------------|---------|--------------------------------------------------------------------| +| `data` | `None` | path to data file, i.e. coco128.yaml | +| `imgsz` | `640` | size of input images as integer | +| `batch` | `16` | number of images per batch (-1 for AutoBatch) | +| `save_json` | `False` | save results to JSON file | +| `save_hybrid` | `False` | save hybrid version of labels (labels + additional predictions) | +| `conf` | `0.001` | object confidence threshold for detection | +| `iou` | `0.6` | intersection over union (IoU) threshold for NMS | +| `max_det` | `300` | maximum number of detections per image | +| `half` | `True` | use half precision (FP16) | +| `device` | `None` | device to run on, i.e. cuda device=0/1/2/3 or device=cpu | +| `dnn` | `False` | use OpenCV DNN for ONNX inference | +| `plots` | `False` | save plots and images during train/val | +| `rect` | `False` | rectangular val with each batch collated for minimum padding | +| `split` | `val` | dataset split to use for validation, i.e. 'val', 'test' or 'train' | + +[Val Guide](../modes/val.md){ .md-button } + +## Export + +Export settings for YOLO models encompass configurations and options related to saving or exporting the model for use in different environments or platforms. These settings can impact the model's performance, size, and compatibility with various systems. Key export settings include the exported model file format (e.g., ONNX, TensorFlow SavedModel), the target device (e.g., CPU, GPU), and additional features such as masks or multiple labels per box. The export process may also be affected by the model's specific task and the requirements or constraints of the destination environment or platform. It is crucial to thoughtfully configure these settings to ensure the exported model is optimized for the intended use case and functions effectively in the target environment. + +| Key | Value | Description | +|-------------|-----------------|------------------------------------------------------| +| `format` | `'torchscript'` | format to export to | +| `imgsz` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) | +| `keras` | `False` | use Keras for TF SavedModel export | +| `optimize` | `False` | TorchScript: optimize for mobile | +| `half` | `False` | FP16 quantization | +| `int8` | `False` | INT8 quantization | +| `dynamic` | `False` | ONNX/TensorRT: dynamic axes | +| `simplify` | `False` | ONNX/TensorRT: simplify model | +| `opset` | `None` | ONNX: opset version (optional, defaults to latest) | +| `workspace` | `4` | TensorRT: workspace size (GB) | +| `nms` | `False` | CoreML: add NMS | + +[Export Guide](../modes/export.md){ .md-button } + +## Augmentation + +Augmentation settings for YOLO models refer to the various transformations and modifications applied to the training data to increase the diversity and size of the dataset. These settings can affect the model's performance, speed, and accuracy. Some common YOLO augmentation settings include the type and intensity of the transformations applied (e.g. random flips, rotations, cropping, color changes), the probability with which each transformation is applied, and the presence of additional features such as masks or multiple labels per box. 
## Augmentation

Augmentation settings for YOLO models refer to the various transformations and modifications applied to the training data to increase the diversity and size of the dataset. These settings can affect the model's performance, speed, and accuracy. Some common YOLO augmentation settings include the type and intensity of the transformations applied (e.g. random flips, rotations, cropping, color changes), the probability with which each transformation is applied, and the presence of additional features such as masks or multiple labels per box. Other factors that may affect the augmentation process include the size and composition of the original dataset and the specific task the model is being used for. It is important to carefully tune and experiment with these settings to ensure that the augmented dataset is diverse and representative enough to train a high-performing model. These keys can be passed to `train()` directly, as shown in the sketch after the table.

| Key           | Value   | Description                                      |
|---------------|---------|--------------------------------------------------|
| `hsv_h`       | `0.015` | image HSV-Hue augmentation (fraction)            |
| `hsv_s`       | `0.7`   | image HSV-Saturation augmentation (fraction)     |
| `hsv_v`       | `0.4`   | image HSV-Value augmentation (fraction)          |
| `degrees`     | `0.0`   | image rotation (+/- deg)                         |
| `translate`   | `0.1`   | image translation (+/- fraction)                 |
| `scale`       | `0.5`   | image scale (+/- gain)                           |
| `shear`       | `0.0`   | image shear (+/- deg)                            |
| `perspective` | `0.0`   | image perspective (+/- fraction), range 0-0.001  |
| `flipud`      | `0.0`   | image flip up-down (probability)                 |
| `fliplr`      | `0.5`   | image flip left-right (probability)              |
| `mosaic`      | `1.0`   | image mosaic (probability)                       |
| `mixup`       | `0.0`   | image mixup (probability)                        |
| `copy_paste`  | `0.0`   | segment copy-paste (probability)                 |
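A brief, hedged sketch of overriding a few of these values at train time (the values are illustrative, not tuned recommendations):

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')

# Override selected augmentation hyperparameters for a short training run
model.train(
    data='coco128.yaml',
    epochs=3,
    hsv_h=0.02,    # slightly stronger hue jitter
    degrees=10.0,  # enable random rotation
    mosaic=0.0,    # disable mosaic
)
```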
## Logging, checkpoints, plotting and file management

Logging, checkpoints, plotting, and file management are important considerations when training a YOLO model.

- Logging: It is often helpful to log various metrics and statistics during training to track the model's progress and diagnose any issues that may arise. This can be done using a logging library such as TensorBoard or by writing log messages to a file.
- Checkpoints: It is a good practice to save checkpoints of the model at regular intervals during training. This allows you to resume training from a previous point if the training process is interrupted or if you want to experiment with different training configurations.
- Plotting: Visualizing the model's performance and training progress can be helpful for understanding how the model is behaving and identifying potential issues. This can be done using a plotting library such as matplotlib or by generating plots using a logging library such as TensorBoard.
- File management: Managing the various files generated during the training process, such as model checkpoints, log files, and plots, can be challenging. It is important to have a clear and organized file structure to keep track of these files and make it easy to access and analyze them as needed.

Effective logging, checkpointing, plotting, and file management can help you keep track of the model's progress and make it easier to debug and optimize the training process. The sketch after the table shows how these keys are set in practice.

| Key        | Value    | Description                                                                                    |
|------------|----------|------------------------------------------------------------------------------------------------|
| `project`  | `'runs'` | project name                                                                                   |
| `name`     | `'exp'`  | experiment name. `exp` gets automatically incremented if not specified, i.e. `exp`, `exp2` ... |
| `exist_ok` | `False`  | whether to overwrite existing experiment                                                       |
| `plots`    | `False`  | save plots during train/val                                                                    |
| `save`     | `False`  | save train checkpoints and predict results                                                    |
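A minimal sketch of directing run outputs with these keys (the `my_project`/`my_experiment` names are placeholders):

```python
from ultralytics import YOLO

model = YOLO('yolov8n.pt')

# Outputs land under my_project/my_experiment (auto-incremented if it already exists)
model.train(
    data='coco128.yaml',
    epochs=3,
    project='my_project',  # parent directory for this run
    name='my_experiment',  # run name
    exist_ok=False,        # do not overwrite an existing run of the same name
    plots=True,            # save training plots
)
```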
diff --git a/ultralytics/docs/en/usage/cfg.md:Zone.Identifier b/ultralytics/docs/en/usage/cfg.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/usage/cfg.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3

diff --git a/ultralytics/docs/en/usage/cli.md b/ultralytics/docs/en/usage/cli.md new file mode 100755 index 0000000..eeae078 --- /dev/null +++ b/ultralytics/docs/en/usage/cli.md @@ -0,0 +1,227 @@

---
comments: true
description: 'Learn how to use Ultralytics YOLO through Command Line: train models, run predictions and export models to different formats easily using terminal commands.'
keywords: Ultralytics, YOLO, CLI, train, validation, prediction, command line interface, YOLO CLI, YOLO terminal, model training, prediction, exporting
---

# Command Line Interface Usage

The YOLO command line interface (CLI) allows for simple single-line commands without the need for a Python environment. CLI requires no customization or Python code. You can simply run all tasks from the terminal with the `yolo` command.

**Watch:** Mastering Ultralytics YOLOv8: CLI

!!! Example

    === "Syntax"

        Ultralytics `yolo` commands use the following syntax:
        ```bash
        yolo TASK MODE ARGS

        Where   TASK (optional) is one of [detect, segment, classify]
                MODE (required) is one of [train, val, predict, export, track]
                ARGS (optional) are any number of custom 'arg=value' pairs like 'imgsz=320' that override defaults.
        ```
        See all ARGS in the full [Configuration Guide](cfg.md) or with `yolo cfg`

    === "Train"

        Train a detection model for 10 epochs with an initial learning_rate of 0.01
        ```bash
        yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
        ```

    === "Predict"

        Predict a YouTube video using a pretrained segmentation model at image size 320:
        ```bash
        yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
        ```

    === "Val"

        Validate a pretrained detection model at batch-size 1 and image size 640:
        ```bash
        yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
        ```

    === "Export"

        Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
        ```bash
        yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
        ```

    === "Special"

        Run special commands to see version, view settings, run checks and more:
        ```bash
        yolo help
        yolo checks
        yolo version
        yolo settings
        yolo copy-cfg
        yolo cfg
        ```

Where:

- `TASK` (optional) is one of `[detect, segment, classify]`. If it is not passed explicitly YOLOv8 will try to guess the `TASK` from the model type.
- `MODE` (required) is one of `[train, val, predict, export, track]`
- `ARGS` (optional) are any number of custom `arg=value` pairs like `imgsz=320` that override defaults. For a full list of available `ARGS` see the [Configuration](cfg.md) page and the `default.yaml` GitHub [source](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/default.yaml).

!!! Warning "Warning"

    Arguments must be passed as `arg=val` pairs, split by an equals `=` sign and delimited by spaces ` ` between pairs. Do not use `--` argument prefixes or commas `,` between arguments.

    - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`   ✅
    - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`   ❌
    - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`   ❌

## Train

Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](cfg.md) page.

!!! Example "Example"

    === "Train"

        Start training YOLOv8n on COCO128 for 100 epochs at image-size 640.
        ```bash
        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
        ```

    === "Resume"

        Resume an interrupted training.
        ```bash
        yolo detect train resume model=last.pt
        ```

## Val

Validate trained YOLOv8n model accuracy on the COCO128 dataset. No arguments need to be passed as the `model` retains its training `data` and arguments as model attributes.

!!! Example "Example"

    === "Official"

        Validate an official YOLOv8n model.
        ```bash
        yolo detect val model=yolov8n.pt
        ```

    === "Custom"

        Validate a custom-trained model.
        ```bash
        yolo detect val model=path/to/best.pt
        ```

## Predict

Use a trained YOLOv8n model to run predictions on images.

!!! Example "Example"

    === "Official"

        Predict with an official YOLOv8n model.
        ```bash
        yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
        ```

    === "Custom"

        Predict with a custom model.
        ```bash
        yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'
        ```

## Export

Export a YOLOv8n model to a different format like ONNX, CoreML, etc.

!!! Example "Example"

    === "Official"

        Export an official YOLOv8n model to ONNX format.
        ```bash
        yolo export model=yolov8n.pt format=onnx
        ```

    === "Custom"

        Export a custom-trained model to ONNX format.
        ```bash
        yolo export model=path/to/best.pt format=onnx
        ```

Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`.

| Format                                                             | `format` Argument | Model                     | Metadata | Arguments                                           |
|--------------------------------------------------------------------|-------------------|---------------------------|----------|-----------------------------------------------------|
| [PyTorch](https://pytorch.org/)                                    | -                 | `yolov8n.pt`              | ✅        | -                                                   |
| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`     | `yolov8n.torchscript`     | ✅        | `imgsz`, `optimize`                                 |
| [ONNX](https://onnx.ai/)                                           | `onnx`            | `yolov8n.onnx`            | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`        | `yolov8n_openvino_model/` | ✅        | `imgsz`, `half`, `int8`                             |
| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`          | `yolov8n.engine`          | ✅        | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
| [CoreML](https://github.com/apple/coremltools)                     | `coreml`          | `yolov8n.mlpackage`       | ✅        | `imgsz`, `half`, `int8`, `nms`                      |
| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`     | `yolov8n_saved_model/`    | ✅        | `imgsz`, `keras`, `int8`                            |
| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`              | `yolov8n.pb`              | ❌        | `imgsz`                                             |
| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`          | `yolov8n.tflite`          | ✅        | `imgsz`, `half`, `int8`                             |
| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`         | `yolov8n_edgetpu.tflite`  | ✅        | `imgsz`                                             |
| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`            | `yolov8n_web_model/`      | ✅        | `imgsz`, `half`, `int8`                             |
| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`          | `yolov8n_paddle_model/`   | ✅        | `imgsz`                                             |
| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`            | `yolov8n_ncnn_model/`     | ✅        | `imgsz`, `half`                                     |

## Overriding default arguments

Default arguments can be overridden by simply passing them as arguments in the CLI in `arg=value` pairs.

!!! Tip ""

    === "Train"
        Train a detection model for `10 epochs` with `learning_rate` of `0.01`
        ```bash
        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
        ```

    === "Predict"
        Predict a YouTube video using a pretrained segmentation model at image size 320:
        ```bash
        yolo segment predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
        ```

    === "Val"
        Validate a pretrained detection model at batch-size 1 and image size 640:
        ```bash
        yolo detect val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
        ```

## Overriding default config file

You can override the `default.yaml` config file entirely by passing a new file with the `cfg` argument, i.e. `cfg=custom.yaml`.

To do this, first create a copy of `default.yaml` in your current working directory with the `yolo copy-cfg` command.
+ +This will create `default_copy.yaml`, which you can then pass as `cfg=default_copy.yaml` along with any additional args, like `imgsz=320` in this example: + +!!! Example + + === "CLI" + ```bash + yolo copy-cfg + yolo cfg=default_copy.yaml imgsz=320 + ``` diff --git a/ultralytics/docs/en/usage/cli.md:Zone.Identifier b/ultralytics/docs/en/usage/cli.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/usage/cli.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/usage/engine.md b/ultralytics/docs/en/usage/engine.md new file mode 100755 index 0000000..12d25e3 --- /dev/null +++ b/ultralytics/docs/en/usage/engine.md @@ -0,0 +1,93 @@ +--- +comments: true +description: Discover how to customize and extend base Ultralytics YOLO Trainer engines. Support your custom model and dataloader by overriding built-in functions. +keywords: Ultralytics, YOLO, trainer engines, BaseTrainer, DetectionTrainer, customizing trainers, extending trainers, custom model, custom dataloader +--- + +Both the Ultralytics YOLO command-line and Python interfaces are simply a high-level abstraction on the base engine executors. Let's take a look at the Trainer engine. + +
**Watch:** Mastering Ultralytics YOLOv8: Advanced Customization
## BaseTrainer

BaseTrainer contains the generic boilerplate training routine. It can be customized for any task by overriding the required functions or operations, as long as the correct formats are followed. For example, you can support your own custom model and dataloader by just overriding these functions:

* `get_model(cfg, weights)` - The function that builds the model to be trained
* `get_dataloader()` - The function that builds the dataloader

More details and source code can be found in the [`BaseTrainer` Reference](../reference/engine/trainer.md).

## DetectionTrainer

Here's how you can use the YOLOv8 `DetectionTrainer` and customize it.

```python
from ultralytics.models.yolo.detect import DetectionTrainer

trainer = DetectionTrainer(overrides={...})
trainer.train()
trained_model = trainer.best  # get best model
```

### Customizing the DetectionTrainer

Let's customize the trainer **to train a custom detection model** that is not supported directly. You can do this by simply overloading the existing `get_model` functionality:

```python
from ultralytics.models.yolo.detect import DetectionTrainer


class CustomTrainer(DetectionTrainer):
    def get_model(self, cfg, weights):
        ...


trainer = CustomTrainer(overrides={...})
trainer.train()
```

You now realize that you need to customize the trainer further to:

* Customize the `loss function`.
* Add a `callback` that uploads the model to your Google Drive after every 10 `epochs`.

Here's how you can do it:

```python
from ultralytics.models.yolo.detect import DetectionTrainer
from ultralytics.nn.tasks import DetectionModel


class MyCustomModel(DetectionModel):
    def init_criterion(self):
        ...


class CustomTrainer(DetectionTrainer):
    def get_model(self, cfg, weights):
        return MyCustomModel(...)


# callback to upload model weights
def log_model(trainer):
    last_weight_path = trainer.last
    print(last_weight_path)


trainer = CustomTrainer(overrides={...})
trainer.add_callback("on_train_epoch_end", log_model)  # Adds to existing callbacks
trainer.train()
```

To know more about callback triggering events and entry points, check out our [Callbacks Guide](callbacks.md).

## Other engine components

There are other components that can be customized similarly, like `Validators` and `Predictors`. See the Reference section for more information on these.
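The same override pattern applies to these classes too. As a minimal, hedged sketch (assuming the `postprocess` hook on `DetectionPredictor`; check the Reference section for the exact signatures in your version):

```python
from ultralytics.models.yolo.detect import DetectionPredictor


class CustomPredictor(DetectionPredictor):
    def postprocess(self, preds, img, orig_imgs):
        # Run the stock post-processing first, then inspect or filter the results
        results = super().postprocess(preds, img, orig_imgs)
        return results


predictor = CustomPredictor(overrides={})
# predictor(source='https://ultralytics.com/images/bus.jpg', model='yolov8n.pt')
```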
diff --git a/ultralytics/docs/en/usage/engine.md:Zone.Identifier b/ultralytics/docs/en/usage/engine.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/usage/engine.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3

diff --git a/ultralytics/docs/en/usage/python.md b/ultralytics/docs/en/usage/python.md new file mode 100755 index 0000000..53a456e --- /dev/null +++ b/ultralytics/docs/en/usage/python.md @@ -0,0 +1,273 @@

---
comments: true
description: Boost your Python projects with object detection, segmentation and classification using YOLOv8. Explore how to load, train, validate, predict, export, track and benchmark models with ease.
keywords: YOLOv8, Ultralytics, Python, object detection, segmentation, classification, model training, validation, prediction, model export, benchmark, real-time tracking
---

# Python Usage

Welcome to the YOLOv8 Python Usage documentation! This guide is designed to help you seamlessly integrate YOLOv8 into your Python projects for object detection, segmentation, and classification. Here, you'll learn how to load and use pretrained models, train new models, and perform predictions on images. The easy-to-use Python interface is a valuable resource for anyone looking to incorporate YOLOv8 into their Python projects, allowing you to quickly implement advanced object detection capabilities. Let's get started!
**Watch:** Mastering Ultralytics YOLOv8: Python
For example, users can load a model, train it, evaluate its performance on a validation set, and even export it to ONNX format with just a few lines of code.

!!! Example "Python"

    ```python
    from ultralytics import YOLO

    # Create a new YOLO model from scratch
    model = YOLO('yolov8n.yaml')

    # Load a pretrained YOLO model (recommended for training)
    model = YOLO('yolov8n.pt')

    # Train the model using the 'coco128.yaml' dataset for 3 epochs
    results = model.train(data='coco128.yaml', epochs=3)

    # Evaluate the model's performance on the validation set
    results = model.val()

    # Perform object detection on an image using the model
    results = model('https://ultralytics.com/images/bus.jpg')

    # Export the model to ONNX format
    success = model.export(format='onnx')
    ```

## [Train](../modes/train.md)

Train mode is used for training a YOLOv8 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process involves optimizing the model's parameters so that it can accurately predict the classes and locations of objects in an image.

!!! Example "Train"

    === "From pretrained (recommended)"
        ```python
        from ultralytics import YOLO

        model = YOLO('yolov8n.pt')  # pass any model type
        results = model.train(epochs=5)
        ```

    === "From scratch"
        ```python
        from ultralytics import YOLO

        model = YOLO('yolov8n.yaml')
        results = model.train(data='coco128.yaml', epochs=5)
        ```

    === "Resume"
        ```python
        model = YOLO("last.pt")
        results = model.train(resume=True)
        ```

[Train Examples](../modes/train.md){ .md-button }

## [Val](../modes/val.md)

Val mode is used for validating a YOLOv8 model after it has been trained. In this mode, the model is evaluated on a validation set to measure its accuracy and generalization performance. This mode can be used to tune the hyperparameters of the model to improve its performance.

!!! Example "Val"

    === "Val after training"
        ```python
        from ultralytics import YOLO

        model = YOLO('yolov8n.yaml')
        model.train(data='coco128.yaml', epochs=5)
        model.val()  # It'll automatically evaluate the data you trained on.
        ```

    === "Val independently"
        ```python
        from ultralytics import YOLO

        model = YOLO("model.pt")
        # It'll use the data YAML file in model.pt if you don't set data.
        model.val()
        # or you can set the data you want to val
        model.val(data='coco128.yaml')
        ```

[Val Examples](../modes/val.md){ .md-button }
## [Predict](../modes/predict.md)

Predict mode is used for making predictions using a trained YOLOv8 model on new images or videos. In this mode, the model is loaded from a checkpoint file, and the user can provide images or videos to perform inference. The model predicts the classes and locations of objects in the input images or videos.

!!! Example "Predict"

    === "From source"
        ```python
        from ultralytics import YOLO
        from PIL import Image
        import cv2

        model = YOLO("model.pt")
        # accepts all formats - image/dir/Path/URL/video/PIL/ndarray. 0 for webcam
        results = model.predict(source="0")
        results = model.predict(source="folder", show=True)  # Display preds. Accepts all YOLO predict arguments

        # from PIL
        im1 = Image.open("bus.jpg")
        results = model.predict(source=im1, save=True)  # save plotted images

        # from ndarray
        im2 = cv2.imread("bus.jpg")
        results = model.predict(source=im2, save=True, save_txt=True)  # save predictions as labels

        # from list of PIL/ndarray
        results = model.predict(source=[im1, im2])
        ```

    === "Results usage"
        ```python
        # results would be a list of Results objects including all the predictions by default,
        # but be careful as it could occupy a lot of memory when there are many images,
        # especially when the task is segmentation.
        # 1. return as a list
        results = model.predict(source="folder")

        # results would be a generator, which is more memory friendly, by setting stream=True
        # 2. return as a generator
        results = model.predict(source=0, stream=True)

        for result in results:
            # Detection
            result.boxes.xyxy  # box with xyxy format, (N, 4)
            result.boxes.xywh  # box with xywh format, (N, 4)
            result.boxes.xyxyn  # box with xyxy format but normalized, (N, 4)
            result.boxes.xywhn  # box with xywh format but normalized, (N, 4)
            result.boxes.conf  # confidence score, (N, 1)
            result.boxes.cls  # cls, (N, 1)

            # Segmentation
            result.masks.data  # masks, (N, H, W)
            result.masks.xy  # x,y segments (pixels), List[segment] * N
            result.masks.xyn  # x,y segments (normalized), List[segment] * N

            # Classification
            result.probs  # cls prob, (num_class, )

            # Each result is composed of torch.Tensor by default,
            # in which you can easily use the following functionality:
            result = result.cuda()
            result = result.cpu()
            result = result.to("cpu")
            result = result.numpy()
        ```

[Predict Examples](../modes/predict.md){ .md-button }

## [Export](../modes/export.md)

Export mode is used for exporting a YOLOv8 model to a format that can be used for deployment. In this mode, the model is converted to a format that can be used by other software applications or hardware devices. This mode is useful when deploying the model to production environments.

!!! Example "Export"

    === "Export to ONNX"

        Export an official YOLOv8n model to ONNX with dynamic batch-size and image-size.
        ```python
        from ultralytics import YOLO

        model = YOLO('yolov8n.pt')
        model.export(format='onnx', dynamic=True)
        ```

    === "Export to TensorRT"

        Export an official YOLOv8n model to TensorRT on `device=0` for acceleration on CUDA devices.
        ```python
        from ultralytics import YOLO

        model = YOLO('yolov8n.pt')
        model.export(format='engine', device=0)
        ```

[Export Examples](../modes/export.md){ .md-button }

## [Track](../modes/track.md)

Track mode is used for tracking objects in real-time using a YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user can provide a live video stream to perform real-time object tracking. This mode is useful for applications such as surveillance systems or self-driving cars.
!!! Example "Track"

    === "Python"

        ```python
        from ultralytics import YOLO

        # Load a model
        model = YOLO('yolov8n.pt')  # load an official detection model
        model = YOLO('yolov8n-seg.pt')  # load an official segmentation model
        model = YOLO('path/to/best.pt')  # load a custom model

        # Track with the model
        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)
        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")
        ```

[Track Examples](../modes/track.md){ .md-button }

## [Benchmark](../modes/benchmark.md)

Benchmark mode is used to profile the speed and accuracy of various export formats for YOLOv8. The benchmarks provide information on the size of the exported format, its `mAP50-95` metrics (for object detection and segmentation) or `accuracy_top5` metrics (for classification), and the inference time in milliseconds per image across various export formats like ONNX, OpenVINO, TensorRT and others. This information can help users choose the optimal export format for their specific use case based on their requirements for speed and accuracy.

!!! Example "Benchmark"

    === "Python"

        Benchmark an official YOLOv8n model across all export formats.
        ```python
        from ultralytics.utils.benchmarks import benchmark

        # Benchmark
        benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0)
        ```

[Benchmark Examples](../modes/benchmark.md){ .md-button }

## Using Trainers

The `YOLO` model class is a high-level wrapper on the Trainer classes. Each YOLO task has its own trainer that inherits from `BaseTrainer`.

!!! Tip "Detection Trainer Example"

    ```python
    from ultralytics.models.yolo import DetectionTrainer, DetectionValidator, DetectionPredictor

    # trainer
    overrides = {}
    trainer = DetectionTrainer(overrides=overrides)
    trainer.train()
    trained_model = trainer.best

    # Validator
    val = DetectionValidator(args=...)
    val(model=trained_model)

    # predictor
    pred = DetectionPredictor(overrides={})
    pred(source=SOURCE, model=trained_model)

    # resume from last weight
    overrides["resume"] = trainer.last
    trainer = DetectionTrainer(overrides=overrides)
    ```

You can easily customize Trainers to support custom tasks or explore R&D ideas. Learn more about Customizing `Trainers`, `Validators` and `Predictors` to suit your project needs in the Customization Section.

[Customization tutorials](engine.md){ .md-button }

diff --git a/ultralytics/docs/en/usage/python.md:Zone.Identifier b/ultralytics/docs/en/usage/python.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/usage/python.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3

diff --git a/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md b/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md new file mode 100755 index 0000000..2bf3240 --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md @@ -0,0 +1,95 @@

---
comments: true
description: Follow this comprehensive guide to set up and operate YOLOv5 on an AWS Deep Learning instance for object detection tasks. Get started with model training and deployment.
keywords: YOLOv5, AWS Deep Learning AMIs, object detection, machine learning, AI, model training, instance setup, Ultralytics
---

# YOLOv5 🚀 on AWS Deep Learning Instance: Your Complete Guide

Setting up a high-performance deep learning environment can be daunting for newcomers, but fear not!
๐Ÿ› ๏ธ With this guide, we'll walk you through the process of getting YOLOv5 up and running on an AWS Deep Learning instance. By leveraging the power of Amazon Web Services (AWS), even those new to machine learning can get started quickly and cost-effectively. The AWS platform's scalability is perfect for both experimentation and production deployment. + +Other quickstart options for YOLOv5 include our [Colab Notebook](https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb) Open In Colab Open In Kaggle, [GCP Deep Learning VM](https://docs.ultralytics.com/yolov5/environments/google_cloud_quickstart_tutorial), and our Docker image at [Docker Hub](https://hub.docker.com/r/ultralytics/yolov5) Docker Pulls. + +## Step 1: AWS Console Sign-In + +Start by creating an account or signing in to the AWS console at [https://aws.amazon.com/console/](https://aws.amazon.com/console/). Once logged in, select the **EC2** service to manage and set up your instances. + +![Console](https://user-images.githubusercontent.com/26833433/106323804-debddd00-622c-11eb-997f-b8217dc0e975.png) + +## Step 2: Launch Your Instance + +In the EC2 dashboard, you'll find the **Launch Instance** button which is your gateway to creating a new virtual server. + +![Launch](https://user-images.githubusercontent.com/26833433/106323950-204e8800-622d-11eb-915d-5c90406973ea.png) + +### Selecting the Right Amazon Machine Image (AMI) + +Here's where you choose the operating system and software stack for your instance. Type 'Deep Learning' into the search field and select the latest Ubuntu-based Deep Learning AMI, unless your needs dictate otherwise. Amazon's Deep Learning AMIs come pre-installed with popular frameworks and GPU drivers to streamline your setup process. + +![Choose AMI](https://user-images.githubusercontent.com/26833433/106326107-c9e34880-6230-11eb-97c9-3b5fc2f4e2ff.png) + +### Picking an Instance Type + +For deep learning tasks, selecting a GPU instance type is generally recommended as it can vastly accelerate model training. For instance size considerations, remember that the model's memory requirements should never exceed what your instance can provide. + +**Note:** The size of your model should be a factor in selecting an instance. If your model exceeds an instance's available RAM, select a different instance type with enough memory for your application. + +For a list of available GPU instance types, visit [EC2 Instance Types](https://aws.amazon.com/ec2/instance-types/), specifically under Accelerated Computing. + +![Choose Type](https://user-images.githubusercontent.com/26833433/106324624-52141e80-622e-11eb-9662-1a376d9c887d.png) + +For more information on GPU monitoring and optimization, see [GPU Monitoring and Optimization](https://docs.aws.amazon.com/dlami/latest/devguide/tutorial-gpu.html). For pricing, see [On-Demand Pricing](https://aws.amazon.com/ec2/pricing/on-demand/) and [Spot Pricing](https://aws.amazon.com/ec2/spot/pricing/). + +### Configuring Your Instance + +Amazon EC2 Spot Instances offer a cost-effective way to run applications as they allow you to bid for unused capacity at a fraction of the standard cost. For a persistent experience that retains data even when the Spot Instance goes down, opt for a persistent request. + +![Spot Request](https://user-images.githubusercontent.com/26833433/106324835-ac14e400-622e-11eb-8853-df5ec9b16dfc.png) + +Remember to adjust the rest of your instance settings and security configurations as needed in Steps 4-7 before launching. 
+ +## Step 3: Connect to Your Instance + +Once your instance is running, select its checkbox and click Connect to access the SSH information. Use the displayed SSH command in your preferred terminal to establish a connection to your instance. + +![Connect](https://user-images.githubusercontent.com/26833433/106325530-cf8c5e80-622f-11eb-9f64-5b313a9d57a1.png) + +## Step 4: Running YOLOv5 + +Logged into your instance, you're now ready to clone the YOLOv5 repository and install dependencies within a Python 3.8 or later environment. YOLOv5's models and datasets will automatically download from the latest [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone repository +cd yolov5 +pip install -r requirements.txt # install dependencies +``` + +With your environment set up, you can begin training, validating, performing inference, and exporting your YOLOv5 models: + +```bash +# Train a model on your data +python train.py + +# Validate the trained model for Precision, Recall, and mAP +python val.py --weights yolov5s.pt + +# Run inference using the trained model on your images or videos +python detect.py --weights yolov5s.pt --source path/to/images + +# Export the trained model to other formats for deployment +python export.py --weights yolov5s.pt --include onnx coreml tflite +``` + +## Optional Extras + +To add more swap memory, which can be a savior for large datasets, run: + +```bash +sudo fallocate -l 64G /swapfile # allocate 64GB swap file +sudo chmod 600 /swapfile # modify permissions +sudo mkswap /swapfile # set up a Linux swap area +sudo swapon /swapfile # activate swap file +free -h # verify swap memory +``` + +And that's it! ๐ŸŽ‰ You've successfully created an AWS Deep Learning instance and run YOLOv5. Whether you're just starting with object detection or scaling up for production, this setup can help you achieve your machine learning goals. Happy training, validating, and deploying! If you encounter any hiccups along the way, the robust AWS documentation and the active Ultralytics community are here to support you. diff --git a/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md:Zone.Identifier b/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/aws_quickstart_tutorial.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md b/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md new file mode 100755 index 0000000..e1e58a4 --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md @@ -0,0 +1,95 @@ +--- +comments: true +description: Azure Machine Learning YOLOv5 quickstart +keywords: Ultralytics, YOLO, Deep Learning, Object detection, quickstart, Azure, AzureML +--- + +# YOLOv5 ๐Ÿš€ on AzureML + +This guide provides a quickstart to use YOLOv5 from an AzureML compute instance. + +Note that this guide is a quickstart for quick trials. 
If you want to unlock the full power of AzureML, you can find the documentation to:

- [Create a data asset](https://learn.microsoft.com/azure/machine-learning/how-to-create-data-assets)
- [Create an AzureML job](https://learn.microsoft.com/azure/machine-learning/how-to-train-model)
- [Register a model](https://learn.microsoft.com/azure/machine-learning/how-to-manage-models)

## Prerequisites

You need an [AzureML workspace](https://learn.microsoft.com/azure/machine-learning/concept-workspace?view=azureml-api-2).

## Create a compute instance

From your AzureML workspace, select Compute > Compute instances > New, and select the instance with the resources you need.

create-compute-arrow

## Open a Terminal

Now from the Notebooks view, open a Terminal and select your compute.

![open-terminal-arrow](https://github.com/ouphi/ultralytics/assets/17216799/c4697143-7234-4a04-89ea-9084ed9c6312)

## Setup and run YOLOv5

Now you can create a virtual environment:

```bash
conda create --name yolov5env -y
conda activate yolov5env
conda install pip -y
```

Clone the YOLOv5 repository with its submodules:

```bash
git clone https://github.com/ultralytics/yolov5
cd yolov5
git submodule update --init --recursive # Note that you might see a message asking you to add the folder as a safe.directory; just copy the recommended command
```

Install the required dependencies:

```bash
pip install -r requirements.txt
pip install "onnx>=1.10.0"
```

Train the YOLOv5 model:

```bash
python train.py
```

Validate the model for Precision, Recall, and mAP:

```bash
python val.py --weights yolov5s.pt
```

Run inference on images and videos:

```bash
python detect.py --weights yolov5s.pt --source path/to/images
```

Export models to other formats:

```bash
python export.py --weights yolov5s.pt --include onnx coreml tflite
```

## Notes on using a notebook

Note that if you want to run these commands from a Notebook, you need to [create a new Kernel](https://learn.microsoft.com/en-us/azure/machine-learning/how-to-access-terminal?view=azureml-api-2#add-new-kernels) and select your new Kernel at the top of your Notebook.

If you create Python cells, they will automatically use your custom environment, but if you add bash cells, you will need to run `source activate yolov5env` in each of these cells to make sure they use your custom environment.

For example:

```bash
%%bash
source activate yolov5env
python val.py --weights yolov5s.pt
```

diff --git a/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md:Zone.Identifier b/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/azureml_quickstart_tutorial.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3

diff --git a/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md b/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md new file mode 100755 index 0000000..2e2bd81 --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md @@ -0,0 +1,71 @@

---
comments: true
description: Learn how to set up and run YOLOv5 in a Docker container. This tutorial includes the prerequisites and step-by-step instructions.
+keywords: YOLOv5, Docker, Ultralytics, Image Detection, YOLOv5 Docker Image, Docker Container, Machine Learning, AI +--- + +# Get Started with YOLOv5 ๐Ÿš€ in Docker + +This tutorial will guide you through the process of setting up and running YOLOv5 in a Docker container. + +You can also explore other quickstart options for YOLOv5, such as our [Colab Notebook](https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb) Open In Colab Open In Kaggle, [GCP Deep Learning VM](https://docs.ultralytics.com/yolov5/environments/google_cloud_quickstart_tutorial), and [Amazon AWS](https://docs.ultralytics.com/yolov5/environments/aws_quickstart_tutorial). + +## Prerequisites + +1. **Nvidia Driver**: Version 455.23 or higher. Download from [Nvidia's website](https://www.nvidia.com/Download/index.aspx). +2. **Nvidia-Docker**: Allows Docker to interact with your local GPU. Installation instructions are available on the [Nvidia-Docker GitHub repository](https://github.com/NVIDIA/nvidia-docker). +3. **Docker Engine - CE**: Version 19.03 or higher. Download and installation instructions can be found on the [Docker website](https://docs.docker.com/install/). + +## Step 1: Pull the YOLOv5 Docker Image + +The Ultralytics YOLOv5 DockerHub repository is available at [https://hub.docker.com/r/ultralytics/yolov5](https://hub.docker.com/r/ultralytics/yolov5). Docker Autobuild ensures that the `ultralytics/yolov5:latest` image is always in sync with the most recent repository commit. To pull the latest image, run the following command: + +```bash +sudo docker pull ultralytics/yolov5:latest +``` + +## Step 2: Run the Docker Container + +### Basic container: + +Run an interactive instance of the YOLOv5 Docker image (called a "container") using the `-it` flag: + +```bash +sudo docker run --ipc=host -it ultralytics/yolov5:latest +``` + +### Container with local file access: + +To run a container with access to local files (e.g., COCO training data in `/datasets`), use the `-v` flag: + +```bash +sudo docker run --ipc=host -it -v "$(pwd)"/datasets:/usr/src/datasets ultralytics/yolov5:latest +``` + +### Container with GPU access: + +To run a container with GPU access, use the `--gpus all` flag: + +```bash +sudo docker run --ipc=host -it --gpus all ultralytics/yolov5:latest +``` + +## Step 3: Use YOLOv5 ๐Ÿš€ within the Docker Container + +Now you can train, test, detect, and export YOLOv5 models within the running Docker container: + +```bash +# Train a model on your data +python train.py + +# Validate the trained model for Precision, Recall, and mAP +python val.py --weights yolov5s.pt + +# Run inference using the trained model on your images or videos +python detect.py --weights yolov5s.pt --source path/to/images + +# Export the trained model to other formats for deployment +python export.py --weights yolov5s.pt --include onnx coreml tflite +``` + +
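The container's Python environment can be exercised the same way. A minimal, hedged smoke test using the PyTorch Hub API described in the YOLOv5 docs (assumes network access inside the container for the Hub download):

```python
import torch

# Load a pretrained YOLOv5s model from PyTorch Hub (downloads on first use)
model = torch.hub.load('ultralytics/yolov5', 'yolov5s')

# Run inference on a sample image URL and print the detections
results = model('https://ultralytics.com/images/zidane.jpg')
results.print()
```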
GCP running Docker
diff --git a/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md:Zone.Identifier b/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/docker_image_quickstart_tutorial.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md b/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md new file mode 100755 index 0000000..fd96e84 --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md @@ -0,0 +1,87 @@ +--- +comments: true +description: Discover how to deploy YOLOv5 on a GCP Deep Learning VM for seamless object detection. Ideal for ML beginners and cloud learners. Get started with our easy-to-follow tutorial! +keywords: YOLOv5, Google Cloud Platform, GCP, Deep Learning VM, ML model training, object detection, AI tutorial, cloud-based AI, machine learning setup +--- + +# Mastering YOLOv5 ๐Ÿš€ Deployment on Google Cloud Platform (GCP) Deep Learning Virtual Machine (VM) โญ + +Embarking on the journey of artificial intelligence and machine learning can be exhilarating, especially when you leverage the power and flexibility of a cloud platform. Google Cloud Platform (GCP) offers robust tools tailored for machine learning enthusiasts and professionals alike. One such tool is the Deep Learning VM that is preconfigured for data science and ML tasks. In this tutorial, we will navigate through the process of setting up YOLOv5 on a GCP Deep Learning VM. Whether youโ€™re taking your first steps in ML or youโ€™re a seasoned practitioner, this guide is designed to provide you with a clear pathway to implementing object detection models powered by YOLOv5. + +๐Ÿ†“ Plus, if you're a fresh GCP user, youโ€™re in luck with a [$300 free credit offer](https://cloud.google.com/free/docs/gcp-free-tier#free-trial) to kickstart your projects. + +In addition to GCP, explore other accessible quickstart options for YOLOv5, like our [Colab Notebook](https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb) Open In Colab for a browser-based experience, or the scalability of [Amazon AWS](https://docs.ultralytics.com/yolov5/environments/aws_quickstart_tutorial). Furthermore, container aficionados can utilize our official Docker image at [Docker Hub](https://hub.docker.com/r/ultralytics/yolov5) Docker Pulls for an encapsulated environment. + +## Step 1: Create and Configure Your Deep Learning VM + +Letโ€™s begin by creating a virtual machine thatโ€™s tuned for deep learning: + +1. Head over to the [GCP marketplace](https://console.cloud.google.com/marketplace/details/click-to-deploy-images/deeplearning) and select the **Deep Learning VM**. +2. Opt for a **n1-standard-8** instance; it offers a balance of 8 vCPUs and 30 GB of memory, ideally suited for our needs. +3. Next, select a GPU. This depends on your workload; even a basic one like the Tesla T4 will markedly accelerate your model training. +4. Tick the box for 'Install NVIDIA GPU driver automatically on first startup?' for hassle-free setup. +5. Allocate a 300 GB SSD Persistent Disk to ensure you don't bottleneck on I/O operations. +6. Hit 'Deploy' and let GCP do its magic in provisioning your custom Deep Learning VM. 
This VM comes loaded with a treasure trove of preinstalled tools and frameworks, including the [Anaconda](https://www.anaconda.com/) Python distribution, which conveniently bundles all the necessary dependencies for YOLOv5.

![GCP Marketplace illustration of setting up a Deep Learning VM](https://user-images.githubusercontent.com/26833433/105811495-95863880-5f61-11eb-841d-c2f2a5aa0ffe.png)

## Step 2: Ready the VM for YOLOv5

Following the environment setup, let's get YOLOv5 up and running:

```bash
# Clone the YOLOv5 repository
git clone https://github.com/ultralytics/yolov5

# Change the directory to the cloned repository
cd yolov5

# Install the necessary Python packages from requirements.txt
pip install -r requirements.txt
```

This setup process ensures you're working with a Python environment version 3.8.0 or newer and PyTorch 1.8 or above. Our scripts smoothly download [models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) directly from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases), making it hassle-free to start model training.

## Step 3: Train and Deploy Your YOLOv5 Models 🌐

With the setup complete, you're ready to delve into training and inference with YOLOv5 on your GCP VM:

```bash
# Train a model on your data
python train.py

# Validate the trained model for Precision, Recall, and mAP
python val.py --weights yolov5s.pt

# Run inference using the trained model on your images or videos
python detect.py --weights yolov5s.pt --source path/to/images

# Export the trained model to other formats for deployment
python export.py --weights yolov5s.pt --include onnx coreml tflite
```

With just a few commands, YOLOv5 allows you to train custom object detection models tailored to your specific needs or utilize pre-trained weights for quick results on a variety of tasks.

![Terminal command image illustrating model training on a GCP Deep Learning VM](https://user-images.githubusercontent.com/26833433/142223900-275e5c9e-e2b5-43f7-a21c-35c4ca7de87c.png)

## Allocate Swap Space (optional)

For those dealing with hefty datasets, consider amplifying your GCP instance with an additional 64GB of swap memory:

```bash
sudo fallocate -l 64G /swapfile
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile
free -h # confirm the memory increment
```

### Concluding Thoughts

Congratulations! You are now empowered to harness the capabilities of YOLOv5 with the computational prowess of Google Cloud Platform. This combination provides scalability, efficiency, and versatility for your object detection tasks. Whether for personal projects, academic research, or industrial applications, you have taken a pivotal step into the world of AI and machine learning on the cloud.

Do remember to document your journey, share insights with the Ultralytics community, and leverage the collaborative arenas such as [GitHub discussions](https://github.com/ultralytics/yolov5/discussions) to grow further. Now, go forth and innovate with YOLOv5 and GCP! 🌟

Want to keep improving your ML skills and knowledge? Dive into our [documentation and tutorials](https://docs.ultralytics.com/) for more resources. Let your AI adventure continue!
diff --git a/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md:Zone.Identifier b/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/environments/google_cloud_quickstart_tutorial.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/index.md b/ultralytics/docs/en/yolov5/index.md new file mode 100755 index 0000000..1e8303d --- /dev/null +++ b/ultralytics/docs/en/yolov5/index.md @@ -0,0 +1,91 @@ +--- +comments: true +description: Deep dive into Ultralytics' YOLOv5. Learn about object detection model - YOLOv5, how to train it on custom data, multi-GPU training and more. +keywords: YOLOv5, object detection, computer vision, CUDA, PyTorch tutorial, multi-GPU training, custom dataset, model export, deployment, CI tests +--- + +# Comprehensive Guide to Ultralytics YOLOv5 + +
Ultralytics YOLOv5 v7.0 banner

Badges: YOLOv5 CI | YOLOv5 Citation | Docker Pulls | Run on Gradient | Open In Colab | Open In Kaggle
+ +Welcome to the Ultralytics' YOLOv5๐Ÿš€ Documentation! YOLOv5, the fifth iteration of the revolutionary "You Only Look Once" object detection model, is designed to deliver high-speed, high-accuracy results in real-time. +

+Built on PyTorch, this powerful deep learning framework has garnered immense popularity for its versatility, ease of use, and high performance. Our documentation guides you through the installation process, explains the architectural nuances of the model, showcases various use-cases, and provides a series of detailed tutorials. These resources will help you harness the full potential of YOLOv5 for your computer vision projects. Let's get started! + +
+ +## Explore and Learn + +Here's a compilation of comprehensive tutorials that will guide you through different aspects of YOLOv5. + +* [Train Custom Data](tutorials/train_custom_data.md) ๐Ÿš€ RECOMMENDED: Learn how to train the YOLOv5 model on your custom dataset. +* [Tips for Best Training Results](tutorials/tips_for_best_training_results.md) โ˜˜๏ธ: Uncover practical tips to optimize your model training process. +* [Multi-GPU Training](tutorials/multi_gpu_training.md): Understand how to leverage multiple GPUs to expedite your training. +* [PyTorch Hub](tutorials/pytorch_hub_model_loading.md) ๐ŸŒŸ NEW: Learn to load pre-trained models via PyTorch Hub. +* [TFLite, ONNX, CoreML, TensorRT Export](tutorials/model_export.md) ๐Ÿš€: Understand how to export your model to different formats. +* [NVIDIA Jetson platform Deployment](tutorials/running_on_jetson_nano.md) ๐ŸŒŸ NEW: Learn how to deploy your YOLOv5 model on NVIDIA Jetson platform. +* [Test-Time Augmentation (TTA)](tutorials/test_time_augmentation.md): Explore how to use TTA to improve your model's prediction accuracy. +* [Model Ensembling](tutorials/model_ensembling.md): Learn the strategy of combining multiple models for improved performance. +* [Model Pruning/Sparsity](tutorials/model_pruning_and_sparsity.md): Understand pruning and sparsity concepts, and how to create a more efficient model. +* [Hyperparameter Evolution](tutorials/hyperparameter_evolution.md): Discover the process of automated hyperparameter tuning for better model performance. +* [Transfer Learning with Frozen Layers](tutorials/transfer_learning_with_frozen_layers.md): Learn how to implement transfer learning by freezing layers in YOLOv5. +* [Architecture Summary](tutorials/architecture_description.md) ๐ŸŒŸ Delve into the structural details of the YOLOv5 model. +* [Roboflow for Datasets](tutorials/roboflow_datasets_integration.md): Understand how to utilize Roboflow for dataset management, labeling, and active learning. +* [ClearML Logging](tutorials/clearml_logging_integration.md) ๐ŸŒŸ Learn how to integrate ClearML for efficient logging during your model training. +* [YOLOv5 with Neural Magic](tutorials/neural_magic_pruning_quantization.md) Discover how to use Neural Magic's Deepsparse to prune and quantize your YOLOv5 model. +* [Comet Logging](tutorials/comet_logging_integration.md) ๐ŸŒŸ NEW: Explore how to utilize Comet for improved model training logging. + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. 
These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. + +
+ +## Connect and Contribute + +Your journey with YOLOv5 doesn't have to be a solitary one. Join our vibrant community on [GitHub](https://github.com/ultralytics/yolov5), connect with professionals on [LinkedIn](https://www.linkedin.com/company/ultralytics/), share your results on [Twitter](https://twitter.com/ultralytics), and find educational resources on [YouTube](https://youtube.com/ultralytics). Follow us on [TikTok](https://www.tiktok.com/@ultralytics) and [Instagram](https://www.instagram.com/ultralytics/) for more engaging content. + +Interested in contributing? We welcome contributions of all forms; from code improvements and bug reports to documentation updates. Check out our [contributing guidelines](https://github.com/ultralytics/yolov5/blob/master/CONTRIBUTING.md) for more information. + +We're excited to see the innovative ways you'll use YOLOv5. Dive in, experiment, and revolutionize your computer vision projects! ๐Ÿš€ diff --git a/ultralytics/docs/en/yolov5/index.md:Zone.Identifier b/ultralytics/docs/en/yolov5/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/quickstart_tutorial.md b/ultralytics/docs/en/yolov5/quickstart_tutorial.md new file mode 100755 index 0000000..e5a379c --- /dev/null +++ b/ultralytics/docs/en/yolov5/quickstart_tutorial.md @@ -0,0 +1,72 @@ +--- +comments: true +description: Dive into YOLOv5 for object detection with our easy-to-follow guide on setup, model training, and image inference using PyTorch. Get started now! +keywords: YOLOv5 Tutorial, Object Detection Guide, PyTorch Model Training, Inference with YOLOv5, Ultralytics YOLOv5 Setup +--- + +# YOLOv5 Quickstart ๐Ÿš€ + +Embark on your journey into the dynamic realm of real-time object detection with YOLOv5! This guide is crafted to serve as a comprehensive starting point for AI enthusiasts and professionals aiming to master YOLOv5. From initial setup to advanced training techniques, we've got you covered. By the end of this guide, you'll have the knowledge to implement YOLOv5 into your projects confidently. Let's ignite the engines and soar into YOLOv5! + +## Install + +Prepare for launch by cloning the repository and establishing the environment. This ensures that all the necessary [requirements](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) are installed. Check that you have [**Python>=3.8.0**](https://www.python.org/) and [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/) ready for takeoff. + +```bash +git clone https://github.com/ultralytics/yolov5 # clone repository +cd yolov5 +pip install -r requirements.txt # install dependencies +``` + +## Inference with PyTorch Hub + +Experience the simplicity of YOLOv5 [PyTorch Hub](https://docs.ultralytics.com/yolov5/tutorials/pytorch_hub_model_loading) inference, where [models](https://github.com/ultralytics/yolov5/tree/master/models) are seamlessly downloaded from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). 
+ +```python +import torch + +# Model loading +model = torch.hub.load("ultralytics/yolov5", "yolov5s") # Can be 'yolov5n' - 'yolov5x6', or 'custom' + +# Inference on images +img = "https://ultralytics.com/images/zidane.jpg" # Can be a file, Path, PIL, OpenCV, numpy, or list of images + +# Run inference +results = model(img) + +# Display results +results.print() # Other options: .show(), .save(), .crop(), .pandas(), etc. +``` + +## Inference with detect.py + +Harness `detect.py` for versatile inference on various sources. It automatically fetches [models](https://github.com/ultralytics/yolov5/tree/master/models) from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases) and saves results with ease. + +```bash +python detect.py --weights yolov5s.pt --source 0 # webcam + img.jpg # image + vid.mp4 # video + screen # screenshot + path/ # directory + list.txt # list of images + list.streams # list of streams + 'path/*.jpg' # glob + 'https://youtu.be/LNwODJXcvt4' # YouTube + 'rtsp://example.com/media.mp4' # RTSP, RTMP, HTTP stream +``` + +## Training + +Replicate the YOLOv5 [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh) benchmarks with the instructions below. The necessary [models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) are pulled directly from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). Training YOLOv5n/s/m/l/x on a V100 GPU should typically take 1/2/4/6/8 days respectively (note that [Multi-GPU](https://docs.ultralytics.com/yolov5/tutorials/multi_gpu_training) setups work faster). Maximize performance by using the highest possible `--batch-size` or use `--batch-size -1` for the YOLOv5 [AutoBatch](https://github.com/ultralytics/yolov5/pull/5092) feature. The following batch sizes are ideal for V100-16GB GPUs. + +```bash +python train.py --data coco.yaml --epochs 300 --weights '' --cfg yolov5n.yaml --batch-size 128 + yolov5s 64 + yolov5m 40 + yolov5l 24 + yolov5x 16 +``` + +YOLO training curves + +To conclude, YOLOv5 is not only a state-of-the-art tool for object detection but also a testament to the power of machine learning in transforming the way we interact with the world through visual understanding. As you progress through this guide and begin applying YOLOv5 to your projects, remember that you are at the forefront of a technological revolution, capable of achieving remarkable feats. Should you need further insights or support from fellow visionaries, you're invited to our [GitHub repository](https://github.com/ultralytics/yolov5) home to a thriving community of developers and researchers. Keep exploring, keep innovating, and enjoy the marvels of YOLOv5. Happy detecting! ๐ŸŒ ๐Ÿ” diff --git a/ultralytics/docs/en/yolov5/quickstart_tutorial.md:Zone.Identifier b/ultralytics/docs/en/yolov5/quickstart_tutorial.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/quickstart_tutorial.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/architecture_description.md b/ultralytics/docs/en/yolov5/tutorials/architecture_description.md new file mode 100755 index 0000000..1fb8233 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/architecture_description.md @@ -0,0 +1,223 @@ +--- +comments: true +description: Explore the architecture of YOLOv5, an object detection algorithm by Ultralytics. 
Understand the model structure, data augmentation methods, training strategies, and loss computation techniques. +keywords: Ultralytics, YOLOv5, Object Detection, Architecture, Model Structure, Data Augmentation, Training Strategies, Loss Computation +--- + +# Ultralytics YOLOv5 Architecture + +YOLOv5 (v6.0/6.1) is a powerful object detection algorithm developed by Ultralytics. This article dives deep into the YOLOv5 architecture, data augmentation strategies, training methodologies, and loss computation techniques. This comprehensive understanding will help improve your practical application of object detection in various fields, including surveillance, autonomous vehicles, and image recognition. + +## 1. Model Structure + +YOLOv5's architecture consists of three main parts: + +- **Backbone**: This is the main body of the network. For YOLOv5, the backbone is designed using the `New CSP-Darknet53` structure, a modification of the Darknet architecture used in previous versions. +- **Neck**: This part connects the backbone and the head. In YOLOv5, `SPPF` and `New CSP-PAN` structures are utilized. +- **Head**: This part is responsible for generating the final output. YOLOv5 uses the `YOLOv3 Head` for this purpose. + +The structure of the model is depicted in the image below. The model structure details can be found in `yolov5l.yaml`. + +![yolov5](https://user-images.githubusercontent.com/31005897/172404576-c260dcf9-76bb-4bc8-b6a9-f2d987792583.png) + +YOLOv5 introduces some minor changes compared to its predecessors: + +1. The `Focus` structure, found in earlier versions, is replaced with a `6x6 Conv2d` structure. This change boosts efficiency ([#4825](https://github.com/ultralytics/yolov5/issues/4825)). +2. The `SPP` structure is replaced with `SPPF`. This alteration more than doubles the speed of processing: repeatedly applying a single 5x5 max-pool reproduces the receptive fields of the 9x9 and 13x13 pools while reusing intermediate results, so `SPPF` yields identical outputs at a fraction of the cost. + +To test the speed of `SPP` and `SPPF`, the following code can be used:
+**SPP vs SPPF speed profiling example** + +```python +import time +import torch +import torch.nn as nn + + +class SPP(nn.Module): + def __init__(self): + super().__init__() + self.maxpool1 = nn.MaxPool2d(5, 1, padding=2) + self.maxpool2 = nn.MaxPool2d(9, 1, padding=4) + self.maxpool3 = nn.MaxPool2d(13, 1, padding=6) + + def forward(self, x): + o1 = self.maxpool1(x) + o2 = self.maxpool2(x) + o3 = self.maxpool3(x) + return torch.cat([x, o1, o2, o3], dim=1) + + +class SPPF(nn.Module): + def __init__(self): + super().__init__() + self.maxpool = nn.MaxPool2d(5, 1, padding=2) + + def forward(self, x): + o1 = self.maxpool(x) + o2 = self.maxpool(o1) + o3 = self.maxpool(o2) + return torch.cat([x, o1, o2, o3], dim=1) + + +def main(): + input_tensor = torch.rand(8, 32, 16, 16) + spp = SPP() + sppf = SPPF() + output1 = spp(input_tensor) + output2 = sppf(input_tensor) + + print(torch.equal(output1, output2)) # True: both modules produce identical outputs + + t_start = time.time() + for _ in range(100): + spp(input_tensor) + print(f"SPP time: {time.time() - t_start}") + + t_start = time.time() + for _ in range(100): + sppf(input_tensor) + print(f"SPPF time: {time.time() - t_start}") + + +if __name__ == '__main__': + main() +``` + +Result: + +``` +True +SPP time: 0.5373051166534424 +SPPF time: 0.20780706405639648 +```
+ +## 2. Data Augmentation Techniques + +YOLOv5 employs various data augmentation techniques to improve the model's ability to generalize and reduce overfitting. These techniques include: + +- **Mosaic Augmentation**: An image processing technique that combines four training images into one in ways that encourage object detection models to better handle various object scales and translations. + + ![mosaic](https://user-images.githubusercontent.com/31005897/159109235-c7aad8f2-1d4f-41f9-8d5f-b2fde6f2885e.png) + +- **Copy-Paste Augmentation**: An innovative data augmentation method that copies random patches from an image and pastes them onto another randomly chosen image, effectively generating a new training sample. + + ![copy-paste](https://user-images.githubusercontent.com/31005897/159116277-91b45033-6bec-4f82-afc4-41138866628e.png) + +- **Random Affine Transformations**: This includes random rotation, scaling, translation, and shearing of the images. + + ![random-affine](https://user-images.githubusercontent.com/31005897/159109326-45cd5acb-14fa-43e7-9235-0f21b0021c7d.png) + +- **MixUp Augmentation**: A method that creates composite images by taking a linear combination of two images and their associated labels. + + ![mixup](https://user-images.githubusercontent.com/31005897/159109361-3b24333b-f481-478b-ae00-df7838f0b5cd.png) + +- **Albumentations**: A powerful library for image augmentation that supports a wide variety of augmentation techniques. +- **HSV Augmentation**: Random changes to the Hue, Saturation, and Value of the images. + + ![hsv](https://user-images.githubusercontent.com/31005897/159109407-83d100ba-1aba-4f4b-aa03-4f048f815981.png) + +- **Random Horizontal Flip**: An augmentation method that randomly flips images horizontally. + + ![horizontal-flip](https://user-images.githubusercontent.com/31005897/159109429-0d44619a-a76a-49eb-bfc0-6709860c043e.png) + +## 3. Training Strategies + +YOLOv5 applies several sophisticated training strategies to enhance the model's performance. They include: + +- **Multiscale Training**: The input images are randomly rescaled within a range of 0.5 to 1.5 times their original size during the training process. +- **AutoAnchor**: This strategy optimizes the prior anchor boxes to match the statistical characteristics of the ground truth boxes in your custom data. +- **Warmup and Cosine LR Scheduler**: A method to adjust the learning rate to enhance model performance. +- **Exponential Moving Average (EMA)**: A strategy that uses the average of parameters over past steps to stabilize the training process and reduce generalization error. +- **Mixed Precision Training**: A method to perform operations in half-precision format, reducing memory usage and enhancing computational speed. +- **Hyperparameter Evolution**: A strategy to automatically tune hyperparameters to achieve optimal performance. + +## 4. Additional Features + +### 4.1 Compute Losses + +The loss in YOLOv5 is computed as a combination of three individual loss components: + +- **Classes Loss (BCE Loss)**: A Binary Cross-Entropy loss that measures the error for the classification task. +- **Objectness Loss (BCE Loss)**: Another Binary Cross-Entropy loss that calculates the error in detecting whether an object is present in a particular grid cell or not. +- **Location Loss (CIoU Loss)**: A Complete IoU loss that measures the error in localizing the object within the grid cell.
+ +The overall loss function is depicted by: + +![loss](https://latex.codecogs.com/svg.image?Loss=\lambda_1L_{cls}+\lambda_2L_{obj}+\lambda_3L_{loc}) + +### 4.2 Balance Losses + +The objectness losses of the three prediction layers (`P3`, `P4`, `P5`) are weighted differently. The balance weights are `[4.0, 1.0, 0.4]` respectively. This approach ensures that the predictions at different scales contribute appropriately to the total loss. + +![obj_loss](https://latex.codecogs.com/svg.image?L_{obj}=4.0\cdot&space;L_{obj}^{small}+1.0\cdot&space;L_{obj}^{medium}+0.4\cdot&space;L_{obj}^{large}) + +### 4.3 Eliminate Grid Sensitivity + +The YOLOv5 architecture makes some important changes to the box prediction strategy compared to earlier versions of YOLO. In YOLOv2 and YOLOv3, the box coordinates were directly predicted using the activation of the last layer. + +![b_x](https://latex.codecogs.com/svg.image?b_x=\sigma(t_x)+c_x) +![b_y](https://latex.codecogs.com/svg.image?b_y=\sigma(t_y)+c_y) +![b_w](https://latex.codecogs.com/svg.image?b_w=p_w\cdot&space;e^{t_w}) +![b_h](https://latex.codecogs.com/svg.image?b_h=p_h\cdot&space;e^{t_h}) + +YOLOv5 grid computation + +However, in YOLOv5, the formula for predicting the box coordinates has been updated to reduce grid sensitivity and prevent the model from predicting unbounded box dimensions. + +The revised formulas for calculating the predicted bounding box are as follows: + +![bx](https://latex.codecogs.com/svg.image?b_x=(2\cdot\sigma(t_x)-0.5)+c_x) +![by](https://latex.codecogs.com/svg.image?b_y=(2\cdot\sigma(t_y)-0.5)+c_y) +![bw](https://latex.codecogs.com/svg.image?b_w=p_w\cdot(2\cdot\sigma(t_w))^2) +![bh](https://latex.codecogs.com/svg.image?b_h=p_h\cdot(2\cdot\sigma(t_h))^2) + +Compare the center point offset before and after scaling. The center point offset range is adjusted from (0, 1) to (-0.5, 1.5), which makes it easier for predicted offsets to reach exactly 0 or 1. + +YOLOv5 grid scaling + +Compare the height and width scaling ratio (relative to the anchor) before and after adjustment. The original yolo/darknet box equations have a serious flaw: width and height are completely unbounded, since they are simply `out = exp(in)`. This is dangerous, as it can lead to runaway gradients, instabilities, NaN losses, and ultimately a complete loss of training (see [this issue](https://github.com/ultralytics/yolov5/issues/471#issuecomment-662009779)). + +YOLOv5 unbounded scaling + +### 4.4 Build Targets + +The build target process in YOLOv5 is critical for training efficiency and model accuracy. It involves assigning ground truth boxes to the appropriate grid cells in the output map and matching them with the appropriate anchor boxes. + +This process follows these steps: + +- Calculate the ratio between the ground truth box dimensions and the dimensions of each anchor template. + +![rw](https://latex.codecogs.com/svg.image?r_w=w_{gt}/w_{at}) + +![rh](https://latex.codecogs.com/svg.image?r_h=h_{gt}/h_{at}) + +![rwmax](https://latex.codecogs.com/svg.image?r_w^{max}=max(r_w,1/r_w)) + +![rhmax](https://latex.codecogs.com/svg.image?r_h^{max}=max(r_h,1/r_h)) + +![rmax](https://latex.codecogs.com/svg.image?r^{max}=max(r_w^{max},r_h^{max})) + +![match](https://latex.codecogs.com/svg.image?r^{max}<{\rm&space;anchor_t}) + +YOLOv5 IoU computation + +- If the calculated ratio is within the threshold, match the ground truth box with the corresponding anchor.
+ +YOLOv5 grid overlap + +- Assign the matched anchor to the appropriate cells, keeping in mind that because the revised center point offset range is (-0.5, 1.5) rather than (0, 1), a ground truth box can be assigned to more than one anchor and to neighboring grid cells. + +YOLOv5 anchor selection + +This way, the build targets process ensures that each ground truth object is properly assigned and matched during the training process, allowing YOLOv5 to learn the task of object detection more effectively. + +## Conclusion + +In conclusion, YOLOv5 represents a significant step forward in the development of real-time object detection models. By incorporating various new features, enhancements, and training strategies, it surpasses previous versions of the YOLO family in performance and efficiency. + +The primary enhancements in YOLOv5 include the use of a dynamic architecture, an extensive range of data augmentation techniques, innovative training strategies, as well as important adjustments in computing losses and the process of building targets. All these innovations significantly improve the accuracy and efficiency of object detection while retaining a high degree of speed, which is the trademark of YOLO models. diff --git a/ultralytics/docs/en/yolov5/tutorials/architecture_description.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/architecture_description.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/architecture_description.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md b/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md new file mode 100755 index 0000000..dda96f8 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md @@ -0,0 +1,237 @@ +--- +comments: true +description: Learn how ClearML can enhance your YOLOv5 pipeline โ€“ track your training runs, version your data, remotely monitor your models and optimize performance. +keywords: ClearML, YOLOv5, Ultralytics, AI toolbox, training data, remote training, hyperparameter optimization, YOLOv5 model +--- + +# ClearML Integration + +Clear|ML + +## About ClearML + +[ClearML](https://cutt.ly/yolov5-tutorial-clearml) is an [open-source](https://github.com/allegroai/clearml) toolbox designed to save you time โฑ๏ธ. + +๐Ÿ”จ Track every YOLOv5 training run in the experiment manager + +๐Ÿ”ง Version and easily access your custom training data with the integrated ClearML Data Versioning Tool + +๐Ÿ”ฆ Remotely train and monitor your YOLOv5 training runs using ClearML Agent + +๐Ÿ”ฌ Get the very best mAP using ClearML Hyperparameter Optimization + +๐Ÿ”ญ Turn your newly trained YOLOv5 model into an API with just a few commands using ClearML Serving
+And so much more. It's up to you how many of these tools you want to use: you can stick to the experiment manager, or chain them all together into an impressive pipeline!
+
+ +![ClearML scalars dashboard](https://github.com/thepycoder/clearml_screenshots/raw/main/experiment_manager_with_compare.gif) + +
+
+ +## ๐Ÿฆพ Setting Things Up + +To keep track of your experiments and/or data, ClearML needs to communicate with a server. You have two options to get one: either sign up for free to the [ClearML Hosted Service](https://cutt.ly/yolov5-tutorial-clearml), or set up your own server ([see here](https://clear.ml/docs/latest/docs/deploying_clearml/clearml_server)). The server itself is open source, so even if you're dealing with sensitive data, you should be good to go! + +- Install the `clearml` python package: + + ```bash + pip install clearml + ``` + +- Connect the ClearML SDK to the server by [creating credentials](https://app.clear.ml/settings/workspace-configuration) (at the top right, go to Settings -> Workspace -> Create new credentials), then execute the command below and follow the instructions: + + ```bash + clearml-init + ``` + +That's it! You're done ๐Ÿ˜Ž
+ +## ๐Ÿš€ Training YOLOv5 With ClearML + +To enable ClearML experiment tracking, simply install the ClearML pip package (the quotes prevent the shell from treating `>=` as a redirection). + +```bash +pip install "clearml>=1.2.0" +``` + +This will enable integration with the YOLOv5 training script. Every training run from now on will be captured and stored by the ClearML experiment manager. + +If you want to change the `project_name` or `task_name`, use the `--project` and `--name` arguments of the `train.py` script. By default, the project will be called `YOLOv5` and the task `Training`. PLEASE NOTE: ClearML uses `/` as a delimiter for subprojects, so be careful when using `/` in your project name! + +```bash +python train.py --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --cache +``` + +or with custom project and task name: + +```bash +python train.py --project my_project --name my_training --img 640 --batch 16 --epochs 3 --data coco128.yaml --weights yolov5s.pt --cache +``` + +This will capture: + +- Source code + uncommitted changes +- Installed packages +- (Hyper)parameters +- Model files (use `--save-period n` to save a checkpoint every n epochs) +- Console output +- Scalars (mAP_0.5, mAP_0.5:0.95, precision, recall, losses, learning rates, ...) +- General info such as machine details, runtime, creation date, etc. +- All produced plots such as label correlogram and confusion matrix +- Images with bounding boxes per epoch +- Mosaic per epoch +- Validation images per epoch + +That's a lot, right? ๐Ÿคฏ Now, we can visualize all of this information in the ClearML UI to get an overview of our training progress. Add custom columns to the table view (such as mAP_0.5) so you can easily sort on the best performing model. Or select multiple experiments and directly compare them! + +There's even more we can do with all of this information, like hyperparameter optimization and remote execution, so keep reading if you want to see how that works! + +### ๐Ÿ”— Dataset Version Management + +Versioning your data separately from your code is generally a good idea and makes it easy to acquire the latest version too. This repository supports supplying a dataset version ID, and it will make sure to get the data if it's not there yet. In addition, this workflow also saves the used dataset ID as part of the task parameters, so you will always know for sure which data was used in which experiment! + +![ClearML Dataset Interface](https://github.com/thepycoder/clearml_screenshots/raw/main/clearml_data.gif) + +### Prepare Your Dataset + +The YOLOv5 repository supports a number of different datasets by using YAML files containing their information. By default, datasets are downloaded to the `../datasets` folder in relation to the repository root folder. So if you downloaded the `coco128` dataset using the link in the YAML or with the scripts provided by yolov5, you get this folder structure: + +``` +.. +|_ yolov5 +|_ datasets + |_ coco128 + |_ images + |_ labels + |_ LICENSE + |_ README.txt +``` + +But this can be any dataset you wish. Feel free to use your own, as long as you keep to this folder structure. + +Next, โš ๏ธ**copy the corresponding YAML file to the root of the dataset folder**โš ๏ธ. This YAML file contains the information ClearML will need to properly use the dataset. You can make this yourself too, of course, just follow the structure of the example YAMLs. + +Basically we need the following keys: `path`, `train`, `test`, `val`, `nc`, `names`. A minimal example is sketched after the training command below. + +``` +..
+|_ yolov5 +|_ datasets + |_ coco128 + |_ images + |_ labels + |_ coco128.yaml # <---- HERE! + |_ LICENSE + |_ README.txt +``` + +### Upload Your Dataset + +To get this dataset into ClearML as a versioned dataset, go to the dataset root folder and run the following command: + +```bash +cd coco128 +clearml-data sync --project YOLOv5 --name coco128 --folder . +``` + +The command `clearml-data sync` is actually a shorthand command. You could also run these commands one after the other: + +```bash +# Optionally add --parent <parent_dataset_id> if you want to base +# this version on another dataset version, so no duplicate files are uploaded! +clearml-data create --name coco128 --project YOLOv5 +clearml-data add --files . +clearml-data close +``` + +### Run Training Using A ClearML Dataset + +Now that you have a ClearML dataset, you can very simply use it to train custom YOLOv5 ๐Ÿš€ models! + +```bash +python train.py --img 640 --batch 16 --epochs 3 --data clearml://<your_dataset_id> --weights yolov5s.pt --cache +```
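+For reference, here is a minimal sketch of such a dataset YAML with the keys listed above. The paths, class count, and class names are illustrative assumptions, not the contents of any official file:
+
+```yaml
+# Minimal dataset YAML sketch (illustrative values only)
+path: ../datasets/my_dataset # dataset root directory
+train: images/train # training images, relative to 'path'
+val: images/val # validation images, relative to 'path'
+test: # optional test images
+nc: 2 # number of classes
+names: ['cat', 'dog'] # class names
+```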
+ +### ๐Ÿ‘€ Hyperparameter Optimization + +Now that we have our experiments and data versioned, it's time to take a look at what we can build on top of them! + +Using the code information, installed packages and environment details, the experiment itself is now **completely reproducible**. In fact, ClearML allows you to clone an experiment and even change its parameters. We can then just rerun it with these new parameters automatically. This is basically what HPO does! + +To **run hyperparameter optimization locally**, we've included a pre-made script for you. Just make sure a training task has been run at least once, so it is in the ClearML experiment manager; we will essentially clone it and change its hyperparameters. + +You'll need to fill in the ID of this `template task` in the script found at `utils/loggers/clearml/hpo.py` and then just run it :) You can change `task.execute_locally()` to `task.execute()` to put it in a ClearML queue and have a remote agent work on it instead. + +```bash +# To use optuna, install it first, otherwise you can change the optimizer to just be RandomSearch +pip install optuna +python utils/loggers/clearml/hpo.py +``` + +![HPO](https://github.com/thepycoder/clearml_screenshots/raw/main/hpo.png) + +## ๐Ÿคฏ Remote Execution (advanced) + +Running HPO locally is really handy, but what if we want to run our experiments on a remote machine instead? Maybe you have access to a very powerful GPU machine on-site, or you have some budget to use cloud GPUs. This is where the ClearML Agent comes into play. Check out what the agent can do here: + +- [YouTube video](https://youtu.be/MX3BrXnaULs) +- [Documentation](https://clear.ml/docs/latest/docs/clearml_agent) + +In short: every experiment tracked by the experiment manager contains enough information to reproduce it on a different machine (installed packages, uncommitted changes etc.). So a ClearML agent does just that: it listens to a queue for incoming tasks and when it finds one, it recreates the environment and runs it while still reporting scalars, plots etc. to the experiment manager. + +You can turn any machine (a cloud VM, a local GPU machine, your own laptop, ...) into a ClearML agent by simply running: + +```bash +clearml-agent daemon --queue <queues_to_listen_to> [--docker] +``` + +### Cloning, Editing And Enqueuing + +With our agent running, we can give it some work. Remember from the HPO section that we can clone a task and edit the hyperparameters? We can do that from the interface too! + +๐Ÿช„ Clone the experiment by right-clicking it + +๐ŸŽฏ Edit the hyperparameters to what you wish them to be + +โณ Enqueue the task to any of the queues by right-clicking it + +![Enqueue a task from the UI](https://github.com/thepycoder/clearml_screenshots/raw/main/enqueue.gif) + +### Executing A Task Remotely + +Now you can clone a task like we explained above, or simply mark your current script by adding `task.execute_remotely()` and on execution it will be put into a queue for the agent to start working on! + +To run the YOLOv5 training script remotely, all you have to do is add this line to the `train.py` script after the clearml logger has been instantiated: + +```python +# ...
+# Loggers +data_dict = None +if RANK in {-1, 0}: + loggers = Loggers(save_dir, weights, opt, hyp, LOGGER) # loggers instance + if loggers.clearml: + loggers.clearml.task.execute_remotely(queue="my_queue") # <------ ADD THIS LINE + # data_dict is either None if the user did not choose a ClearML dataset, or is filled in by ClearML + data_dict = loggers.clearml.data_dict +# ... +``` + +When running the training script after this change, Python will run the script up until that line, after which it will package the code and send it to the queue instead! + +### Autoscaling workers + +ClearML comes with autoscalers too! This tool will automatically spin up new remote machines in the cloud of your choice (AWS, GCP, Azure) and turn them into ClearML agents for you whenever there are experiments detected in the queue. Once the tasks are processed, the autoscaler will automatically shut down the remote machines, and you stop paying! + +Check out the autoscalers getting started video below. + +[![Watch the video](https://img.youtube.com/vi/j4XVMAaUt3E/0.jpg)](https://youtu.be/j4XVMAaUt3E) diff --git a/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/clearml_logging_integration.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md b/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md new file mode 100755 index 0000000..a911909 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md @@ -0,0 +1,261 @@ +--- +comments: true +description: Learn how to set up and use Comet to enhance your YOLOv5 model training, metrics tracking and visualization. Includes a step-by-step guide to integrate Comet with YOLOv5. +keywords: YOLOv5, Comet, Machine Learning, Ultralytics, Real time metrics tracking, Hyperparameters, Model checkpoints, Model predictions, YOLOv5 training, Comet Credentials +--- + +![Comet](https://cdn.comet.ml/img/notebook_logo.png) + +# YOLOv5 with Comet + +This guide will cover how to use YOLOv5 with [Comet](https://bit.ly/yolov5-readme-comet2). + +## About Comet + +Comet builds tools that help data scientists, engineers, and team leaders accelerate and optimize machine learning and deep learning models. + +Track and visualize model metrics in real time, save your hyperparameters, datasets, and model checkpoints, and visualize your model predictions with [Comet Custom Panels](https://www.comet.com/docs/v2/guides/comet-dashboard/code-panels/about-panels/?utm_source=yolov5&utm_medium=partner&utm_campaign=partner_yolov5_2022&utm_content=github)! +Comet makes sure you never lose track of your work and makes it easy to share results and collaborate across teams of all sizes! + +## Getting Started + +### Install Comet + +```shell +pip install comet_ml +``` + +### Configure Comet Credentials + +There are two ways to configure Comet with YOLOv5. + +You can either set your credentials through environment variables: + +**Environment Variables** + +```shell +export COMET_API_KEY=<Your Comet API Key> +export COMET_PROJECT_NAME=<Your Comet Project Name> # This will default to 'yolov5' +``` + +Or create a `.comet.config` file in your working directory and set your credentials there.
+ +**Comet Configuration File** + +``` +[comet] +api_key=<Your Comet API Key> +project_name=<Your Comet Project Name> # This will default to 'yolov5' +``` + +### Run the Training Script + +```shell +# Train YOLOv5s on COCO128 for 5 epochs +python train.py --img 640 --batch 16 --epochs 5 --data coco128.yaml --weights yolov5s.pt +``` + +That's it! Comet will automatically log your hyperparameters, command line arguments, and training and validation metrics. You can visualize and analyze your runs in the Comet UI. + +yolo-ui + +## Try out an Example! + +Check out an example of a [completed run here](https://www.comet.com/examples/comet-example-yolov5/a0e29e0e9b984e4a822db2a62d0cb357?experiment-tab=chart&showOutliers=true&smoothing=0&transformY=smoothing&xAxis=step&utm_source=yolov5&utm_medium=partner&utm_campaign=partner_yolov5_2022&utm_content=github). + +Or better yet, try it out yourself in this Colab Notebook: + +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1RG0WOQyxlDlo5Km8GogJpIEJlg_5lyYO?usp=sharing) + +## Log automatically + +By default, Comet will log the following items: + +## Metrics + +- Box Loss, Object Loss, Classification Loss for the training and validation data +- mAP_0.5, mAP_0.5:0.95 metrics for the validation data +- Precision and Recall for the validation data + +## Parameters + +- Model Hyperparameters +- All parameters passed through the command line options + +## Visualizations + +- Confusion Matrix of the model predictions on the validation data +- Plots for the PR and F1 curves across all classes +- Correlogram of the Class Labels + +## Configure Comet Logging + +Comet can be configured to log additional data either through command line flags passed to the training script or through environment variables. + +```shell +export COMET_MODE=online # Set whether to run Comet in 'online' or 'offline' mode. Defaults to online +export COMET_MODEL_NAME=<your model name> # Set the name for the saved model. Defaults to yolov5 +export COMET_LOG_CONFUSION_MATRIX=false # Set to disable logging a Comet Confusion Matrix. Defaults to true +export COMET_MAX_IMAGE_UPLOADS=<number of images> # Controls how many total image predictions to log to Comet. Defaults to 100. +export COMET_LOG_PER_CLASS_METRICS=true # Set to log evaluation metrics for each detected class at the end of training. Defaults to false +export COMET_DEFAULT_CHECKPOINT_FILENAME=<your checkpoint filename> # Set this if you would like to resume training from a different checkpoint. Defaults to 'last.pt' +export COMET_LOG_BATCH_LEVEL_METRICS=true # Set this if you would like to log training metrics at the batch level. Defaults to false. +export COMET_LOG_PREDICTIONS=true # Set this to false to disable logging model predictions +``` + +## Logging Checkpoints with Comet + +Logging models to Comet is disabled by default. To enable it, pass the `--save-period` argument to the training script. This will save the logged checkpoints to Comet based on the interval value provided by `--save-period`. + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--save-period 1 +``` + +## Logging Model Predictions + +By default, model predictions (images, ground truth labels and bounding boxes) will be logged to Comet. + +You can control the frequency of logged predictions and the associated images by passing the `bbox_interval` command line argument. Predictions can be visualized using Comet's Object Detection Custom Panel. This frequency corresponds to every Nth batch of data per epoch.
In the example below, we are logging every 2nd batch of data for each epoch. + +**Note:** The YOLOv5 validation dataloader will default to a batch size of 32, so you will have to set the logging frequency accordingly. + +Here is an [example project using the Panel](https://www.comet.com/examples/comet-example-yolov5?shareable=YcwMiJaZSXfcEXpGOHDD12vA1&utm_source=yolov5&utm_medium=partner&utm_campaign=partner_yolov5_2022&utm_content=github). + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--bbox_interval 2 +``` + +### Controlling the number of Prediction Images logged to Comet + +When logging predictions from YOLOv5, Comet will log the images associated with each set of predictions. By default, a maximum of 100 validation images are logged. You can increase or decrease this number using the `COMET_MAX_IMAGE_UPLOADS` environment variable. + +```shell +env COMET_MAX_IMAGE_UPLOADS=200 python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--bbox_interval 1 +``` + +### Logging Class Level Metrics + +Use the `COMET_LOG_PER_CLASS_METRICS` environment variable to log mAP, precision, recall, and f1 for each class. + +```shell +env COMET_LOG_PER_CLASS_METRICS=true python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt +``` + +## Uploading a Dataset to Comet Artifacts + +If you would like to store your data using [Comet Artifacts](https://www.comet.com/docs/v2/guides/data-management/using-artifacts/#learn-more?utm_source=yolov5&utm_medium=partner&utm_campaign=partner_yolov5_2022&utm_content=github), you can do so using the `--upload_dataset` flag. + +The dataset must be organized in the way described in the [YOLOv5 documentation](train_custom_data.md). The dataset config `yaml` file must follow the same format as that of the `coco128.yaml` file. + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data coco128.yaml \ +--weights yolov5s.pt \ +--upload_dataset +``` + +You can find the uploaded dataset in the Artifacts tab in your Comet Workspace. +artifact-1 + +You can preview the data directly in the Comet UI. +artifact-2 + +Artifacts are versioned and also support adding metadata about the dataset. Comet will automatically log the metadata from your dataset `yaml` file. +artifact-3 + +### Using a saved Artifact + +If you would like to use a dataset from Comet Artifacts, set the `path` variable in your dataset `yaml` file to point to the following Artifact resource URL. + +``` +# contents of artifact.yaml file +path: "comet://<workspace name>/<artifact name>:<artifact version or alias>" +``` + +Then pass this file to your training script in the following way: + +```shell +python train.py \ +--img 640 \ +--batch 16 \ +--epochs 5 \ +--data artifact.yaml \ +--weights yolov5s.pt +``` + +Artifacts also allow you to track the lineage of data as it flows through your Experimentation workflow. Here you can see a graph that shows you all the experiments that have used your uploaded dataset. +artifact-4 + +## Resuming a Training Run + +If your training run is interrupted for any reason, e.g. disrupted internet connection, you can resume the run using the `--resume` flag and the Comet Run Path. + +The Run Path has the following format `comet://<your workspace name>/<your project name>/<experiment id>`.
+ +This will restore the run to its state before the interruption, which includes restoring the model from a checkpoint, restoring all hyperparameters and training arguments, and downloading Comet dataset Artifacts if they were used in the original run. The resumed run will continue logging to the existing Experiment in the Comet UI. + +```shell +python train.py \ +--resume "comet://<your run path>" +``` + +## Hyperparameter Search with the Comet Optimizer + +YOLOv5 is also integrated with Comet's Optimizer, making it simple to visualize hyperparameter sweeps in the Comet UI. + +### Configuring an Optimizer Sweep + +To configure the Comet Optimizer, you will have to create a JSON file with the information about the sweep. An example file has been provided in `utils/loggers/comet/optimizer_config.json`. + +```shell +python utils/loggers/comet/hpo.py \ + --comet_optimizer_config "utils/loggers/comet/optimizer_config.json" +``` + +The `hpo.py` script accepts the same arguments as `train.py`. If you wish to pass additional arguments to your sweep, simply add them after the script. + +```shell +python utils/loggers/comet/hpo.py \ + --comet_optimizer_config "utils/loggers/comet/optimizer_config.json" \ + --save-period 1 \ + --bbox_interval 1 +``` + +### Running a Sweep in Parallel + +```shell +comet optimizer -j <number of workers> utils/loggers/comet/hpo.py \ + utils/loggers/comet/optimizer_config.json +``` + +## Visualizing Results + +Comet provides a number of ways to visualize the results of your sweep. Take a look at a [project with a completed sweep here](https://www.comet.com/examples/comet-example-yolov5/view/PrlArHGuuhDTKC1UuBmTtOSXD/panels?utm_source=yolov5&utm_medium=partner&utm_campaign=partner_yolov5_2022&utm_content=github). + +hyperparameter-yolo diff --git a/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/comet_logging_integration.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md b/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md new file mode 100755 index 0000000..13e29ca --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md @@ -0,0 +1,166 @@ +--- +comments: true +description: Learn how to optimize YOLOv5 with hyperparameter evolution using a Genetic Algorithm. This guide provides steps to initialize, define, evolve and visualize hyperparameters for top performance. +keywords: Ultralytics, YOLOv5, Hyperparameter Optimization, Genetic Algorithm, Machine Learning, Deep Learning, AI, Object Detection, Image Classification, Python +--- + +๐Ÿ“š This guide explains **hyperparameter evolution** for YOLOv5 ๐Ÿš€. Hyperparameter evolution is a method of [Hyperparameter Optimization](https://en.wikipedia.org/wiki/Hyperparameter_optimization) that uses a [Genetic Algorithm](https://en.wikipedia.org/wiki/Genetic_algorithm) (GA). + +Hyperparameters in ML control various aspects of training, and finding optimal values for them can be a challenge. Traditional methods like grid searches can quickly become intractable due to 1) the high-dimensional search space, 2) unknown correlations among the dimensions, and 3) the expensive nature of evaluating the fitness at each point, making GA a suitable candidate for hyperparameter searches.
+ +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +## 1. Initialize Hyperparameters + +YOLOv5 has about 30 hyperparameters used for various training settings. These are defined in `*.yaml` files in the `/data/hyps` directory. Better initial guesses will produce better final results, so it is important to initialize these values properly before evolving. If in doubt, simply use the default values, which are optimized for YOLOv5 COCO training from scratch. + +```yaml +# YOLOv5 ๐Ÿš€ by Ultralytics, AGPL-3.0 license +# Hyperparameters for low-augmentation COCO training from scratch +# python train.py --batch 64 --cfg yolov5n6.yaml --weights '' --data coco.yaml --img 640 --epochs 300 --linear +# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.01 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.5 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 1.0 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.5 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.0 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) +``` + +## 2. Define Fitness + +Fitness is the value we seek to maximize. In YOLOv5 we define a default fitness function as a weighted combination of metrics: `mAP@0.5` contributes 10% of the weight and `mAP@0.5:0.95` contributes the remaining 90%, with [Precision `P` and Recall `R`](https://en.wikipedia.org/wiki/Precision_and_recall) absent. You may adjust these as you see fit or use the default fitness definition in utils/metrics.py (recommended). + +```python +def fitness(x): + # Model fitness as a weighted combination of metrics + w = [0.0, 0.0, 0.1, 0.9] # weights for [P, R, mAP@0.5, mAP@0.5:0.95] + return (x[:, :4] * w).sum(1) +``` + +## 3. Evolve + +Evolution is performed about a base scenario which we seek to improve upon. 
The base scenario in this example is finetuning COCO128 for 10 epochs using pretrained YOLOv5s. The base scenario training command is: + +```bash +python train.py --epochs 10 --data coco128.yaml --weights yolov5s.pt --cache +``` + +To evolve hyperparameters **specific to this scenario**, starting from our initial values defined in **Section 1.**, and maximizing the fitness defined in **Section 2.**, append `--evolve`: + +```bash +# Single-GPU +python train.py --epochs 10 --data coco128.yaml --weights yolov5s.pt --cache --evolve + +# Multi-GPU +for i in 0 1 2 3 4 5 6 7; do + sleep $(expr 30 \* $i) && # 30-second delay (optional) + echo 'Starting GPU '$i'...' && + nohup python train.py --epochs 10 --data coco128.yaml --weights yolov5s.pt --cache --device $i --evolve > evolve_gpu_$i.log & +done + +# Multi-GPU bash-while (not recommended) +for i in 0 1 2 3 4 5 6 7; do + sleep $(expr 30 \* $i) && # 30-second delay (optional) + echo 'Starting GPU '$i'...' && + "$(while true; do nohup python train.py... --device $i --evolve 1 > evolve_gpu_$i.log; done)" & +done +``` + +The default evolution settings will run the base scenario 300 times, i.e. for 300 generations. You can modify generations via the `--evolve` argument, i.e. `python train.py --evolve 1000`. + +The main genetic operators are **crossover** and **mutation**. In this work mutation is used, with an 80% probability and a 0.04 variance to create new offspring based on a combination of the best parents from all previous generations. Results are logged to `runs/evolve/exp/evolve.csv`, and the highest fitness offspring is saved every generation as `runs/evolve/hyp_evolved.yaml`: + +```yaml +# YOLOv5 Hyperparameter Evolution Results +# Best generation: 287 +# Last generation: 300 +# metrics/precision, metrics/recall, metrics/mAP_0.5, metrics/mAP_0.5:0.95, val/box_loss, val/obj_loss, val/cls_loss +# 0.54634, 0.55625, 0.58201, 0.33665, 0.056451, 0.042892, 0.013441 + +lr0: 0.01 # initial learning rate (SGD=1E-2, Adam=1E-3) +lrf: 0.2 # final OneCycleLR learning rate (lr0 * lrf) +momentum: 0.937 # SGD momentum/Adam beta1 +weight_decay: 0.0005 # optimizer weight decay 5e-4 +warmup_epochs: 3.0 # warmup epochs (fractions ok) +warmup_momentum: 0.8 # warmup initial momentum +warmup_bias_lr: 0.1 # warmup initial bias lr +box: 0.05 # box loss gain +cls: 0.5 # cls loss gain +cls_pw: 1.0 # cls BCELoss positive_weight +obj: 1.0 # obj loss gain (scale with pixels) +obj_pw: 1.0 # obj BCELoss positive_weight +iou_t: 0.20 # IoU training threshold +anchor_t: 4.0 # anchor-multiple threshold +# anchors: 3 # anchors per output layer (0 to ignore) +fl_gamma: 0.0 # focal loss gamma (efficientDet default gamma=1.5) +hsv_h: 0.015 # image HSV-Hue augmentation (fraction) +hsv_s: 0.7 # image HSV-Saturation augmentation (fraction) +hsv_v: 0.4 # image HSV-Value augmentation (fraction) +degrees: 0.0 # image rotation (+/- deg) +translate: 0.1 # image translation (+/- fraction) +scale: 0.5 # image scale (+/- gain) +shear: 0.0 # image shear (+/- deg) +perspective: 0.0 # image perspective (+/- fraction), range 0-0.001 +flipud: 0.0 # image flip up-down (probability) +fliplr: 0.5 # image flip left-right (probability) +mosaic: 1.0 # image mosaic (probability) +mixup: 0.0 # image mixup (probability) +copy_paste: 0.0 # segment copy-paste (probability) +``` + +We recommend a minimum of 300 generations of evolution for best results. 
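+To make the mutation step concrete, here is a minimal sketch of the operation described above, assuming hyperparameters are stored in a plain dict. The function name, noise model, and clipping bounds are illustrative; the actual implementation lives in the `--evolve` logic of train.py.
+
+```python
+import random
+
+
+def mutate(parent, mp=0.8, sigma=0.04):
+    """Sketch of GA mutation: perturb each hyperparameter with probability mp
+    using multiplicative Gaussian noise of std sigma, clipped to a sane range."""
+    child = {}
+    for k, v in parent.items():
+        factor = 1.0
+        if random.random() < mp:  # mutate this gene?
+            factor = min(max(1.0 + random.gauss(0.0, sigma), 0.3), 3.0)
+        child[k] = v * factor
+    return child
+
+
+# Example: create one offspring from the current best hyperparameters
+print(mutate({"lr0": 0.01, "momentum": 0.937, "weight_decay": 0.0005}))
+```
+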
Note that **evolution is generally expensive and time-consuming**, as the base scenario is trained hundreds of times, possibly requiring hundreds or thousands of GPU hours. + +## 4. Visualize + +`evolve.csv` is plotted as `evolve.png` by `utils.plots.plot_evolve()` after evolution finishes with one subplot per hyperparameter showing fitness (y-axis) vs hyperparameter values (x-axis). Yellow indicates higher concentrations. Vertical distributions indicate that a parameter has been disabled and does not mutate. This is user selectable in the `meta` dictionary in train.py, and is useful for fixing parameters and preventing them from evolving. + +![evolve](https://user-images.githubusercontent.com/26833433/89130469-f43e8e00-d4b9-11ea-9e28-f8ae3622516d.png) + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. diff --git a/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/hyperparameter_evolution.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md b/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md new file mode 100755 index 0000000..767ed2a --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md @@ -0,0 +1,146 @@ +--- +comments: true +description: Learn how to ensemble YOLOv5 models for improved mAP and Recall! Clone the repo, install requirements, and start testing and inference. +keywords: YOLOv5, object detection, ensemble learning, mAP, Recall +--- + +๐Ÿ“š This guide explains how to use YOLOv5 ๐Ÿš€ **model ensembling** during testing and inference for improved mAP and Recall. 
+ +From [https://en.wikipedia.org/wiki/Ensemble_learning](https://en.wikipedia.org/wiki/Ensemble_learning): +> Ensemble modeling is a process where multiple diverse models are created to predict an outcome, either by using many different modeling algorithms or using different training data sets. The ensemble model then aggregates the prediction of each base model and results in one final prediction for the unseen data. The motivation for using ensemble models is to reduce the generalization error of the prediction. As long as the base models are diverse and independent, the prediction error of the model decreases when the ensemble approach is used. The approach seeks the wisdom of crowds in making a prediction. Even though the ensemble model has multiple base models within the model, it acts and performs as a single model. + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +## Test Normally + +Before ensembling we want to establish the baseline performance of a single model. This command tests YOLOv5x on COCO val2017 at image size 640 pixels. `yolov5x.pt` is the largest and most accurate model available. Other options are `yolov5s.pt`, `yolov5m.pt` and `yolov5l.pt`, or your own checkpoint from training a custom dataset, e.g. `./weights/best.pt`. For details on all available models please see our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints). + +```bash +python val.py --weights yolov5x.pt --data coco.yaml --img 640 --half +``` + +Output: + +```shell +val: data=./data/coco.yaml, weights=['yolov5x.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.65, task=val, device=, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True +YOLOv5 ๐Ÿš€ v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Fusing layers... +Model Summary: 476 layers, 87730285 parameters, 0 gradients + +val: Scanning '../datasets/coco/val2017' images and labels...4952 found, 48 missing, 0 empty, 0 corrupted: 100% 5000/5000 [00:01<00:00, 2846.03it/s] +val: New cache created: ../datasets/coco/val2017.cache + Class Images Labels P R mAP@.5 mAP@.5:.95: 100% 157/157 [02:30<00:00, 1.05it/s] + all 5000 36335 0.746 0.626 0.68 0.49 +Speed: 0.1ms pre-process, 22.4ms inference, 1.4ms NMS per image at shape (32, 3, 640, 640) # <--- baseline speed + +Evaluating pycocotools mAP... saving runs/val/exp/yolov5x_predictions.json... +...
+ Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.504 # <--- baseline mAP + Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.688 + Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.546 + Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.351 + Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.551 + Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.644 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.382 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.628 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.681 # <--- baseline mAR + Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.524 + Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.735 + Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.826 +``` + +## Ensemble Test + +Multiple pretrained models may be ensembled together at test and inference time by simply appending extra models to the `--weights` argument in any existing val.py or detect.py command. This example tests an ensemble of 2 models together: + +- YOLOv5x +- YOLOv5l6 + +```bash +python val.py --weights yolov5x.pt yolov5l6.pt --data coco.yaml --img 640 --half +``` + +Output: + +```shell +val: data=./data/coco.yaml, weights=['yolov5x.pt', 'yolov5l6.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.6, task=val, device=, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True +YOLOv5 ๐Ÿš€ v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Fusing layers... +Model Summary: 476 layers, 87730285 parameters, 0 gradients # Model 1 +Fusing layers... +Model Summary: 501 layers, 77218620 parameters, 0 gradients # Model 2 +Ensemble created with ['yolov5x.pt', 'yolov5l6.pt'] # Ensemble notice + +val: Scanning '../datasets/coco/val2017.cache' images and labels... 4952 found, 48 missing, 0 empty, 0 corrupted: 100% 5000/5000 [00:00<00:00, 49695545.02it/s] + Class Images Labels P R mAP@.5 mAP@.5:.95: 100% 157/157 [03:58<00:00, 1.52s/it] + all 5000 36335 0.747 0.637 0.692 0.502 +Speed: 0.1ms pre-process, 39.5ms inference, 2.0ms NMS per image at shape (32, 3, 640, 640) # <--- ensemble speed + +Evaluating pycocotools mAP... saving runs/val/exp3/yolov5x_predictions.json... +... 
+ Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.515 # <--- ensemble mAP + Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.699 + Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.557 + Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.356 + Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.563 + Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.668 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.387 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.638 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.689 # <--- ensemble mAR + Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.526 + Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.743 + Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.844 +``` + +## Ensemble Inference + +Append extra models to the `--weights` argument to run ensemble inference: + +```bash +python detect.py --weights yolov5x.pt yolov5l6.pt --img 640 --source data/images +``` + +Output: + +```bash +YOLOv5 ๐Ÿš€ v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Fusing layers... +Model Summary: 476 layers, 87730285 parameters, 0 gradients +Fusing layers... +Model Summary: 501 layers, 77218620 parameters, 0 gradients +Ensemble created with ['yolov5x.pt', 'yolov5l6.pt'] + +image 1/2 /content/yolov5/data/images/bus.jpg: 640x512 4 persons, 1 bus, 1 tie, Done. (0.063s) +image 2/2 /content/yolov5/data/images/zidane.jpg: 384x640 3 persons, 2 ties, Done. (0.056s) +Results saved to runs/detect/exp2 +Done. (0.223s) +``` + +YOLO inference result + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. 
diff --git a/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_ensembling.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/model_export.md b/ultralytics/docs/en/yolov5/tutorials/model_export.md new file mode 100755 index 0000000..bb72f2e --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_export.md @@ -0,0 +1,245 @@ +--- +comments: true +description: Learn how to export a trained YOLOv5 model from PyTorch to different formats including TorchScript, ONNX, OpenVINO, TensorRT, and CoreML, and how to use these models. +keywords: Ultralytics, YOLOv5, model export, PyTorch, TorchScript, ONNX, OpenVINO, TensorRT, CoreML, TensorFlow +--- + +# TFLite, ONNX, CoreML, TensorRT Export + +๐Ÿ“š This guide explains how to export a trained YOLOv5 ๐Ÿš€ model from PyTorch to ONNX and TorchScript formats. + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +For a [TensorRT](https://developer.nvidia.com/tensorrt) export example (GPU required), see the appendix section of our Colab [notebook](https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb#scrollTo=VTRwsvA9u7ln&line=2&uniqifier=1). Open In Colab + +## Formats + +YOLOv5 inference is officially supported in 12 formats: + +๐Ÿ’ก ProTip: Export to ONNX or OpenVINO for up to 3x CPU speedup. See [CPU Benchmarks](https://github.com/ultralytics/yolov5/pull/6613). ๐Ÿ’ก ProTip: Export to TensorRT for up to 5x GPU speedup. See [GPU Benchmarks](https://github.com/ultralytics/yolov5/pull/6963).
+
+| Format | `export.py --include` | Model |
+|:---------------------------------------------------------------------------|:----------------------|:--------------------------|
+| [PyTorch](https://pytorch.org/) | - | `yolov5s.pt` |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov5s.torchscript` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov5s.onnx` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov5s_openvino_model/` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov5s.engine` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov5s.mlmodel` |
+| [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov5s_saved_model/` |
+| [TensorFlow GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov5s.pb` |
+| [TensorFlow Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov5s.tflite` |
+| [TensorFlow Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov5s_edgetpu.tflite` |
+| [TensorFlow.js](https://www.tensorflow.org/js) | `tfjs` | `yolov5s_web_model/` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov5s_paddle_model/` |
+
+## Benchmarks
+
+Benchmarks below run on Colab Pro with the YOLOv5 tutorial notebook Open In Colab. To reproduce:
+
+```bash
+python benchmarks.py --weights yolov5s.pt --imgsz 640 --device 0
+```
+
+### Colab Pro V100 GPU
+
+```
+benchmarks: weights=/content/yolov5/yolov5s.pt, imgsz=640, batch_size=1, data=/content/yolov5/data/coco128.yaml, device=0, half=False, test=False
+Checking setup...
+YOLOv5 🚀 v6.1-135-g7926afc torch 1.10.0+cu111 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB)
+Setup complete ✅ (8 CPUs, 51.0 GB RAM, 46.7/166.8 GB disk)
+
+Benchmarks complete (458.07s)
+ Format mAP@0.5:0.95 Inference time (ms)
+0 PyTorch 0.4623 10.19
+1 TorchScript 0.4623 6.85
+2 ONNX 0.4623 14.63
+3 OpenVINO NaN NaN
+4 TensorRT 0.4617 1.89
+5 CoreML NaN NaN
+6 TensorFlow SavedModel 0.4623 21.28
+7 TensorFlow GraphDef 0.4623 21.22
+8 TensorFlow Lite NaN NaN
+9 TensorFlow Edge TPU NaN NaN
+10 TensorFlow.js NaN NaN
+```
+
+### Colab Pro CPU
+
+```
+benchmarks: weights=/content/yolov5/yolov5s.pt, imgsz=640, batch_size=1, data=/content/yolov5/data/coco128.yaml, device=cpu, half=False, test=False
+Checking setup...
+YOLOv5 🚀 v6.1-135-g7926afc torch 1.10.0+cu111 CPU
+Setup complete ✅ (8 CPUs, 51.0 GB RAM, 41.5/166.8 GB disk)
+
+Benchmarks complete (241.20s)
+ Format mAP@0.5:0.95 Inference time (ms)
+0 PyTorch 0.4623 127.61
+1 TorchScript 0.4623 131.23
+2 ONNX 0.4623 69.34
+3 OpenVINO 0.4623 66.52
+4 TensorRT NaN NaN
+5 CoreML NaN NaN
+6 TensorFlow SavedModel 0.4623 123.79
+7 TensorFlow GraphDef 0.4623 121.57
+8 TensorFlow Lite 0.4623 316.61
+9 TensorFlow Edge TPU NaN NaN
+10 TensorFlow.js NaN NaN
+```
+
+## Export a Trained YOLOv5 Model
+
+This command exports a pretrained YOLOv5s model to TorchScript and ONNX formats. `yolov5s.pt` is the 'small' model, the second-smallest model available. Other options are `yolov5n.pt`, `yolov5m.pt`, `yolov5l.pt` and `yolov5x.pt`, along with their P6 counterparts i.e. `yolov5s6.pt`, or your own custom training checkpoint i.e. `runs/exp/weights/best.pt`. For details on all available models please see our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints).
+
+```bash
+python export.py --weights yolov5s.pt --include torchscript onnx
+```
+
+💡 ProTip: Add `--half` to export models at FP16 half precision for smaller file sizes.
+
+Output:
+
+```bash
+export: data=data/coco128.yaml, weights=['yolov5s.pt'], imgsz=[640, 640], batch_size=1, device=cpu, half=False, inplace=False, train=False, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=12, verbose=False, workspace=4, nms=False, agnostic_nms=False, topk_per_class=100, topk_all=100, iou_thres=0.45, conf_thres=0.25, include=['torchscript', 'onnx']
+YOLOv5 🚀 v6.2-104-ge3e5122 Python-3.8.0 torch-1.12.1+cu113 CPU
+
+Downloading https://github.com/ultralytics/yolov5/releases/download/v6.2/yolov5s.pt to yolov5s.pt...
+100% 14.1M/14.1M [00:00<00:00, 274MB/s]
+
+Fusing layers...
+YOLOv5s summary: 213 layers, 7225885 parameters, 0 gradients
+
+PyTorch: starting from yolov5s.pt with output shape (1, 25200, 85) (14.1 MB)
+
+TorchScript: starting export with torch 1.12.1+cu113...
+TorchScript: export success ✅ 1.7s, saved as yolov5s.torchscript (28.1 MB)
+
+ONNX: starting export with onnx 1.12.0...
+ONNX: export success ✅ 2.3s, saved as yolov5s.onnx (28.0 MB)
+
+Export complete (5.5s)
+Results saved to /content/yolov5
+Detect: python detect.py --weights yolov5s.onnx
+Validate: python val.py --weights yolov5s.onnx
+PyTorch Hub: model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.onnx')
+Visualize: https://netron.app/
+```
+
+The 2 exported models will be saved alongside the original PyTorch model:

YOLO export locations

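+
+As a quick check, you can list the exported artifacts from Python; a small sketch (the file names assume the `yolov5s` export above and the current working directory):
+
+```python
+from pathlib import Path
+
+# List the exported yolov5s artifacts saved next to the original .pt weights
+for p in sorted(Path('.').glob('yolov5s*')):
+    print(f'{p}  {p.stat().st_size / 1e6:.1f} MB')
+```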
+ +[Netron Viewer](https://github.com/lutzroeder/netron) is recommended for visualizing exported models: +

YOLO model visualization

+
+## Exported Model Usage Examples
+
+`detect.py` runs inference on exported models:
+
+```bash
+python detect.py --weights yolov5s.pt # PyTorch
+                           yolov5s.torchscript # TorchScript
+                           yolov5s.onnx # ONNX Runtime or OpenCV DNN with dnn=True
+                           yolov5s_openvino_model # OpenVINO
+                           yolov5s.engine # TensorRT
+                           yolov5s.mlmodel # CoreML (macOS only)
+                           yolov5s_saved_model # TensorFlow SavedModel
+                           yolov5s.pb # TensorFlow GraphDef
+                           yolov5s.tflite # TensorFlow Lite
+                           yolov5s_edgetpu.tflite # TensorFlow Edge TPU
+                           yolov5s_paddle_model # PaddlePaddle
+```
+
+`val.py` runs validation on exported models:
+
+```bash
+python val.py --weights yolov5s.pt # PyTorch
+                        yolov5s.torchscript # TorchScript
+                        yolov5s.onnx # ONNX Runtime or OpenCV DNN with dnn=True
+                        yolov5s_openvino_model # OpenVINO
+                        yolov5s.engine # TensorRT
+                        yolov5s.mlmodel # CoreML (macOS only)
+                        yolov5s_saved_model # TensorFlow SavedModel
+                        yolov5s.pb # TensorFlow GraphDef
+                        yolov5s.tflite # TensorFlow Lite
+                        yolov5s_edgetpu.tflite # TensorFlow Edge TPU
+                        yolov5s_paddle_model # PaddlePaddle
+```
+
+Use PyTorch Hub with exported YOLOv5 models:
+
+```python
+import torch
+
+# Model (swap in any exported weights file below)
+model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.pt')  # PyTorch
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.torchscript')  # TorchScript
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.onnx')  # ONNX Runtime
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s_openvino_model')  # OpenVINO
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.engine')  # TensorRT
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.mlmodel')  # CoreML (macOS only)
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s_saved_model')  # TensorFlow SavedModel
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.pb')  # TensorFlow GraphDef
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s.tflite')  # TensorFlow Lite
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s_edgetpu.tflite')  # TensorFlow Edge TPU
+# model = torch.hub.load('ultralytics/yolov5', 'custom', 'yolov5s_paddle_model')  # PaddlePaddle
+
+# Images
+img = 'https://ultralytics.com/images/zidane.jpg' # or file, Path, PIL, OpenCV, numpy, list
+
+# Inference
+results = model(img)
+
+# Results
+results.print() # or .show(), .save(), .crop(), .pandas(), etc.
+```
+
+## OpenCV DNN Inference
+
+OpenCV inference with ONNX models:
+
+```bash
+python export.py --weights yolov5s.pt --include onnx
+
+python detect.py --weights yolov5s.onnx --dnn # detect
+python val.py --weights yolov5s.onnx --dnn # validate
+```
+
+## C++ Inference
+
+YOLOv5 OpenCV DNN C++ inference on exported ONNX model examples:
+
+- [https://github.com/Hexmagic/ONNX-yolov5/blob/master/src/test.cpp](https://github.com/Hexmagic/ONNX-yolov5/blob/master/src/test.cpp)
+- [https://github.com/doleron/yolov5-opencv-cpp-python](https://github.com/doleron/yolov5-opencv-cpp-python)
+
+YOLOv5 OpenVINO C++ inference examples:
+
+- [https://github.com/dacquaviva/yolov5-openvino-cpp-python](https://github.com/dacquaviva/yolov5-openvino-cpp-python)
+- [https://github.com/UNeedCryDear/yolov5-seg-opencv-dnn-cpp](https://github.com/UNeedCryDear/yolov5-seg-opencv-dnn-cpp)
+
+## TensorFlow.js Web Browser Inference
+
+- [https://aukerul-shuvo.github.io/YOLOv5_TensorFlow-JS/](https://aukerul-shuvo.github.io/YOLOv5_TensorFlow-JS/)
+
+## Supported Environments
+
+Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects.
+
+- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle
+- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md)
+- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md)
+- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md)
+- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls
+
+## Project Status
+
+YOLOv5 CI
+
+This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit.
diff --git a/ultralytics/docs/en/yolov5/tutorials/model_export.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/model_export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_export.md:Zone.Identifier @@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md b/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md new file mode 100755 index 0000000..fddd1c6 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md @@ -0,0 +1,110 @@
+---
+comments: true
+description: Improve YOLOv5 model efficiency by pruning with Ultralytics. Understand the process, conduct tests and view the impact on accuracy and sparsity. Test-maintained API environments.
+keywords: YOLOv5, YOLO, Ultralytics, model pruning, PyTorch, machine learning, deep learning, computer vision, object detection
+---
+
+📚 This guide explains how to apply **pruning** to YOLOv5 🚀 models.
+
+## Before You Start
+
+Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases).
+
+```bash
+git clone https://github.com/ultralytics/yolov5 # clone
+cd yolov5
+pip install -r requirements.txt # install
+```
+
+## Test Normally
+
+Before pruning we want to establish a baseline performance to compare to. This command tests YOLOv5x on COCO val2017 at image size 640 pixels. `yolov5x.pt` is the largest and most accurate model available. Other options are `yolov5s.pt`, `yolov5m.pt` and `yolov5l.pt`, or your own checkpoint from training a custom dataset `./weights/best.pt`. For details on all available models please see our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints).
+ +```bash +python val.py --weights yolov5x.pt --data coco.yaml --img 640 --half +``` + +Output: + +```shell +val: data=/content/yolov5/data/coco.yaml, weights=['yolov5x.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.65, task=val, device=, workers=8, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True, dnn=False +YOLOv5 ๐Ÿš€ v6.0-224-g4c40933 torch 1.10.0+cu111 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB) + +Fusing layers... +Model Summary: 444 layers, 86705005 parameters, 0 gradients +val: Scanning '/content/datasets/coco/val2017.cache' images and labels... 4952 found, 48 missing, 0 empty, 0 corrupt: 100% 5000/5000 [00:00 + +30% pruned output: + +```bash +val: data=/content/yolov5/data/coco.yaml, weights=['yolov5x.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.65, task=val, device=, workers=8, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True, dnn=False +YOLOv5 ๐Ÿš€ v6.0-224-g4c40933 torch 1.10.0+cu111 CUDA:0 (Tesla V100-SXM2-16GB, 16160MiB) + +Fusing layers... +Model Summary: 444 layers, 86705005 parameters, 0 gradients +Pruning model... 0.3 global sparsity +val: Scanning '/content/datasets/coco/val2017.cache' images and labels... 4952 found, 48 missing, 0 empty, 0 corrupt: 100% 5000/5000 [00:00Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. diff --git a/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/model_pruning_and_sparsity.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md b/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md new file mode 100755 index 0000000..35c17fd --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md @@ -0,0 +1,190 @@ +--- +comments: true +description: Learn how to train datasets on single or multiple GPUs using YOLOv5. Includes setup, training modes and result profiling for efficient leveraging of multiple GPUs. 
+keywords: YOLOv5, multi-GPU Training, YOLOv5 training, deep learning, machine learning, object detection, Ultralytics +--- + +๐Ÿ“š This guide explains how to properly use **multiple** GPUs to train a dataset with YOLOv5 ๐Ÿš€ on single or multiple machine(s). + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +๐Ÿ’ก ProTip! **Docker Image** is recommended for all Multi-GPU trainings. See [Docker Quickstart Guide](https://docs.ultralytics.com/yolov5/environments/docker_image_quickstart_tutorial/) Docker Pulls + +๐Ÿ’ก ProTip! `torch.distributed.run` replaces `torch.distributed.launch` in **PyTorch>=1.9**. See [docs](https://pytorch.org/docs/stable/distributed.html) for details. + +## Training + +Select a pretrained model to start training from. Here we select [YOLOv5s](https://github.com/ultralytics/yolov5/blob/master/models/yolov5s.yaml), the smallest and fastest model available. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models. We will train this model with Multi-GPU on the [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh) dataset. + +

YOLOv5 Models

+
+### Single GPU
+
+```bash
+python train.py --batch 64 --data coco.yaml --weights yolov5s.pt --device 0
+```
+
+### Multi-GPU [DataParallel](https://pytorch.org/docs/stable/nn.html#torch.nn.DataParallel) Mode (⚠️ not recommended)
+
+You can pass additional GPU IDs to `--device` to use multiple GPUs in DataParallel mode.
+
+```bash
+python train.py --batch 64 --data coco.yaml --weights yolov5s.pt --device 0,1
+```
+
+This method is slow and barely speeds up training compared to using just 1 GPU.
+
+### Multi-GPU [DistributedDataParallel](https://pytorch.org/docs/stable/nn.html#torch.nn.parallel.DistributedDataParallel) Mode (✅ recommended)
+
+You will have to pass `python -m torch.distributed.run --nproc_per_node`, followed by the usual arguments.
+
+```bash
+python -m torch.distributed.run --nproc_per_node 2 train.py --batch 64 --data coco.yaml --weights yolov5s.pt --device 0,1
+```
+
+`--nproc_per_node` specifies how many GPUs you would like to use. In the example above, it is 2.
+`--batch` is the total batch-size. It will be divided evenly across the GPUs. In the example above, it is 64/2=32 per GPU.
+
+The code above will use GPUs `0... (N-1)`.
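+
+As a quick illustration of the batch math above (a sketch, not part of the repo; `total_batch` and `world_size` mirror `--batch` and `--nproc_per_node`):
+
+```python
+# Illustrative only: how the total --batch is split across GPUs in DDP mode
+total_batch, world_size = 64, 2  # --batch 64, --nproc_per_node 2
+assert total_batch % world_size == 0, '--batch must be a multiple of GPU count'
+per_gpu_batch = total_batch // world_size
+print(per_gpu_batch)  # 32, matching the example above
+```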
+ Use specific GPUs (click to expand) + +You can do so by simply passing `--device` followed by your specific GPUs. For example, in the code below, we will use GPUs `2,3`. + +```bash +python -m torch.distributed.run --nproc_per_node 2 train.py --batch 64 --data coco.yaml --cfg yolov5s.yaml --weights '' --device 2,3 +``` + +
+ +
+ Use SyncBatchNorm (click to expand)
+
+[SyncBatchNorm](https://pytorch.org/docs/master/generated/torch.nn.SyncBatchNorm.html) can increase accuracy for multi-GPU training; however, it will slow training down significantly. It is **only** available for Multi-GPU DistributedDataParallel training.
+
+It is best used when the batch-size on **each** GPU is small (<= 8).
+
+To use SyncBatchNorm, simply pass `--sync-bn` to the command, like below:
+
+```bash
+python -m torch.distributed.run --nproc_per_node 2 train.py --batch 64 --data coco.yaml --cfg yolov5s.yaml --weights '' --sync-bn
+```
+ +
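+
+Conceptually, `--sync-bn` swaps regular BatchNorm layers for synchronized ones. A minimal PyTorch sketch of that conversion (illustrative; not YOLOv5's exact code path):
+
+```python
+import torch.nn as nn
+
+model = nn.Sequential(nn.Conv2d(3, 16, 3), nn.BatchNorm2d(16), nn.ReLU())
+
+# Replace BatchNorm layers with SyncBatchNorm, which synchronizes batch
+# statistics across all DDP processes (only meaningful inside a DDP run)
+model = nn.SyncBatchNorm.convert_sync_batchnorm(model)
+print(model)
+```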
+ Use Multiple machines (click to expand)
+
+This is **only** available for Multiple GPU DistributedDataParallel training.
+
+Before we continue, make sure the files on all machines are the same: dataset, codebase, etc. Then make sure the machines can communicate with each other.
+
+You will have to choose a master machine (the machine that the others will talk to). Note down its address (`master_addr`) and choose a port (`master_port`). I will use `master_addr = 192.168.1.1` and `master_port = 1234` for the example below.
+
+To use it, run the following:
+
+```bash
+# On master machine 0
+python -m torch.distributed.run --nproc_per_node G --nnodes N --node_rank 0 --master_addr "192.168.1.1" --master_port 1234 train.py --batch 64 --data coco.yaml --cfg yolov5s.yaml --weights ''
+```
+
+```bash
+# On machine R
+python -m torch.distributed.run --nproc_per_node G --nnodes N --node_rank R --master_addr "192.168.1.1" --master_port 1234 train.py --batch 64 --data coco.yaml --cfg yolov5s.yaml --weights ''
+```
+
+where `G` is the number of GPUs per machine, `N` is the number of machines, and `R` is the machine number from `0...(N-1)`. For example, with two machines with two GPUs each, it would be `G = 2`, `N = 2`, and `R = 1` for the above.
+
+Training will not start until all `N` machines are connected. Output will only be shown on the master machine!
+ +### Notes + +- Windows support is untested, Linux is recommended. +- `--batch ` must be a multiple of the number of GPUs. +- GPU 0 will take slightly more memory than the other GPUs as it maintains EMA and is responsible for checkpointing etc. +- If you get `RuntimeError: Address already in use`, it could be because you are running multiple trainings at a time. To fix this, simply use a different port number by adding `--master_port` like below, + +```bash +python -m torch.distributed.run --master_port 1234 --nproc_per_node 2 ... +``` + +## Results + +DDP profiling results on an [AWS EC2 P4d instance](https://docs.ultralytics.com/yolov5/environments/aws_quickstart_tutorial/) with 8x A100 SXM4-40GB for YOLOv5l for 1 COCO epoch. + +
+ Profiling code + +```bash +# prepare +t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/coco:/usr/src/coco $t +pip3 install torch==1.9.0+cu111 torchvision==0.10.0+cu111 -f https://download.pytorch.org/whl/torch_stable.html +cd .. && rm -rf app && git clone https://github.com/ultralytics/yolov5 -b master app && cd app +cp data/coco.yaml data/coco_profile.yaml + +# profile +python train.py --batch-size 16 --data coco_profile.yaml --weights yolov5l.pt --epochs 1 --device 0 +python -m torch.distributed.run --nproc_per_node 2 train.py --batch-size 32 --data coco_profile.yaml --weights yolov5l.pt --epochs 1 --device 0,1 +python -m torch.distributed.run --nproc_per_node 4 train.py --batch-size 64 --data coco_profile.yaml --weights yolov5l.pt --epochs 1 --device 0,1,2,3 +python -m torch.distributed.run --nproc_per_node 8 train.py --batch-size 128 --data coco_profile.yaml --weights yolov5l.pt --epochs 1 --device 0,1,2,3,4,5,6,7 +``` + +
+
+| GPUs<br>A100 | batch-size | CUDA_mem<br>device0 (G) | COCO<br>train | COCO<br>val |
+|--------------|------------|------------------------------|--------------------|------------------|
+| 1x | 16 | 26GB | 20:39 | 0:55 |
+| 2x | 32 | 26GB | 11:43 | 0:57 |
+| 4x | 64 | 26GB | 5:57 | 0:55 |
+| 8x | 128 | 26GB | 3:09 | 0:57 |
+
+## FAQ
+
+If an error occurs, please read the checklist below first! (It could save you time)
+ Checklist (click to expand) + +
+- Have you properly read this post?
+- Have you tried to re-clone the codebase? The code changes daily.
+- Have you tried to search for your error? Someone may have already encountered it in this repo or another and have the solution.
+- Have you installed all the requirements listed on top (including the correct Python and PyTorch versions)?
+- Have you tried the other environments listed in the "Environments" section below?
+- Have you tried with another dataset like coco128 or coco2017? It will make it easier to find the root cause.
+ +If you went through all the above, feel free to raise an Issue by giving as much detail as possible following the template. + +
+ +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. + +## Credits + +We would like to thank @MagicFrogSJTU, who did all the heavy lifting, and @glenn-jocher for guiding us along the way. diff --git a/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/multi_gpu_training.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md b/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md new file mode 100755 index 0000000..e0c2528 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md @@ -0,0 +1,264 @@ +--- +comments: true +description: Explore how to achieve exceptional AI performance with DeepSparse's incredible inference speed. Discover how to deploy YOLOv5, and learn about model sparsification and fine-tuning with SparseML. +keywords: YOLOv5, DeepSparse, Ultralytics, Neural Magic, sparsification, inference runtime, deep learning, deployment, model fine-tuning, SparseML, AI performance, GPU-class performance +--- + + + +Welcome to software-delivered AI. + +This guide explains how to deploy YOLOv5 with Neural Magic's DeepSparse. + +DeepSparse is an inference runtime with exceptional performance on CPUs. For instance, compared to the ONNX Runtime baseline, DeepSparse offers a 5.8x speed-up for YOLOv5s, running on the same machine! + +

+ YOLOv5 speed improvement +

+
+For the first time, your deep learning workloads can meet the performance demands of production without the complexity and costs of hardware accelerators. Put simply, DeepSparse gives you the performance of GPUs and the simplicity of software:
+
+- **Flexible Deployments**: Run consistently across cloud, data center, and edge with any hardware provider from Intel to AMD to ARM
+- **Infinite Scalability**: Scale vertically to 100s of cores, out with standard Kubernetes, or fully-abstracted with Serverless
+- **Easy Integration**: Clean APIs for integrating your model into an application and monitoring it in production
+
+### How Does DeepSparse Achieve GPU-Class Performance?
+
+DeepSparse takes advantage of model sparsity to gain its performance speedup.
+
+Sparsification through pruning and quantization is a broadly studied technique, allowing order-of-magnitude reductions in the size and compute needed to execute a network, while maintaining high accuracy. DeepSparse is sparsity-aware, meaning it skips the zeroed-out parameters, shrinking the amount of compute in a forward pass. Since the sparse computation is now memory bound, DeepSparse executes the network depth-wise, breaking the problem into Tensor Columns, vertical stripes of computation that fit in cache.

+ YOLO model pruning +

+
+Sparse networks with compressed computation, executed depth-wise in cache, allow DeepSparse to deliver GPU-class performance on CPUs!
+
+### How Do I Create A Sparse Version of YOLOv5 Trained on My Data?
+
+Neural Magic's open-source model repository, SparseZoo, contains pre-sparsified checkpoints of each YOLOv5 model. Using SparseML, which is integrated with Ultralytics, you can fine-tune a sparse checkpoint onto your data with a single CLI command.
+
+[Check out Neural Magic's YOLOv5 documentation for more details](https://docs.neuralmagic.com/use-cases/object-detection/sparsifying).
+
+## DeepSparse Usage
+
+We will walk through an example benchmarking and deploying a sparse version of YOLOv5s with DeepSparse.
+
+### Install DeepSparse
+
+Run the following to install DeepSparse. We recommend you use a virtual environment with Python.
+
+```bash
+pip install "deepsparse[server,yolo,onnxruntime]"
+```
+
+### Collect an ONNX File
+
+DeepSparse accepts a model in the ONNX format, passed either as:
+
+- A SparseZoo stub which identifies an ONNX file in the SparseZoo
+- A local path to an ONNX model in a filesystem
+
+The examples below use the standard dense and pruned-quantized YOLOv5s checkpoints, identified by the following SparseZoo stubs:
+
+```bash
+zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none
+zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none
+```
+
+### Deploy a Model
+
+DeepSparse offers convenient APIs for integrating your model into an application.
+
+To try the deployment examples below, pull down a sample image and save it as `basilica.jpg` with the following:
+
+```bash
+wget -O basilica.jpg https://raw.githubusercontent.com/neuralmagic/deepsparse/main/src/deepsparse/yolo/sample_images/basilica.jpg
+```
+
+#### Python API
+
+`Pipelines` wrap pre-processing and output post-processing around the runtime, providing a clean interface for adding DeepSparse to an application. The DeepSparse-Ultralytics integration includes an out-of-the-box `Pipeline` that accepts raw images and outputs the bounding boxes.
+
+Create a `Pipeline` and run inference:
+
+```python
+from deepsparse import Pipeline
+
+# list of images in local filesystem
+images = ["basilica.jpg"]
+
+# create Pipeline
+model_stub = "zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none"
+yolo_pipeline = Pipeline.create(
+    task="yolo",
+    model_path=model_stub,
+)
+
+# run inference on images, receive bounding boxes + classes
+pipeline_outputs = yolo_pipeline(images=images, iou_thres=0.6, conf_thres=0.001)
+print(pipeline_outputs)
+```
+
+If you are running in the cloud, you may get an error that OpenCV cannot find `libGL.so.1`. Running the following on Ubuntu installs it:
+
+```
+apt-get install libgl1
+```
+
+#### HTTP Server
+
+DeepSparse Server runs on top of the popular FastAPI web framework and Uvicorn web server. With just a single CLI command, you can easily set up a model service endpoint with DeepSparse. The Server supports any Pipeline from DeepSparse, including object detection with YOLOv5, enabling you to send raw images to the endpoint and receive the bounding boxes.
+
+Spin up the Server with the pruned-quantized YOLOv5s:
+
+```bash
+deepsparse.server \
+    --task yolo \
+    --model_path zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none
+```
+
+An example request, using Python's `requests` package:
+
+```python
+import requests, json
+
+# list of images for inference (local files on client side)
+path = ['basilica.jpg']
+files = [('request', open(img, 'rb')) for img in path]
+
+# send request over HTTP to /predict/from_files endpoint
+url = 'http://0.0.0.0:5543/predict/from_files'
+resp = requests.post(url=url, files=files)
+
+# response is returned in JSON
+annotations = json.loads(resp.text) # dictionary of annotation results
+bounding_boxes = annotations["boxes"]
+labels = annotations["labels"]
+```
+
+#### Annotate CLI
+
+You can also use the annotate command to have the engine save an annotated photo on disk. Try `--source 0` to annotate your live webcam feed!
+
+```bash
+deepsparse.object_detection.annotate --model_filepath zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none --source basilica.jpg
+```
+
+Running the above command will create an `annotation-results` folder and save the annotated image inside.

+annotated +

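+
+Returning to the Python `Pipeline` from the Deploy a Model section, its output groups detections per image. A hedged sketch of unpacking it (the `boxes`, `scores` and `labels` field names are assumed from the YOLO output schema; verify against your installed deepsparse version):
+
+```python
+from deepsparse import Pipeline
+
+yolo_pipeline = Pipeline.create(
+    task="yolo",
+    model_path="zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none",
+)
+pipeline_outputs = yolo_pipeline(images=["basilica.jpg"])
+
+# Iterate per image, then per detection (field names assumed, see note above)
+for boxes, scores, labels in zip(pipeline_outputs.boxes, pipeline_outputs.scores, pipeline_outputs.labels):
+    for box, score, label in zip(boxes, scores, labels):
+        print(f'{label}: {score:.2f} at {box}')  # class, confidence, [x1, y1, x2, y2]
+```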
+
+## Benchmarking Performance
+
+We will compare DeepSparse's throughput to ONNX Runtime's throughput on YOLOv5s, using DeepSparse's benchmarking script.
+
+The benchmarks were run on an AWS `c6i.8xlarge` instance (16 cores).
+
+### Batch 32 Performance Comparison
+
+#### ONNX Runtime Baseline
+
+At batch 32, ONNX Runtime achieves 42 images/sec with the standard dense YOLOv5s:
+
+```bash
+deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none -s sync -b 32 -nstreams 1 -e onnxruntime
+
+> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none
+> Batch Size: 32
+> Scenario: sync
+> Throughput (items/sec): 41.9025
+```
+
+#### DeepSparse Dense Performance
+
+While DeepSparse offers its best performance with optimized sparse models, it also performs well with the standard dense YOLOv5s.
+
+At batch 32, DeepSparse achieves 70 images/sec with the standard dense YOLOv5s, a **1.7x performance improvement over ORT**!
+
+```bash
+deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none -s sync -b 32 -nstreams 1
+
+> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none
+> Batch Size: 32
+> Scenario: sync
+> Throughput (items/sec): 69.5546
+```
+
+#### DeepSparse Sparse Performance
+
+When sparsity is applied to the model, DeepSparse's performance gains over ONNX Runtime are even stronger.
+
+At batch 32, DeepSparse achieves 241 images/sec with the pruned-quantized YOLOv5s, a **5.8x performance improvement over ORT**!
+
+```bash
+deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none -s sync -b 32 -nstreams 1
+
+> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none
+> Batch Size: 32
+> Scenario: sync
+> Throughput (items/sec): 241.2452
+```
+
+### Batch 1 Performance Comparison
+
+DeepSparse is also able to gain a speed-up over ONNX Runtime for the latency-sensitive, batch 1 scenario.
+
+#### ONNX Runtime Baseline
+
+At batch 1, ONNX Runtime achieves 48 images/sec with the standard dense YOLOv5s.
+
+```bash
+deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none -s sync -b 1 -nstreams 1 -e onnxruntime
+
+> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/base-none
+> Batch Size: 1
+> Scenario: sync
+> Throughput (items/sec): 48.0921
+```
+
+#### DeepSparse Sparse Performance
+
+At batch 1, DeepSparse achieves 135 items/sec with a pruned-quantized YOLOv5s, **a 2.8x performance gain over ONNX Runtime!**
+
+```bash
+deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none -s sync -b 1 -nstreams 1
+
+> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned65_quant-none
+> Batch Size: 1
+> Scenario: sync
+> Throughput (items/sec): 134.9468
+```
+
+Since `c6i.8xlarge` instances have VNNI instructions, DeepSparse's throughput can be pushed further if weights are pruned in blocks of 4.
+ +At batch 1, DeepSparse achieves 180 items/sec with a 4-block pruned-quantized YOLOv5s, a **3.7x performance gain over ONNX Runtime!** + +```bash +deepsparse.benchmark zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned35_quant-none-vnni -s sync -b 1 -nstreams 1 + +> Original Model Path: zoo:cv/detection/yolov5-s/pytorch/ultralytics/coco/pruned35_quant-none-vnni +> Batch Size: 1 +> Scenario: sync +> Throughput (items/sec): 179.7375 +``` + +## Get Started With DeepSparse + +**Research or Testing?** DeepSparse Community is free for research and testing. Get started with our [Documentation](https://docs.neuralmagic.com/). diff --git a/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/neural_magic_pruning_quantization.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md b/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md new file mode 100755 index 0000000..4cc828d --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md @@ -0,0 +1,372 @@ +--- +comments: true +description: Detailed guide on loading YOLOv5 from PyTorch Hub. Includes examples & tips on inference settings, multi-GPU inference, training and more. +keywords: Ultralytics, YOLOv5, PyTorch, loading YOLOv5, PyTorch Hub, inference, multi-GPU inference, training +--- + +๐Ÿ“š This guide explains how to load YOLOv5 ๐Ÿš€ from PyTorch Hub at [https://pytorch.org/hub/ultralytics_yolov5](https://pytorch.org/hub/ultralytics_yolov5). + +## Before You Start + +Install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +pip install -r https://raw.githubusercontent.com/ultralytics/yolov5/master/requirements.txt +``` + +๐Ÿ’ก ProTip: Cloning [https://github.com/ultralytics/yolov5](https://github.com/ultralytics/yolov5) is **not** required ๐Ÿ˜ƒ + +## Load YOLOv5 with PyTorch Hub + +### Simple Example + +This example loads a pretrained YOLOv5s model from PyTorch Hub as `model` and passes an image for inference. `'yolov5s'` is the lightest and fastest YOLOv5 model. For details on all available models please see the [README](https://github.com/ultralytics/yolov5#pretrained-checkpoints). + +```python +import torch + +# Model +model = torch.hub.load('ultralytics/yolov5', 'yolov5s') + +# Image +im = 'https://ultralytics.com/images/zidane.jpg' + +# Inference +results = model(im) + +results.pandas().xyxy[0] +# xmin ymin xmax ymax confidence class name +# 0 749.50 43.50 1148.0 704.5 0.874023 0 person +# 1 433.50 433.50 517.5 714.5 0.687988 27 tie +# 2 114.75 195.75 1095.0 708.0 0.624512 0 person +# 3 986.00 304.00 1028.0 420.0 0.286865 27 tie +``` + +### Detailed Example + +This example shows **batched inference** with **PIL** and **OpenCV** image sources. 
`results` can be **printed** to console, **saved** to `runs/hub`, **shown** to screen on supported environments, and returned as **tensors** or **pandas** dataframes.
+
+```python
+import cv2
+import torch
+from PIL import Image
+
+# Model
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
+
+# Images
+for f in 'zidane.jpg', 'bus.jpg':
+    torch.hub.download_url_to_file('https://ultralytics.com/images/' + f, f)  # download 2 images
+im1 = Image.open('zidane.jpg')  # PIL image
+im2 = cv2.imread('bus.jpg')[..., ::-1]  # OpenCV image (BGR to RGB)
+
+# Inference
+results = model([im1, im2], size=640)  # batch of images
+
+# Results
+results.print()
+results.save()  # or .show()
+
+results.xyxy[0]  # im1 predictions (tensor)
+results.pandas().xyxy[0]  # im1 predictions (pandas)
+# xmin ymin xmax ymax confidence class name
+# 0 749.50 43.50 1148.0 704.5 0.874023 0 person
+# 1 433.50 433.50 517.5 714.5 0.687988 27 tie
+# 2 114.75 195.75 1095.0 708.0 0.624512 0 person
+# 3 986.00 304.00 1028.0 420.0 0.286865 27 tie
+```
+
+YOLO inference results on zidane.jpg
+YOLO inference results on bus.jpg
+
+For all inference options see YOLOv5 `AutoShape()` forward [method](https://github.com/ultralytics/yolov5/blob/30e4c4f09297b67afedf8b2bcd851833ddc9dead/models/common.py#L243-L252).
+
+### Inference Settings
+
+YOLOv5 models contain various inference attributes such as **confidence threshold**, **IoU threshold**, etc. which can be set by:
+
+```python
+model.conf = 0.25  # NMS confidence threshold
+model.iou = 0.45  # NMS IoU threshold
+model.agnostic = False  # NMS class-agnostic
+model.multi_label = False  # NMS multiple labels per box
+model.classes = None  # (optional list) filter by class, i.e. = [0, 15, 16] for COCO persons, cats and dogs
+model.max_det = 1000  # maximum number of detections per image
+model.amp = False  # Automatic Mixed Precision (AMP) inference
+
+results = model(im, size=320)  # custom inference size
+```
+
+### Device
+
+Models can be transferred to any device after creation:
+
+```python
+model.cpu()  # CPU
+model.cuda()  # GPU
+model.to(device)  # i.e. device=torch.device(0)
+```
+
+Models can also be created directly on any `device`:
+
+```python
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', device='cpu')  # load on CPU
+```
+
+💡 ProTip: Input images are automatically transferred to the correct model device before inference.
+
+### Silence Outputs
+
+Models can be loaded silently with `_verbose=False`:
+
+```python
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', _verbose=False)  # load silently
+```
+
+### Input Channels
+
+To load a pretrained YOLOv5s model with 4 input channels rather than the default 3:
+
+```python
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', channels=4)
+```
+
+In this case the model will be composed of pretrained weights **except for** the very first input layer, which is no longer the same shape as the pretrained input layer. The input layer will remain initialized by random weights.
+
+### Number of Classes
+
+To load a pretrained YOLOv5s model with 10 output classes rather than the default 80:
+
+```python
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', classes=10)
+```
+
+In this case the model will be composed of pretrained weights **except for** the output layers, which are no longer the same shape as the pretrained output layers. The output layers will remain initialized by random weights.
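+
+Tying a few of the inference settings from earlier together, a short usage example that keeps only confident 'person' and 'tie' detections (COCO classes 0 and 27):
+
+```python
+import torch
+
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
+model.conf = 0.40  # raise the NMS confidence threshold
+model.classes = [0, 27]  # keep only COCO persons and ties
+
+results = model('https://ultralytics.com/images/zidane.jpg', size=320)
+results.print()
+```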
+
+### Force Reload
+
+If you run into problems with the above steps, setting `force_reload=True` may help by discarding the existing cache and forcing a fresh download of the latest YOLOv5 version from PyTorch Hub.
+
+```python
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', force_reload=True)  # force reload
+```
+
+### Screenshot Inference
+
+To run inference on your desktop screen:
+
+```python
+import torch
+from PIL import ImageGrab
+
+# Model
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s')
+
+# Image
+im = ImageGrab.grab()  # take a screenshot
+
+# Inference
+results = model(im)
+```
+
+### Multi-GPU Inference
+
+YOLOv5 models can be loaded to multiple GPUs in parallel with threaded inference:
+
+```python
+import torch
+import threading
+
+
+def run(model, im):
+    results = model(im)
+    results.save()
+
+
+# Models
+model0 = torch.hub.load('ultralytics/yolov5', 'yolov5s', device=0)
+model1 = torch.hub.load('ultralytics/yolov5', 'yolov5s', device=1)
+
+# Inference
+threading.Thread(target=run, args=[model0, 'https://ultralytics.com/images/zidane.jpg'], daemon=True).start()
+threading.Thread(target=run, args=[model1, 'https://ultralytics.com/images/bus.jpg'], daemon=True).start()
+```
+
+### Training
+
+To load a YOLOv5 model for training rather than inference, set `autoshape=False`. To load a model with randomly initialized weights (to train from scratch) use `pretrained=False`. You must provide your own training script in this case. Alternatively see our YOLOv5 [Train Custom Data Tutorial](https://docs.ultralytics.com/yolov5/tutorials/train_custom_data) for model training.
+
+```python
+import torch
+
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', autoshape=False)  # load pretrained
+model = torch.hub.load('ultralytics/yolov5', 'yolov5s', autoshape=False, pretrained=False)  # load scratch
+```
+
+### Base64 Results
+
+For use with API services. See https://github.com/ultralytics/yolov5/pull/2291 and [Flask REST API](https://github.com/ultralytics/yolov5/tree/master/utils/flask_rest_api) example for details.
+
+```python
+import base64
+from io import BytesIO
+
+from PIL import Image
+
+results = model(im)  # inference
+
+results.ims  # array of original images (as np array) passed to model for inference
+results.render()  # updates results.ims with boxes and labels
+for im in results.ims:
+    buffered = BytesIO()
+    im_base64 = Image.fromarray(im)
+    im_base64.save(buffered, format="JPEG")
+    print(base64.b64encode(buffered.getvalue()).decode('utf-8'))  # base64 encoded image with results
+```
+
+### Cropped Results
+
+Results can be returned and saved as detection crops:
+
+```python
+results = model(im)  # inference
+crops = results.crop(save=True)  # cropped detections dictionary
+```
+
+### Pandas Results
+
+Results can be returned as [Pandas DataFrames](https://pandas.pydata.org/):
+
+```python
+results = model(im)  # inference
+results.pandas().xyxy[0]  # Pandas DataFrame
+```
+ Pandas Output (click to expand) + +```python +print(results.pandas().xyxy[0]) +# xmin ymin xmax ymax confidence class name +# 0 749.50 43.50 1148.0 704.5 0.874023 0 person +# 1 433.50 433.50 517.5 714.5 0.687988 27 tie +# 2 114.75 195.75 1095.0 708.0 0.624512 0 person +# 3 986.00 304.00 1028.0 420.0 0.286865 27 tie +``` + +
+ +### Sorted Results + +Results can be sorted by column, i.e. to sort license plate digit detection left-to-right (x-axis): + +```python +results = model(im) # inference +results.pandas().xyxy[0].sort_values('xmin') # sorted left-right +``` + +### Box-Cropped Results + +Results can be returned and saved as detection crops: + +```python +results = model(im) # inference +crops = results.crop(save=True) # cropped detections dictionary +``` + +### JSON Results + +Results can be returned in JSON format once converted to `.pandas()` dataframes using the `.to_json()` method. The JSON format can be modified using the `orient` argument. See pandas `.to_json()` [documentation](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_json.html) for details. + +```python +results = model(ims) # inference +results.pandas().xyxy[0].to_json(orient="records") # JSON img1 predictions +``` + +
+ JSON Output (click to expand) + +```json +[ + { + "xmin": 749.5, + "ymin": 43.5, + "xmax": 1148.0, + "ymax": 704.5, + "confidence": 0.8740234375, + "class": 0, + "name": "person" + }, + { + "xmin": 433.5, + "ymin": 433.5, + "xmax": 517.5, + "ymax": 714.5, + "confidence": 0.6879882812, + "class": 27, + "name": "tie" + }, + { + "xmin": 115.25, + "ymin": 195.75, + "xmax": 1096.0, + "ymax": 708.0, + "confidence": 0.6254882812, + "class": 0, + "name": "person" + }, + { + "xmin": 986.0, + "ymin": 304.0, + "xmax": 1028.0, + "ymax": 420.0, + "confidence": 0.2873535156, + "class": 27, + "name": "tie" + } +] +``` + +
+ +## Custom Models + +This example loads a custom 20-class [VOC](https://github.com/ultralytics/yolov5/blob/master/data/VOC.yaml)-trained YOLOv5s model `'best.pt'` with PyTorch Hub. + +```python +import torch + +model = torch.hub.load('ultralytics/yolov5', 'custom', path='path/to/best.pt') # local model +model = torch.hub.load('path/to/yolov5', 'custom', path='path/to/best.pt', source='local') # local repo +``` + +## TensorRT, ONNX and OpenVINO Models + +PyTorch Hub supports inference on most YOLOv5 export formats, including custom trained models. See [TFLite, ONNX, CoreML, TensorRT Export tutorial](https://docs.ultralytics.com/yolov5/tutorials/model_export) for details on exporting models. + +๐Ÿ’ก ProTip: **TensorRT** may be up to 2-5X faster than PyTorch on [**GPU benchmarks**](https://github.com/ultralytics/yolov5/pull/6963) +๐Ÿ’ก ProTip: **ONNX** and **OpenVINO** may be up to 2-3X faster than PyTorch on [**CPU benchmarks**](https://github.com/ultralytics/yolov5/pull/6613) + +```python +import torch + +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.pt') # PyTorch +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.torchscript') # TorchScript +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.onnx') # ONNX +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s_openvino_model/') # OpenVINO +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.engine') # TensorRT +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.mlmodel') # CoreML (macOS-only) +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s.tflite') # TFLite +model = torch.hub.load('ultralytics/yolov5', 'custom', path='yolov5s_paddle_model/') # PaddlePaddle +``` + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. 
diff --git a/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/pytorch_hub_model_loading.md:Zone.Identifier @@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md b/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md new file mode 100755 index 0000000..a278ef2 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md @@ -0,0 +1,68 @@
+---
+comments: true
+description: Learn how to use Roboflow for organizing, labelling, preparing, and hosting your datasets for YOLOv5 models. Enhance your model deployments with our platform.
+keywords: Ultralytics, YOLOv5, Roboflow, data organization, data labelling, data preparation, model deployment, active learning, machine learning pipeline
+---
+
+# Roboflow Datasets
+
+You can now use Roboflow to organize, label, prepare, version, and host your datasets for training YOLOv5 🚀 models. Roboflow is free to use with YOLOv5 if you make your workspace public.
+
+!!! Warning
+
+    Roboflow users can use Ultralytics under the [AGPL license](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) or procure an [Enterprise license](https://ultralytics.com/license) directly from Ultralytics. Be aware that Roboflow does **not** provide Ultralytics licenses, and it is the responsibility of the user to ensure appropriate licensing.
+
+## Upload
+
+You can upload your data to Roboflow via [web UI](https://docs.roboflow.com/adding-data), [REST API](https://docs.roboflow.com/adding-data/upload-api), or [Python](https://docs.roboflow.com/python).
+
+## Labeling
+
+After uploading data to Roboflow, you can label your data and review previous labels.
+
+[![Roboflow Annotate](https://roboflow-darknet.s3.us-east-2.amazonaws.com/roboflow-annotate.gif)](https://roboflow.com/annotate)
+
+## Versioning
+
+You can make versions of your dataset with different preprocessing and offline augmentation options. YOLOv5 does online augmentations natively, so be intentional when layering Roboflow's offline augmentations on top.
+
+![Roboflow Preprocessing](https://roboflow-darknet.s3.us-east-2.amazonaws.com/robolfow-preprocessing.png)
+
+## Exporting Data
+
+You can download your data in YOLOv5 format to quickly begin training.
+
+```python
+from roboflow import Roboflow
+
+rf = Roboflow(api_key="YOUR API KEY HERE")
+project = rf.workspace().project("YOUR PROJECT")
+dataset = project.version("YOUR VERSION").download("yolov5")
+```
+
+## Custom Training
+
+We have released a custom training tutorial demonstrating all of the above capabilities. You can access the code here:
+
+[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/roboflow-ai/yolov5-custom-training-tutorial/blob/main/yolov5-custom-training.ipynb)
+
+## Active Learning
+
+The real world is messy and your model will invariably encounter situations your dataset didn't anticipate. Using [active learning](https://blog.roboflow.com/what-is-active-learning/) is an important strategy to iteratively improve your dataset and model. With the Roboflow and YOLOv5 integration, you can quickly make improvements on your model deployments by using a battle-tested machine learning pipeline; a small code sketch follows the image below.

Roboflow active learning

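+
+A sketch of one step of that loop in code, sending a hard image back to Roboflow for labeling (the `upload` call is an assumption based on the `roboflow` package; check it against your installed version):
+
+```python
+from roboflow import Roboflow
+
+rf = Roboflow(api_key="YOUR API KEY HERE")
+project = rf.workspace().project("YOUR PROJECT")
+
+# Queue an image the deployed model struggled on for human annotation
+project.upload("hard_example.jpg")
+```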
+
+## Supported Environments
+
+Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects.
+
+- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle
+- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md)
+- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md)
+- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md)
+- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls
+
+## Project Status
+
+YOLOv5 CI
+
+This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit.
diff --git a/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/roboflow_datasets_integration.md:Zone.Identifier @@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md b/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md new file mode 100755 index 0000000..58618c1 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md @@ -0,0 +1,319 @@
+---
+comments: true
+description: Detailed guide on deploying trained models on NVIDIA Jetson using TensorRT and DeepStream SDK. Optimize the inference performance on Jetson with Ultralytics.
+keywords: TensorRT, NVIDIA Jetson, DeepStream SDK, deployment, Ultralytics, YOLO, Machine Learning, AI, Deep Learning, model optimization, inference performance
+---
+
+# Deploy on NVIDIA Jetson using TensorRT and DeepStream SDK
+
+📚 This guide explains how to deploy a trained model onto the NVIDIA Jetson platform and perform inference using TensorRT and DeepStream SDK. Here we use TensorRT to maximize the inference performance on the Jetson platform.
+
+## Hardware Verification
+
+We have tested and verified this guide on the following Jetson devices:
+
+- [Seeed reComputer J1010 built with Jetson Nano module](https://www.seeedstudio.com/Jetson-10-1-A0-p-5336.html)
+- [Seeed reComputer J2021 built with Jetson Xavier NX module](https://www.seeedstudio.com/reComputer-J2021-p-5438.html)
+
+## Before You Start
+
+Make sure you have properly installed **JetPack SDK** with all the **SDK Components** and **DeepStream SDK** on the Jetson device, as these include CUDA, TensorRT and DeepStream, which are needed for this guide.
+ +JetPack SDK provides a full development environment for hardware-accelerated AI-at-the-edge development. All Jetson modules and developer kits are supported by JetPack SDK. + +There are two major installation methods: + +1. SD Card Image Method +2. NVIDIA SDK Manager Method + +You can find a very detailed installation guide on the NVIDIA [official website](https://developer.nvidia.com/jetpack-sdk-461). You can also find guides corresponding to the above-mentioned [reComputer J1010](https://wiki.seeedstudio.com/reComputer_J1010_J101_Flash_Jetpack) and [reComputer J2021](https://wiki.seeedstudio.com/reComputer_J2021_J202_Flash_Jetpack). + +## Install Necessary Packages + +- **Step 1.** Access the terminal of the Jetson device, then install and upgrade pip + +```sh +sudo apt update +sudo apt install -y python3-pip +pip3 install --upgrade pip +``` + +- **Step 2.** Clone the following repo + +```sh +git clone https://github.com/ultralytics/yolov5 +``` + +- **Step 3.** Open **requirements.txt** + +```sh +cd yolov5 +vi requirements.txt +``` + +- **Step 4.** Comment out the following lines. In vi, press **i** first to enter editing mode, make the edit, then press **ESC** and type **:wq** to save and quit + +```sh +# torch>=1.8.0 +# torchvision>=0.9.0 +``` + +**Note:** torch and torchvision are excluded for now because they will be installed later. + +- **Step 5.** Install the dependency below + +```sh +sudo apt install -y libfreetype6-dev +``` + +- **Step 6.** Install the necessary packages + +```sh +pip3 install -r requirements.txt +``` + +## Install PyTorch and Torchvision + +We cannot install PyTorch and Torchvision from pip because they are not compatible with the Jetson platform, which is based on the **ARM aarch64 architecture**. Therefore, we need to manually install a pre-built PyTorch pip wheel and compile/install Torchvision from source. + +Visit [this page](https://forums.developer.nvidia.com/t/pytorch-for-jetson) to access all the PyTorch and Torchvision links. + +Here are some of the versions supported by JetPack 4.6 and above. + +**PyTorch v1.10.0** + +Supported by JetPack 4.4 (L4T R32.4.3) / JetPack 4.4.1 (L4T R32.4.4) / JetPack 4.5 (L4T R32.5.0) / JetPack 4.5.1 (L4T R32.5.1) / JetPack 4.6 (L4T R32.6.1) with Python 3.6 + +**file_name:** torch-1.10.0-cp36-cp36m-linux_aarch64.whl +**URL:** [https://nvidia.box.com/shared/static/fjtbno0vpo676a25cgvuqc1wty0fkkg6.whl](https://nvidia.box.com/shared/static/fjtbno0vpo676a25cgvuqc1wty0fkkg6.whl) + +**PyTorch v1.12.0** + +Supported by JetPack 5.0 (L4T R34.1.0) / JetPack 5.0.1 (L4T R34.1.1) / JetPack 5.0.2 (L4T R35.1.0) with Python 3.8 + +**file_name:** torch-1.12.0a0+2c916ef.nv22.3-cp38-cp38-linux_aarch64.whl +**URL:** [https://developer.download.nvidia.com/compute/redist/jp/v50/pytorch/torch-1.12.0a0+2c916ef.nv22.3-cp38-cp38-linux_aarch64.whl](https://developer.download.nvidia.com/compute/redist/jp/v50/pytorch/torch-1.12.0a0+2c916ef.nv22.3-cp38-cp38-linux_aarch64.whl) + +- **Step 1.** Install torch according to your JetPack version, in the following format + +```sh +wget <URL> -O <file_name> +pip3 install <file_name> +``` + +For example, here we are running **JP4.6.1**, and therefore we choose **PyTorch v1.10.0** + +```sh +cd ~ +sudo apt-get install -y libopenblas-base libopenmpi-dev +wget https://nvidia.box.com/shared/static/fjtbno0vpo676a25cgvuqc1wty0fkkg6.whl -O torch-1.10.0-cp36-cp36m-linux_aarch64.whl +pip3 install torch-1.10.0-cp36-cp36m-linux_aarch64.whl +``` + +- **Step 2.** Install torchvision depending on the version of PyTorch that you have installed. 
For example, we chose **PyTorch v1.10.0**, which means we need to choose **Torchvision v0.11.1** + +```sh +sudo apt install -y libjpeg-dev zlib1g-dev +git clone --branch v0.11.1 https://github.com/pytorch/vision torchvision +cd torchvision +sudo python3 setup.py install +``` + +Here is a list of the corresponding torchvision versions to install for each PyTorch version: + +- PyTorch v1.10 - torchvision v0.11.1 +- PyTorch v1.12 - torchvision v0.13.0 + +## DeepStream Configuration for YOLOv5 + +- **Step 1.** Clone the following repo + +```sh +cd ~ +git clone https://github.com/marcoslucianops/DeepStream-Yolo +``` + +- **Step 2.** Copy **gen_wts_yoloV5.py** from **DeepStream-Yolo/utils** into the **yolov5** directory + +```sh +cp DeepStream-Yolo/utils/gen_wts_yoloV5.py yolov5 +``` + +- **Step 3.** Inside the yolov5 repo, download the **pt file** from YOLOv5 releases (example for YOLOv5s 6.1) + +```sh +cd yolov5 +wget https://github.com/ultralytics/yolov5/releases/download/v6.1/yolov5s.pt +``` + +- **Step 4.** Generate the **cfg** and **wts** files + +```sh +python3 gen_wts_yoloV5.py -w yolov5s.pt +``` + +**Note**: To change the inference size (default: 640) + +```sh +-s SIZE +--size SIZE +-s HEIGHT WIDTH +--size HEIGHT WIDTH + +Example for 1280: + +-s 1280 +or +-s 1280 1280 +``` + +- **Step 5.** Copy the generated **cfg** and **wts** files into the **DeepStream-Yolo** folder + +```sh +cp yolov5s.cfg ~/DeepStream-Yolo +cp yolov5s.wts ~/DeepStream-Yolo +``` + +- **Step 6.** Open the **DeepStream-Yolo** folder and compile the library + +```sh +cd ~/DeepStream-Yolo +CUDA_VER=11.4 make -C nvdsinfer_custom_impl_Yolo # for DeepStream 6.1 +CUDA_VER=10.2 make -C nvdsinfer_custom_impl_Yolo # for DeepStream 6.0.1 / 6.0 +``` + +- **Step 7.** Edit the **config_infer_primary_yoloV5.txt** file according to your model + +```sh +[property] +... +custom-network-config=yolov5s.cfg +model-file=yolov5s.wts +... +``` + +- **Step 8.** Edit the **deepstream_app_config** file + +```sh +... +[primary-gie] +... +config-file=config_infer_primary_yoloV5.txt +``` + +- **Step 9.** Change the video source in the **deepstream_app_config** file. A default video file is loaded, as shown below + +```sh +... +[source0] +... +uri=file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_1080p_h264.mp4 +``` + +## Run the Inference + +```sh +deepstream-app -c deepstream_app_config.txt +``` + +
*YOLOv5 with deepstream FP32*
+ +The above result is running on **Jetson Xavier NX** with **FP32** and **YOLOv5s 640x640**. We can see that the **FPS** is around **30**. + +## INT8 Calibration + +If you want to use INT8 precision for inference, you need to follow the steps below: + +- **Step 1.** Install OpenCV + +```sh +sudo apt-get install libopencv-dev +``` + +- **Step 2.** Compile/recompile the **nvdsinfer_custom_impl_Yolo** library with OpenCV support + +```sh +cd ~/DeepStream-Yolo +CUDA_VER=11.4 OPENCV=1 make -C nvdsinfer_custom_impl_Yolo # for DeepStream 6.1 +CUDA_VER=10.2 OPENCV=1 make -C nvdsinfer_custom_impl_Yolo # for DeepStream 6.0.1 / 6.0 +``` + +- **Step 3.** For the COCO dataset, download [val2017](https://drive.google.com/file/d/1gbvfn7mcsGDRZ_luJwtITL-ru2kK99aK/view?usp=sharing), extract it, and move it to the **DeepStream-Yolo** folder + +- **Step 4.** Make a new directory for calibration images + +```sh +mkdir calibration +``` + +- **Step 5.** Run the following to select 1000 random images from the COCO dataset for calibration + +```sh +for jpg in $(ls -1 val2017/*.jpg | sort -R | head -1000); do \ + cp ${jpg} calibration/; \ +done +``` + +**Note:** NVIDIA recommends at least 500 images for good accuracy. In this example, 1000 images are chosen for better accuracy (more images = more accuracy). You can change the number of images by adjusting **head -1000**; for example, use **head -2000** for 2000 images. Higher INT8_CALIB_BATCH_SIZE values will result in better accuracy and faster calibration; set it according to your GPU memory. This process can take a long time. + +- **Step 6.** Create the **calibration.txt** file with all selected images + +```sh +realpath calibration/*jpg > calibration.txt +``` + +- **Step 7.** Set environment variables + +```sh +export INT8_CALIB_IMG_PATH=calibration.txt +export INT8_CALIB_BATCH_SIZE=1 +``` + +- **Step 8.** Update the **config_infer_primary_yoloV5.txt** file + +From + +```sh +... +model-engine-file=model_b1_gpu0_fp32.engine +#int8-calib-file=calib.table +... +network-mode=0 +... +``` + +To + +```sh +... +model-engine-file=model_b1_gpu0_int8.engine +int8-calib-file=calib.table +... +network-mode=1 +... +``` + +- **Step 9.** Run the inference + +```sh +deepstream-app -c deepstream_app_config.txt +``` + +
*YOLOv5 with deepstream INT8*
+ +The above result is running on **Jetson Xavier NX** with **INT8** and **YOLOv5s 640x640**. We can see that the **FPS** is around **60**. + +## Benchmark Results + +The following table summarizes how different models perform on **Jetson Xavier NX**. + +| Model Name | Precision | Inference Size | Inference Time (ms) | FPS | +|------------|-----------|----------------|---------------------|-----| +| YOLOv5s | FP32 | 320x320 | 16.66 | 60 | +| | FP32 | 640x640 | 33.33 | 30 | +| | INT8 | 640x640 | 16.66 | 60 | +| YOLOv5n | FP32 | 640x640 | 16.66 | 60 | + +### Additional + +This tutorial was written by our friends at Seeed, @lakshanthad and Elaine. diff --git a/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/running_on_jetson_nano.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md b/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md new file mode 100755 index 0000000..6a585ef --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md @@ -0,0 +1,164 @@ +--- +comments: true +description: Boost your YOLOv5 performance with our step-by-step guide on Test-Time Augmentation (TTA). Learn to enhance your model's mAP and Recall during testing and inference. +keywords: YOLOv5, Ultralytics, Test-Time Augmentation, TTA, mAP, Recall, model performance, guide +--- + +# Test-Time Augmentation (TTA) + +📚 This guide explains how to use Test Time Augmentation (TTA) during testing and inference for improved mAP and Recall with YOLOv5 🚀. + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +## Test Normally + +Before trying TTA, we want to establish a baseline performance to compare to. This command tests YOLOv5x on COCO val2017 at image size 640 pixels. `yolov5x.pt` is the largest and most accurate model available. Other options are `yolov5s.pt`, `yolov5m.pt` and `yolov5l.pt`, or your own checkpoint from training on a custom dataset, i.e. `./weights/best.pt`. For details on all available models, please see our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints). + +```bash +python val.py --weights yolov5x.pt --data coco.yaml --img 640 --half +``` + +Output: + +```shell +val: data=./data/coco.yaml, weights=['yolov5x.pt'], batch_size=32, imgsz=640, conf_thres=0.001, iou_thres=0.65, task=val, device=, single_cls=False, augment=False, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True +YOLOv5 🚀 v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Fusing layers... 
+Model Summary: 476 layers, 87730285 parameters, 0 gradients + +val: Scanning '../datasets/coco/val2017' images and labels...4952 found, 48 missing, 0 empty, 0 corrupted: 100% 5000/5000 [00:01<00:00, 2846.03it/s] +val: New cache created: ../datasets/coco/val2017.cache + Class Images Labels P R mAP@.5 mAP@.5:.95: 100% 157/157 [02:30<00:00, 1.05it/s] + all 5000 36335 0.746 0.626 0.68 0.49 +Speed: 0.1ms pre-process, 22.4ms inference, 1.4ms NMS per image at shape (32, 3, 640, 640) # <--- baseline speed + +Evaluating pycocotools mAP... saving runs/val/exp/yolov5x_predictions.json... +... + Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.504 # <--- baseline mAP + Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.688 + Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.546 + Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.351 + Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.551 + Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.644 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.382 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.628 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.681 # <--- baseline mAR + Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.524 + Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.735 + Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.826 +``` + +## Test with TTA + +Append `--augment` to any existing `val.py` command to enable TTA, and increase the image size by about 30% for improved results. Note that inference with TTA enabled will typically take about 2-3X the time of normal inference as the images are being left-right flipped and processed at 3 different resolutions, with the outputs merged before NMS. Part of the speed decrease is simply due to larger image sizes (832 vs 640), while part is due to the actual TTA operations. + +```bash +python val.py --weights yolov5x.pt --data coco.yaml --img 832 --augment --half +``` + +Output: + +```shell +val: data=./data/coco.yaml, weights=['yolov5x.pt'], batch_size=32, imgsz=832, conf_thres=0.001, iou_thres=0.6, task=val, device=, single_cls=False, augment=True, verbose=False, save_txt=False, save_hybrid=False, save_conf=False, save_json=True, project=runs/val, name=exp, exist_ok=False, half=True +YOLOv5 ๐Ÿš€ v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Fusing layers... +/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.) + return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode) +Model Summary: 476 layers, 87730285 parameters, 0 gradients +val: Scanning '../datasets/coco/val2017' images and labels...4952 found, 48 missing, 0 empty, 0 corrupted: 100% 5000/5000 [00:01<00:00, 2885.61it/s] +val: New cache created: ../datasets/coco/val2017.cache + Class Images Labels P R mAP@.5 mAP@.5:.95: 100% 157/157 [07:29<00:00, 2.86s/it] + all 5000 36335 0.718 0.656 0.695 0.503 +Speed: 0.2ms pre-process, 80.6ms inference, 2.7ms NMS per image at shape (32, 3, 832, 832) # <--- TTA speed + +Evaluating pycocotools mAP... 
saving runs/val/exp2/yolov5x_predictions.json... +... + Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.516 # <--- TTA mAP + Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.701 + Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.562 + Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.361 + Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.564 + Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.656 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.388 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.640 + Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.696 # <--- TTA mAR + Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.553 + Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.744 + Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.833 +``` + +## Inference with TTA + +`detect.py` TTA inference operates identically to `val.py` TTA: simply append `--augment` to any existing `detect.py` command: + +```bash +python detect.py --weights yolov5s.pt --img 832 --source data/images --augment +``` + +Output: + +```bash +YOLOv5 ๐Ÿš€ v5.0-267-g6a3ee7c torch 1.9.0+cu102 CUDA:0 (Tesla P100-PCIE-16GB, 16280.875MB) + +Downloading https://github.com/ultralytics/yolov5/releases/download/v5.0/yolov5s.pt to yolov5s.pt... +100% 14.1M/14.1M [00:00<00:00, 81.9MB/s] + +Fusing layers... +Model Summary: 224 layers, 7266973 parameters, 0 gradients +image 1/2 /content/yolov5/data/images/bus.jpg: 832x640 4 persons, 1 bus, 1 fire hydrant, Done. (0.029s) +image 2/2 /content/yolov5/data/images/zidane.jpg: 480x832 3 persons, 3 ties, Done. (0.024s) +Results saved to runs/detect/exp +Done. (0.156s) +``` + +YOLOv5 test time augmentations + +### PyTorch Hub TTA + +TTA is automatically integrated into all [YOLOv5 PyTorch Hub](https://pytorch.org/hub/ultralytics_yolov5) models, and can be accessed by passing `augment=True` at inference time. + +```python +import torch + +# Model +model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5x, custom + +# Images +img = 'https://ultralytics.com/images/zidane.jpg' # or file, PIL, OpenCV, numpy, multiple + +# Inference +results = model(img, augment=True) # <--- TTA inference + +# Results +results.print() # or .show(), .save(), .crop(), .pandas(), etc. +``` + +### Customize + +You can customize the TTA ops applied in the YOLOv5 `forward_augment()` method [here](https://github.com/ultralytics/yolov5/blob/8c6f9e15bfc0000d18b976a95b9d7c17d407ec91/models/yolo.py#L125-L137). + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. 
+ +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. diff --git a/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/test_time_augmentation.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md b/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md new file mode 100755 index 0000000..22bfdb2 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md @@ -0,0 +1,65 @@ +--- +comments: true +description: Our comprehensive guide provides insights on how to train your YOLOv5 system to get the best mAP. Master dataset preparation, model selection, training settings, and more. +keywords: Ultralytics, YOLOv5, Training guide, dataset preparation, model selection, training settings, mAP results, Machine Learning, Object Detection +--- + +📚 This guide explains how to produce the best mAP and training results with YOLOv5 🚀. + +Most of the time, good results can be obtained with no changes to the models or training settings, **provided your dataset is sufficiently large and well labelled**. If at first you don't get good results, there are steps you might be able to take to improve, but we always recommend users **first train with all default settings** before considering any changes. This helps establish a performance baseline and spot areas for improvement. + +If you have questions about your training results, **we recommend you provide the maximum amount of information possible** if you expect a helpful response, including results plots (train losses, val losses, P, R, mAP), PR curve, confusion matrix, training mosaics, test results and dataset statistics images such as labels.png. All of these are located in your `project/name` directory, typically `yolov5/runs/train/exp`. + +We've put together a full guide for users looking to get the best results with their YOLOv5 training below. 
+ +## Dataset + +- **Images per class.** ≥ 1500 images per class recommended +- **Instances per class.** ≥ 10000 instances (labeled objects) per class recommended +- **Image variety.** Must be representative of the deployed environment. For real-world use cases we recommend images from different times of day, different seasons, different weather, different lighting, different angles, different sources (scraped online, collected locally, different cameras) etc. +- **Label consistency.** All instances of all classes in all images must be labelled. Partial labelling will not work. +- **Label accuracy.** Labels must closely enclose each object. No space should exist between an object and its bounding box. No objects should be missing a label. +- **Label verification.** View `train_batch*.jpg` on train start to verify your labels appear correct, i.e. see [example](https://docs.ultralytics.com/yolov5/tutorials/train_custom_data#local-logging) mosaic. +- **Background images.** Background images are images with no objects that are added to a dataset to reduce False Positives (FP). We recommend about 0-10% background images to help reduce FPs (COCO has 1000 background images for reference, 1% of the total). No labels are required for background images. + +*COCO Analysis* + +## Model Selection + +Larger models like YOLOv5x and [YOLOv5x6](https://github.com/ultralytics/yolov5/releases/tag/v5.0) will produce better results in nearly all cases, but have more parameters, require more CUDA memory to train, and are slower to run. For **mobile** deployments we recommend YOLOv5s/m, for **cloud** deployments we recommend YOLOv5l/x. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models. + +

*YOLOv5 Models*

+ +- **Start from Pretrained weights.** Recommended for small to medium-sized datasets (e.g. [VOC](https://github.com/ultralytics/yolov5/blob/master/data/VOC.yaml), [VisDrone](https://github.com/ultralytics/yolov5/blob/master/data/VisDrone.yaml), [GlobalWheat](https://github.com/ultralytics/yolov5/blob/master/data/GlobalWheat2020.yaml)). Pass the name of the model to the `--weights` argument. Models download automatically from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases). + +```bash +python train.py --data custom.yaml --weights yolov5s.pt + yolov5m.pt + yolov5l.pt + yolov5x.pt + custom_pretrained.pt +``` + +- **Start from Scratch.** Recommended for large datasets (e.g. [COCO](https://github.com/ultralytics/yolov5/blob/master/data/coco.yaml), [Objects365](https://github.com/ultralytics/yolov5/blob/master/data/Objects365.yaml), [OIv6](https://storage.googleapis.com/openimages/web/index.html)). Pass the model architecture YAML you are interested in, along with an empty `--weights ''` argument: + +```bash +python train.py --data custom.yaml --weights '' --cfg yolov5s.yaml + yolov5m.yaml + yolov5l.yaml + yolov5x.yaml +``` + +## Training Settings + +Before modifying anything, **first train with default settings to establish a performance baseline**. A full list of train.py settings can be found in the [train.py](https://github.com/ultralytics/yolov5/blob/master/train.py) argparser. + +- **Epochs.** Start with 300 epochs. If this overfits early, you can reduce the number of epochs. If overfitting does not occur after 300 epochs, train longer, i.e. 600, 1200 etc. epochs. +- **Image size.** COCO trains at a native resolution of `--img 640`, though due to the large number of small objects in the dataset it can benefit from training at higher resolutions such as `--img 1280`. If there are many small objects, custom datasets will benefit from training at native or higher resolution. Best inference results are obtained at the same `--img` as the training was run at, i.e. if you train at `--img 1280` you should also test and detect at `--img 1280`. +- **Batch size.** Use the largest `--batch-size` that your hardware allows for. Small batch sizes produce poor batchnorm statistics and should be avoided. +- **Hyperparameters.** Default hyperparameters are in [hyp.scratch-low.yaml](https://github.com/ultralytics/yolov5/blob/master/data/hyps/hyp.scratch-low.yaml). We recommend you train with default hyperparameters first before thinking of modifying any. In general, increasing augmentation hyperparameters will reduce and delay overfitting, allowing for longer training runs and higher final mAP. Reducing loss component gain hyperparameters like `hyp['obj']` will help reduce overfitting in those specific loss components. For an automated method of optimizing these hyperparameters, see our [Hyperparameter Evolution Tutorial](https://docs.ultralytics.com/yolov5/tutorials/hyperparameter_evolution). + +## Further Reading + +If you'd like to know more, a good place to start is Karpathy's 'Recipe for Training Neural Networks', which has great ideas for training that apply broadly across all ML domains: [http://karpathy.github.io/2019/04/25/recipe/](http://karpathy.github.io/2019/04/25/recipe/) + +Good luck 🍀 and let us know if you have any other questions! 
diff --git a/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/tips_for_best_training_results.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md b/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md new file mode 100755 index 0000000..7f19e9d --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md @@ -0,0 +1,229 @@ +--- +comments: true +description: Learn how to train YOLOv5 on your own custom dataset. A simple and updated guide covering image collection and organization, labelling, model training and deployment. +keywords: YOLOv5, train on custom dataset, image collection, model training, object detection, image labelling, Ultralytics, PyTorch, machine learning +--- + +📚 This guide explains how to train your own **custom dataset** with [YOLOv5](https://github.com/ultralytics/yolov5) 🚀. + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +## Train On Custom Data + + +*Ultralytics active learning*
+
+ +Creating a custom model to detect your objects is an iterative process of collecting and organizing images, labeling your objects of interest, training a model, deploying it into the wild to make predictions, and then using that deployed model to collect edge-case examples with which to repeat and improve the cycle. + +### 1. Create Dataset + +YOLOv5 models must be trained on labelled data in order to learn classes of objects in that data. There are two options for creating your dataset before you start training: +
+Use Roboflow to create your dataset in YOLO format ๐ŸŒŸ + +!!! Warning + + Roboflow users can use Ultralytics under the [AGPL license](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) or can request an [Enterprise license](https://ultralytics.com/license) directly from Ultralytics. Be aware that Roboflow does not provide Ultralytics licenses, and it is the responsibility of the user to ensure appropriate licensing. + +### 1.1 Collect Images + +Your model will learn by example. Training on images similar to the ones it will see in the wild is of the utmost importance. Ideally, you will collect a wide variety of images from the same configuration (camera, angle, lighting, etc.) as you will ultimately deploy your project. + +If this is not possible, you can start from [a public dataset](https://universe.roboflow.com/?ref=ultralytics) to train your initial model and then [sample images from the wild during inference](https://blog.roboflow.com/computer-vision-active-learning-tips/?ref=ultralytics) to improve your dataset and model iteratively. + +### 1.2 Create Labels + +Once you have collected images, you will need to annotate the objects of interest to create a ground truth for your model to learn from. + +

*YOLOv5 accuracies*

+ +[Roboflow Annotate](https://roboflow.com/annotate?ref=ultralytics) is a simple web-based tool for managing and labeling your images with your team and exporting them in [YOLOv5's annotation format](https://roboflow.com/formats/yolov5-pytorch-txt?ref=ultralytics). + +### 1.3 Prepare Dataset for YOLOv5 + +Whether you [label your images with Roboflow](https://roboflow.com/annotate?ref=ultralytics) or not, you can use it to convert your dataset into YOLO format, create a YOLOv5 YAML configuration file, and host it for importing into your training script. + +[Create a free Roboflow account](https://app.roboflow.com/?model=yolov5&ref=ultralytics) +and upload your dataset to a `Public` workspace, label any unannotated images, then generate and export a version of your dataset in `YOLOv5 Pytorch` format. + +Note: YOLOv5 does online augmentation during training, so we do not recommend applying any augmentation steps in Roboflow for training with YOLOv5. But we recommend applying the following preprocessing steps: + +

*Recommended Preprocessing Steps*

+ +* **Auto-Orient** - to strip EXIF orientation from your images. +* **Resize (Stretch)** - to the square input size of your model (640x640 is the YOLOv5 default). + +Generating a version will give you a point-in-time snapshot of your dataset so you can always go back and compare your future model training runs against it, even if you add more images or change its configuration later. +
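If you are preparing images yourself, the same two preprocessing steps can be approximated with PIL; a minimal sketch (file paths are placeholders):

```python
from PIL import Image, ImageOps

img = Image.open("raw/photo_001.jpg")
img = ImageOps.exif_transpose(img)  # Auto-Orient: apply and strip EXIF orientation
img = img.resize((640, 640))        # Resize (Stretch) to the YOLOv5 default input size
img.save("processed/photo_001.jpg")
```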

*Export in YOLOv5 Format*

+ +Export in `YOLOv5 Pytorch` format, then copy the snippet into your training script or notebook to download your dataset. + +

*Roboflow dataset download snippet*
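For reference, the download snippet is roughly the following, matching the Exporting Data example in the Roboflow Datasets page (the API key, project name and version are placeholders):

```python
from roboflow import Roboflow

rf = Roboflow(api_key="YOUR_API_KEY")
project = rf.workspace().project("YOUR_PROJECT")
dataset = project.version("YOUR_VERSION").download("yolov5")  # downloads images, labels and data.yaml
```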

+ +Now continue with `2. Select a Model`. +
+ +
+ +Or manually prepare your dataset + +### 1.1 Create dataset.yaml + +[COCO128](https://www.kaggle.com/ultralytics/coco128) is a small example dataset composed of the first 128 images in [COCO](http://cocodataset.org/#home) train2017. These same 128 images are used for both training and validation to verify our training pipeline is capable of overfitting. [data/coco128.yaml](https://github.com/ultralytics/yolov5/blob/master/data/coco128.yaml), shown below, is the dataset config file that defines 1) the dataset root directory `path` and relative paths to `train` / `val` / `test` image directories (or *.txt files with image paths) and 2) a class `names` dictionary: + +```yaml +# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..] +path: ../datasets/coco128 # dataset root dir +train: images/train2017 # train images (relative to 'path') 128 images +val: images/train2017 # val images (relative to 'path') 128 images +test: # test images (optional) + +# Classes (80 COCO classes) +names: + 0: person + 1: bicycle + 2: car + # ... + 77: teddy bear + 78: hair drier + 79: toothbrush +``` + +### 1.2 Create Labels + +After using an annotation tool to label your images, export your labels to **YOLO format**, with one `*.txt` file per image (if no objects are in the image, no `*.txt` file is required). The `*.txt` file specifications are: + +- One row per object +- Each row is in `class x_center y_center width height` format. +- Box coordinates must be in **normalized xywh** format (from 0 to 1). If your boxes are in pixels, divide `x_center` and `width` by image width, and `y_center` and `height` by image height. +- Class numbers are zero-indexed (start from 0). +
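For example, a minimal sketch of converting one pixel-space box to a YOLO label line (the image size and box values are made-up examples):

```python
# Convert a pixel-space box (x1, y1, x2, y2) to a normalized YOLO label line
img_w, img_h = 1280, 720
cls, x1, y1, x2, y2 = 0, 100, 200, 300, 400

x_center = ((x1 + x2) / 2) / img_w
y_center = ((y1 + y2) / 2) / img_h
width = (x2 - x1) / img_w
height = (y2 - y1) / img_h

print(f"{cls} {x_center:.6f} {y_center:.6f} {width:.6f} {height:.6f}")
# -> 0 0.156250 0.416667 0.156250 0.277778
```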

*Roboflow annotations*

+ +The label file corresponding to the above image contains 2 persons (class `0`) and a tie (class `27`): + +

*Roboflow dataset preprocessing*

+ +### 1.3 Organize Directories + +Organize your train and val images and labels according to the example below. YOLOv5 assumes `/coco128` is inside a `/datasets` directory **next to** the `/yolov5` directory. **YOLOv5 locates labels automatically for each image** by replacing the last instance of `/images/` in each image path with `/labels/`. For example: + +```bash +../datasets/coco128/images/im0.jpg # image +../datasets/coco128/labels/im0.txt # label +``` + +

*YOLOv5 dataset structure*

+
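A minimal sketch of that image-to-label path mapping (an illustration of the rule above, not YOLOv5's actual helper):

```python
def img2label_path(img_path: str) -> str:
    # Replace the last '/images/' with '/labels/' and swap the extension for .txt
    head, _, tail = img_path.rpartition("/images/")
    return head + "/labels/" + tail.rsplit(".", 1)[0] + ".txt"

print(img2label_path("../datasets/coco128/images/im0.jpg"))
# -> ../datasets/coco128/labels/im0.txt
```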
+ +### 2. Select a Model + +Select a pretrained model to start training from. Here we select [YOLOv5s](https://github.com/ultralytics/yolov5/blob/master/models/yolov5s.yaml), the second-smallest and fastest model available. See our README [table](https://github.com/ultralytics/yolov5#pretrained-checkpoints) for a full comparison of all models. + +

*YOLOv5 models*

+ +### 3. Train + +Train a YOLOv5s model on COCO128 by specifying dataset, batch-size, image size and either pretrained `--weights yolov5s.pt` (recommended), or randomly initialized `--weights '' --cfg yolov5s.yaml` (not recommended). Pretrained weights are auto-downloaded from the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases). + +```bash +python train.py --img 640 --epochs 3 --data coco128.yaml --weights yolov5s.pt +``` + +!!! Tip "Tip" + + 💡 Add `--cache ram` or `--cache disk` to speed up training (requires significant RAM/disk resources). + +!!! Tip "Tip" + + 💡 Always train from a local dataset. Mounted or network drives like Google Drive will be very slow. + +All training results are saved to `runs/train/` with incrementing run directories, i.e. `runs/train/exp2`, `runs/train/exp3` etc. For more details, see the Training section of our tutorial notebook. Open In Colab Open In Kaggle + +### 4. Visualize + +#### Comet Logging and Visualization 🌟 NEW + +[Comet](https://bit.ly/yolov5-readme-comet) is now fully integrated with YOLOv5. Track and visualize model metrics in real time, save your hyperparameters, datasets, and model checkpoints, and visualize your model predictions with [Comet Custom Panels](https://bit.ly/yolov5-colab-comet-panels)! Comet makes sure you never lose track of your work and makes it easy to share results and collaborate across teams of all sizes! + +Getting started is easy: + +```shell +pip install comet_ml # 1. install +export COMET_API_KEY= # 2. paste API key +python train.py --img 640 --epochs 3 --data coco128.yaml --weights yolov5s.pt # 3. train +``` + +To learn more about all the supported Comet features for this integration, check out the [Comet Tutorial](https://docs.ultralytics.com/yolov5/tutorials/comet_logging_integration). If you'd like to learn more about Comet, head over to our [documentation](https://bit.ly/yolov5-colab-comet-docs). Get started by trying out the Comet Colab Notebook: +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1RG0WOQyxlDlo5Km8GogJpIEJlg_5lyYO?usp=sharing) + +*YOLO UI* + +#### ClearML Logging and Automation 🌟 NEW + +[ClearML](https://cutt.ly/yolov5-notebook-clearml) is completely integrated into YOLOv5 to track your experimentation, manage dataset versions and even remotely execute training runs. To enable ClearML: + +- `pip install clearml` +- run `clearml-init` to connect to a ClearML server (**deploy your own open-source server [here](https://github.com/allegroai/clearml-server)**, or use our free hosted server [here](https://cutt.ly/yolov5-notebook-clearml)) + +You'll get all the features expected from an experiment manager: live updates, model upload, experiment comparison, etc. In addition, ClearML tracks uncommitted changes and installed packages, so ClearML Tasks (which is what we call experiments) are also reproducible on different machines! With only 1 extra line, we can schedule a YOLOv5 training task on a queue to be executed by any number of ClearML Agents (workers). + +You can use ClearML Data to version your dataset and then pass it to YOLOv5 simply using its unique ID. This will help you keep track of your data without adding extra hassle. Explore the [ClearML Tutorial](https://docs.ultralytics.com/yolov5/tutorials/clearml_logging_integration) for details! 
+ + +*ClearML Experiment Management UI* + +#### Local Logging + +Training results are automatically logged with [Tensorboard](https://www.tensorflow.org/tensorboard) and [CSV](https://github.com/ultralytics/yolov5/pull/4148) loggers to `runs/train`, with a new experiment directory created for each new training as `runs/train/exp2`, `runs/train/exp3`, etc. + +This directory contains train and val statistics, mosaics, labels, predictions and augmented mosaics, as well as metrics and charts including precision-recall (PR) curves and confusion matrices. + +*Local logging results* + +The `results.csv` file is updated after each epoch, and is plotted as `results.png` (below) after training completes. You can also plot any `results.csv` file manually: + +```python +from utils.plots import plot_results + +plot_results('path/to/results.csv') # plot 'results.csv' as 'results.png' +``` + +

*results.png*

+ +## Next Steps + +Once your model is trained, you can use your best checkpoint `best.pt` to: + +* Run [CLI](https://github.com/ultralytics/yolov5#quick-start-examples) or [Python](https://docs.ultralytics.com/yolov5/tutorials/pytorch_hub_model_loading) inference on new images and videos +* [Validate](https://github.com/ultralytics/yolov5/blob/master/val.py) accuracy on train, val and test splits +* [Export](https://docs.ultralytics.com/yolov5/tutorials/model_export) to TensorFlow, Keras, ONNX, TFLite, TF.js, CoreML and TensorRT formats +* [Evolve](https://docs.ultralytics.com/yolov5/tutorials/hyperparameter_evolution) hyperparameters to improve performance +* [Improve](https://docs.roboflow.com/adding-data/upload-api?ref=ultralytics) your model by sampling real-world images and adding them to your dataset + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. diff --git a/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/train_custom_data.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md b/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md new file mode 100755 index 0000000..3d919a6 --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md @@ -0,0 +1,156 @@ +--- +comments: true +description: Learn to freeze YOLOv5 layers for efficient transfer learning. Optimize your model retraining with fewer resources and faster training times. +keywords: YOLOv5, freeze layers, transfer learning, model retraining, Ultralytics +--- + +📚 This guide explains how to **freeze** YOLOv5 🚀 layers when **transfer learning**. 
Transfer learning is a useful way to quickly retrain a model on new data without having to retrain the entire network. Instead, part of the initial weights are frozen in place, and the rest of the weights are used to compute loss and are updated by the optimizer. This requires fewer resources than normal training and allows for faster training times, though it may also result in slightly reduced final trained accuracy. + +## Before You Start + +Clone repo and install [requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) in a [**Python>=3.8.0**](https://www.python.org/) environment, including [**PyTorch>=1.8**](https://pytorch.org/get-started/locally/). [Models](https://github.com/ultralytics/yolov5/tree/master/models) and [datasets](https://github.com/ultralytics/yolov5/tree/master/data) download automatically from the latest YOLOv5 [release](https://github.com/ultralytics/yolov5/releases). + +```bash +git clone https://github.com/ultralytics/yolov5 # clone +cd yolov5 +pip install -r requirements.txt # install +``` + +## Freeze Backbone + +All layers that match the `freeze` list in train.py will be frozen by setting their gradients to zero before training starts. + +```python +# Freeze +freeze = [f'model.{x}.' for x in range(freeze)] # layers to freeze +for k, v in model.named_parameters(): + v.requires_grad = True # train all layers + if any(x in k for x in freeze): + print(f'freezing {k}') + v.requires_grad = False +``` + +To see a list of module names: + +```python +for k, v in model.named_parameters(): + print(k) + +"""Output: +model.0.conv.conv.weight +model.0.conv.bn.weight +model.0.conv.bn.bias +model.1.conv.weight +model.1.bn.weight +model.1.bn.bias +model.2.cv1.conv.weight +model.2.cv1.bn.weight +... +model.23.m.0.cv2.bn.weight +model.23.m.0.cv2.bn.bias +model.24.m.0.weight +model.24.m.0.bias +model.24.m.1.weight +model.24.m.1.bias +model.24.m.2.weight +model.24.m.2.bias +""" +``` + +Looking at the model architecture, we can see that the model backbone is layers 0-9: + +```yaml +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, BottleneckCSP, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, BottleneckCSP, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, BottleneckCSP, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, BottleneckCSP, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, BottleneckCSP, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, BottleneckCSP, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, BottleneckCSP, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, BottleneckCSP, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] +``` + +So we can define the freeze list to contain all modules with 'model.0.' - 'model.9.' in their names: + +```bash +python train.py --freeze 10 +``` + +## Freeze All Layers + +To freeze the full model except for the final output convolution layers in Detect(), we set the freeze list to contain all modules with 'model.0.' 
- 'model.23.' in their names: + +```bash +python train.py --freeze 24 +``` + +## Results + +We train YOLOv5m on VOC in both of the above scenarios, along with a default model (no freezing), starting from the official COCO pretrained `--weights yolov5m.pt`: + +```bash +python train.py --batch 48 --weights yolov5m.pt --data voc.yaml --epochs 50 --cache --img 512 --hyp hyp.finetune.yaml +``` + +### Accuracy Comparison + +The results show that freezing speeds up training, but reduces final accuracy slightly. + +![Freezing training mAP50 results](https://user-images.githubusercontent.com/26833433/98394454-11579f80-205b-11eb-8e57-d8318e1cc2f8.png) + +![Freezing training mAP50-95 results](https://user-images.githubusercontent.com/26833433/98394459-13216300-205b-11eb-871b-49e20691a423.png) + +*Table results* + +### GPU Utilization Comparison + +Interestingly, the more modules that are frozen, the less GPU memory is required to train, and the lower the GPU utilization. This indicates that larger models, or models trained at a larger image size, may benefit from freezing in order to train faster. + +![Training GPU memory allocated percent](https://user-images.githubusercontent.com/26833433/98394920-c2f6d080-205b-11eb-9611-fd68522b4e0e.png) + +![Training GPU memory utilization percent](https://user-images.githubusercontent.com/26833433/98394918-bf634980-205b-11eb-948d-311036ef9325.png) + +## Supported Environments + +Ultralytics provides a range of ready-to-use environments, each pre-installed with essential dependencies such as [CUDA](https://developer.nvidia.com/cuda), [CUDNN](https://developer.nvidia.com/cudnn), [Python](https://www.python.org/), and [PyTorch](https://pytorch.org/), to kickstart your projects. + +- **Free GPU Notebooks**: Run on Gradient Open In Colab Open In Kaggle +- **Google Cloud**: [GCP Quickstart Guide](../environments/google_cloud_quickstart_tutorial.md) +- **Amazon**: [AWS Quickstart Guide](../environments/aws_quickstart_tutorial.md) +- **Azure**: [AzureML Quickstart Guide](../environments/azureml_quickstart_tutorial.md) +- **Docker**: [Docker Quickstart Guide](../environments/docker_image_quickstart_tutorial.md) Docker Pulls + +## Project Status + +YOLOv5 CI + +This badge indicates that all [YOLOv5 GitHub Actions](https://github.com/ultralytics/yolov5/actions) Continuous Integration (CI) tests are successfully passing. These CI tests rigorously check the functionality and performance of YOLOv5 across various key aspects: [training](https://github.com/ultralytics/yolov5/blob/master/train.py), [validation](https://github.com/ultralytics/yolov5/blob/master/val.py), [inference](https://github.com/ultralytics/yolov5/blob/master/detect.py), [export](https://github.com/ultralytics/yolov5/blob/master/export.py), and [benchmarks](https://github.com/ultralytics/yolov5/blob/master/benchmarks.py). They ensure consistent and reliable operation on macOS, Windows, and Ubuntu, with tests conducted every 24 hours and upon each new commit. 
diff --git a/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md:Zone.Identifier b/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/en/yolov5/tutorials/transfer_learning_with_frozen_layers.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/index.md b/ultralytics/docs/es/index.md new file mode 100755 index 0000000..df84458 --- /dev/null +++ b/ultralytics/docs/es/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: Explore a complete guide to Ultralytics YOLOv8, a high-speed, high-accuracy model for object detection and image segmentation. Installation, prediction and training tutorials, and more. +keywords: Ultralytics, YOLOv8, object detection, image segmentation, machine learning, deep learning, computer vision, YOLOv8 installation, YOLOv8 prediction, YOLOv8 training, YOLO history, YOLO licenses +---
+

+ + Ultralytics YOLO banner +

+ Ultralytics GitHub + Ultralytics LinkedIn + Ultralytics Twitter + Ultralytics YouTube + Ultralytics TikTok + Ultralytics Instagram + Ultralytics Discord +
+
+ Ultralytics Continuous Integration + Ultralytics code coverage + YOLOv8 citation + Docker pulls + Discord +
+ Run on Gradient + Open In Colab + Open In Kaggle +
+ +Introducing [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), the latest version of the acclaimed model for real-time object detection and image segmentation. YOLOv8 is built on cutting-edge advances in deep learning and computer vision, offering unparalleled performance in terms of speed and accuracy. Its streamlined design makes it suitable for various applications and easily adaptable to different hardware platforms, from edge devices to cloud APIs. + +Explore the YOLOv8 Docs, a comprehensive resource designed to help you understand and use its features and capabilities. Whether you are an experienced machine learning practitioner or new to the field, this hub aims to maximize the potential of YOLOv8 in your projects. + +!!! Note "Note" + + 🚧 Our multi-language documentation is currently under construction, and we are working hard to improve it. Thank you for your patience! 🙏 + +## Where to Start + +- **Install** `ultralytics` with pip and get up and running in minutes   [:material-clock-fast: Get Started](quickstart.md){ .md-button } +- **Predict** new images and videos with YOLOv8   [:octicons-image-16: Predict on Images](modes/predict.md){ .md-button } +- **Train** a new YOLOv8 model on your own custom dataset   [:fontawesome-solid-brain: Train a Model](modes/train.md){ .md-button } +- **Explore** YOLOv8 tasks like segment, classify, pose and track   [:material-magnify-expand: Explore Tasks](tasks/index.md){ .md-button } +
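As a minimal sketch of how little code is needed to get going (see the Quickstart and Predict pages for the full API):

```python
from ultralytics import YOLO

# Load a pretrained YOLOv8 model and run prediction on an image
model = YOLO("yolov8n.pt")
results = model("https://ultralytics.com/images/bus.jpg")
```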

+
+ +
+ Watch: How to Train a YOLOv8 model on Your Custom Dataset in Google Colab.

+ +## YOLO: A Brief History + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), a popular object detection and image segmentation model, was developed by Joseph Redmon and Ali Farhadi at the University of Washington. Launched in 2015, YOLO quickly gained popularity for its high speed and accuracy. + +- [YOLOv2](https://arxiv.org/abs/1612.08242), released in 2016, improved the original model by incorporating batch normalization, anchor boxes and dimension clusters. +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), released in 2018, further improved the model's performance using a more efficient backbone network, multiple anchors and spatial pyramid pooling. +- [YOLOv4](https://arxiv.org/abs/2004.10934) was released in 2020, introducing innovations such as Mosaic data augmentation, a new anchor-free detection head and a new loss function. +- [YOLOv5](https://github.com/ultralytics/yolov5) further improved the model's performance and added new features such as hyperparameter optimization, integrated experiment tracking and automatic export to popular export formats. +- [YOLOv6](https://github.com/meituan/YOLOv6) was open-sourced by [Meituan](https://about.meituan.com/) in 2022 and is used in many of the company's autonomous delivery robots. +- [YOLOv7](https://github.com/WongKinYiu/yolov7) added additional tasks such as pose estimation on the COCO keypoints dataset. +- [YOLOv8](https://github.com/ultralytics/ultralytics) is the latest version of YOLO from Ultralytics. As a cutting-edge, state-of-the-art (SOTA) model, YOLOv8 builds on the success of previous versions, introducing new features and improvements for enhanced performance, flexibility and efficiency. YOLOv8 supports a full range of vision AI tasks, including [detection](tasks/detect.md), [segmentation](tasks/segment.md), [pose estimation](tasks/pose.md), [tracking](modes/track.md) and [classification](tasks/classify.md). This versatility allows users to leverage YOLOv8's capabilities across a wide range of applications and domains. + +## YOLO Licenses: How are Ultralytics YOLOs licensed? + +Ultralytics offers two licensing options to accommodate diverse use cases: + +- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details. +- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, please reach out through [Ultralytics Licensing](https://ultralytics.com/license). + +Our licensing strategy is designed to ensure that any improvements to our open-source projects are returned to the community. 
We hold the principles of open source close to our hearts ❤️, and our mission is to ensure that our contributions can be used and extended in ways that benefit everyone. diff --git a/ultralytics/docs/es/index.md:Zone.Identifier b/ultralytics/docs/es/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/fast-sam.md b/ultralytics/docs/es/models/fast-sam.md new file mode 100755 index 0000000..dfaee11 --- /dev/null +++ b/ultralytics/docs/es/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: Explore FastSAM, a CNN-based solution for real-time object segmentation in images. Improved user interaction, computational efficiency, and adaptability across vision tasks. +keywords: FastSAM, machine learning, CNN-based solution, object segmentation, real-time solution, Ultralytics, vision tasks, image processing, industrial applications, user interaction +--- + +# Fast Segment Anything Model (FastSAM) + +The Fast Segment Anything Model (FastSAM) is a novel, real-time CNN-based solution for the Segment Anything task. This task is designed to segment any object within an image based on various possible user interaction prompts. FastSAM significantly reduces computational demands while maintaining competitive performance, making it a practical choice for a variety of vision tasks. + +![Fast Segment Anything Model (FastSAM) architecture overview](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## Overview + +FastSAM is designed to address the limitations of the [Segment Anything Model (SAM)](sam.md), a heavy Transformer model with substantial computational resource requirements. FastSAM splits the segment anything task into two sequential stages: all-instance segmentation and prompt-guided selection. The first stage uses [YOLOv8-seg](../tasks/segment.md) to produce the segmentation masks of all instances in the image. In the second stage, it outputs the region of interest corresponding to the prompt. + +## Key Features + +1. **Real-time Solution:** By leveraging the computational efficiency of CNNs, FastSAM provides a real-time solution for the segment anything task, making it valuable for industrial applications that require quick results. + +2. **Efficiency and Performance:** FastSAM offers a significant reduction in computational and resource demands without compromising performance quality. It achieves performance comparable to SAM but with drastically reduced computational resources, enabling real-time application. + +3. **Prompt-guided Segmentation:** FastSAM can segment any object within an image, guided by various possible user interaction prompts, providing flexibility and adaptability in different scenarios. + +4. 
**Basado en YOLOv8-seg:** FastSAM se basa en [YOLOv8-seg](../tasks/segment.md), un detector de objetos equipado con una rama de segmentación de instancias. Esto le permite producir de manera efectiva las máscaras de segmentación de todas las instancias en una imagen. + +5. **Resultados competitivos en pruebas de referencia:** En la tarea de propuesta de objetos de MS COCO, FastSAM alcanza puntuaciones altas a una velocidad significativamente más rápida que [SAM](sam.md) en una sola tarjeta NVIDIA RTX 3090, lo que demuestra su eficiencia y capacidad. + +6. **Aplicaciones prácticas:** El enfoque propuesto proporciona una solución nueva y práctica para un gran número de tareas de visión a una velocidad muy alta, varias veces más rápida que los métodos actuales. + +7. **Factibilidad de compresión del modelo:** FastSAM demuestra la factibilidad de un camino que puede reducir significativamente el esfuerzo computacional al introducir un previo (prior) artificial en la estructura, abriendo así nuevas posibilidades para la arquitectura de modelos grandes en tareas generales de visión. + +## Modelos disponibles, tareas admitidas y modos de funcionamiento + +Esta tabla presenta los modelos disponibles con sus pesos pre-entrenados específicos, las tareas que admiten y su compatibilidad con diferentes modos de funcionamiento, como [Inference](../modes/predict.md) (inferencia), [Validation](../modes/val.md) (validación), [Training](../modes/train.md) (entrenamiento) y [Export](../modes/export.md) (exportación), indicados mediante emojis ✅ para los modos admitidos y emojis ❌ para los modos no admitidos. + +| Tipo de modelo | Pesos pre-entrenados | Tareas admitidas | Inferencia | Validación | Entrenamiento | Exportación | +|----------------|----------------------|---------------------------------------------------|------------|------------|---------------|-------------| +| FastSAM-s | `FastSAM-s.pt` | [Segmentación de Instancias](../tasks/segment.md) | ✅ | ❌ | ❌ | ✅ | +| FastSAM-x | `FastSAM-x.pt` | [Segmentación de Instancias](../tasks/segment.md) | ✅ | ❌ | ❌ | ✅ | + +## Ejemplos de uso + +Los modelos FastSAM son fáciles de integrar en tus aplicaciones Python. Ultralytics proporciona una API de Python y una CLI fáciles de usar para agilizar el desarrollo. + +### Uso de predicción + +Para realizar la detección de objetos en una imagen, utiliza el método `predict` de la siguiente manera: + +!!!
Example "Ejemplo" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # Define una fuente de inferencia + source = 'ruta/hacia/bus.jpg' + + # Crea un modelo FastSAM + model = FastSAM('FastSAM-s.pt') # o FastSAM-x.pt + + # Ejecuta la inferencia en una imagen + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # Prepara un objeto de procesamiento de indicaciones + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # Indicaciรณn Everything + ann = prompt_process.everything_prompt() + + # Caja predeterminada [0,0,0,0] -> [x1,y1,x2,y2] + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # Indicaciรณn de texto + ann = prompt_process.text_prompt(text='una foto de un perro') + + # Indicaciรณn de punto + # puntos predeterminados [[0,0]] [[x1,y1],[x2,y2]] + # etiqueta_predeterminada [0] [1,0] 0:fondo, 1:primer plano + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # Carga un modelo FastSAM y segmenta todo con รฉl + yolo segment predict model=FastSAM-s.pt source=ruta/hacia/bus.jpg imgsz=640 + ``` + +Este fragmento de cรณdigo demuestra la simplicidad de cargar un modelo pre-entrenado y realizar una predicciรณn en una imagen. + +### Uso de validaciรณn + +La validaciรณn del modelo en un conjunto de datos se puede realizar de la siguiente manera: + +!!! Example "Ejemplo" + + === "Python" + ```python + from ultralytics import FastSAM + + # Crea un modelo FastSAM + model = FastSAM('FastSAM-s.pt') # o FastSAM-x.pt + + # Valida el modelo + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # Carga un modelo FastSAM y valida en el conjunto de datos de ejemplo COCO8 con un tamaรฑo de imagen de 640 + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +Ten en cuenta que FastSAM solo admite la detecciรณn y segmentaciรณn de una sola clase de objeto. Esto significa que reconocerรก y segmentarรก todos los objetos como si fueran de la misma clase. Por lo tanto, al preparar el conjunto de datos, debes convertir todos los IDs de categorรญa de objetos a 0. + +## Uso oficial de FastSAM + +FastSAM tambiรฉn estรก disponible directamente en el repositorio [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM). Aquรญ hay una descripciรณn general breve de los pasos tรญpicos que podrรญas seguir para usar FastSAM: + +### Instalaciรณn + +1. Clona el repositorio de FastSAM: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Crea y activa un entorno Conda con Python 3.9: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. Navega hasta el repositorio clonado e instala los paquetes requeridos: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. Instala el modelo CLIP: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### Ejemplo de uso + +1. Descarga un [punto de control del modelo](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing). + +2. Utiliza FastSAM para inferencia. 
Ejemplos de comandos: + + - Segmentar todo en una imagen: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - Segmentar objetos especรญficos utilizando una indicaciรณn de texto: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "el perro amarillo" + ``` + + - Segmentar objetos dentro de una caja delimitadora (proporciona las coordenadas de la caja en formato xywh): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - Segmentar objetos cerca de puntos especรญficos: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +Ademรกs, puedes probar FastSAM a travรฉs de una [demostraciรณn en Colab](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) o en la [demostraciรณn web de HuggingFace](https://huggingface.co/spaces/An-619/FastSAM) para tener una experiencia visual. + +## Citas y agradecimientos + +Nos gustarรญa agradecer a los autores de FastSAM por sus importantes contribuciones en el campo de la segmentaciรณn de instancias en tiempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +El artรญculo original de FastSAM se puede encontrar en [arXiv](https://arxiv.org/abs/2306.12156). Los autores han puesto su trabajo a disposiciรณn del pรบblico, y el cรณdigo base se puede acceder en [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). Agradecemos sus esfuerzos para avanzar en el campo y hacer que su trabajo sea accesible a la comunidad en general. diff --git a/ultralytics/docs/es/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/es/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/index.md b/ultralytics/docs/es/models/index.md new file mode 100755 index 0000000..a7137f9 --- /dev/null +++ b/ultralytics/docs/es/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Explore la amplia gama de modelos de la familia YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS y RT-DETR soportados por Ultralytics. Comienza con ejemplos para el uso tanto de CLI como de Python. +keywords: Ultralytics, documentaciรณn, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, modelos, arquitecturas, Python, CLI +--- + +# Modelos soportados por Ultralytics + +ยกBienvenido a la documentaciรณn de modelos de Ultralytics! Ofrecemos soporte para una amplia gama de modelos, cada uno adaptado a tareas especรญficas como [detecciรณn de objetos](../tasks/detect.md), [segmentaciรณn de instancias](../tasks/segment.md), [clasificaciรณn de imรกgenes](../tasks/classify.md), [estimaciรณn de posturas](../tasks/pose.md), y [seguimiento de mรบltiples objetos](../modes/track.md). Si estรกs interesado en contribuir con tu arquitectura de modelo a Ultralytics, consulta nuestra [Guรญa de Contribuciรณn](../../help/contributing.md). + +!!! 
Note "Nota" + + 🚧 Estamos trabajando arduamente para mejorar nuestra documentación en varios idiomas, actualmente en construcción. ¡Gracias por tu paciencia! 🙏 + +## Modelos destacados + +Aquí están algunos de los modelos clave soportados: + +1. **[YOLOv3](yolov3.md)**: La tercera iteración de la familia de modelos YOLO, creada originalmente por Joseph Redmon, conocida por su capacidad de detección de objetos en tiempo real de manera eficiente. +2. **[YOLOv4](yolov4.md)**: Una actualización nativa de darknet para YOLOv3, lanzada por Alexey Bochkovskiy en 2020. +3. **[YOLOv5](yolov5.md)**: Una versión mejorada de la arquitectura YOLO por Ultralytics, que ofrece un mejor equilibrio entre rendimiento y velocidad en comparación con versiones anteriores. +4. **[YOLOv6](yolov6.md)**: Lanzado por [Meituan](https://about.meituan.com/) en 2022, y utilizado en muchos de los robots de entrega autónomos de la compañía. +5. **[YOLOv7](yolov7.md)**: Modelos YOLO actualizados lanzados en 2022 por los autores de YOLOv4. +6. **[YOLOv8](yolov8.md) NUEVO 🚀**: La última versión de la familia YOLO, con capacidades mejoradas como segmentación de instancias, estimación de posturas/puntos clave y clasificación. +7. **[Modelo Segment Anything (SAM)](sam.md)**: Modelo Segment Anything (SAM) de Meta. +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM para aplicaciones móviles, por la Universidad de Kyung Hee. +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM por el Grupo de Análisis de Imagen y Video, Instituto de Automatización, Academia China de Ciencias. +10. **[YOLO-NAS](yolo-nas.md)**: Modelos YOLO de Búsqueda de Arquitectura Neural (NAS). +11. **[Transformadores de Detección en Tiempo Real (RT-DETR)](rtdetr.md)**: Modelos de Transformador de Detección en Tiempo Real (RT-DETR) de PaddlePaddle de Baidu. + +

+**Mira:** Ejecuta modelos YOLO de Ultralytics en solo unas pocas líneas de código.
+ +## Empezando: Ejemplos de Uso + +Este ejemplo proporciona ejemplos simples de entrenamiento e inferencia YOLO. Para la documentaciรณn completa de estos y otros [modos](../modes/index.md), consulta las pรกginas de documentaciรณn de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md). + +Nota que el siguiente ejemplo es para los modelos YOLOv8 [Detect](../tasks/detect.md) para detecciรณn de objetos. Para tareas adicionales soportadas, consulta la documentaciรณn de [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) y [Pose](../tasks/pose.md). + +!!! Example "Ejemplo" + + === "Python" + + Los modelos pre-entrenados `*.pt` de PyTorch asรญ como los archivos de configuraciรณn `*.yaml` se pueden pasar a las clases `YOLO()`, `SAM()`, `NAS()` y `RTDETR()` para crear una instancia de modelo en Python: + + ```python + from ultralytics import YOLO + + # Cargar un modelo YOLOv8n preentrenado en COCO + model = YOLO('yolov8n.pt') + + # Mostrar informaciรณn del modelo (opcional) + model.info() + + # Entrenar el modelo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Ejecutar inferencia con el modelo YOLOv8n en la imagen 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + Los comandos CLI estรกn disponibles para ejecutar directamente los modelos: + + ```bash + # Cargar un modelo YOLOv8n preentrenado en COCO y entrenarlo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Cargar un modelo YOLOv8n preentrenado en COCO y ejecutar inferencia en la imagen 'bus.jpg' + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## Contribuir con Nuevos Modelos + +ยฟInteresado en contribuir con tu modelo a Ultralytics? ยกGenial! Siempre estamos abiertos a expandir nuestro portafolio de modelos. + +1. **Haz un Fork del Repositorio**: Comienza haciendo un fork del [repositorio de GitHub de Ultralytics](https://github.com/ultralytics/ultralytics). + +2. **Clona tu Fork**: Clona tu fork a tu mรกquina local y crea una nueva rama para trabajar. + +3. **Implementa tu Modelo**: Aรฑade tu modelo siguiendo los estรกndares de codificaciรณn y directrices proporcionadas en nuestra [Guรญa de Contribuciรณn](../../help/contributing.md). + +4. **Prueba Rigurosamente**: Asegรบrate de probar tu modelo rigurosamente, tanto de forma aislada como parte del proceso. + +5. **Crea un Pull Request**: Una vez que estรฉs satisfecho con tu modelo, crea un pull request al repositorio principal para revisiรณn. + +6. **Revisiรณn de Cรณdigo y Fusiรณn**: Despuรฉs de la revisiรณn, si tu modelo cumple con nuestros criterios, serรก fusionado al repositorio principal. + +Para pasos detallados, consulta nuestra [Guรญa de Contribuciรณn](../../help/contributing.md). 
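+
+Como referencia rápida adicional, un boceto mínimo (asumiendo que los pesos citados en esta página se descargan automáticamente en la primera ejecución y que las dependencias necesarias están instaladas) que ilustra cómo todas las familias de modelos comparten la misma interfaz básica:
+
+```python
+from ultralytics import NAS, RTDETR, SAM, YOLO
+
+# Cada familia se instancia desde un archivo de pesos *.pt
+# y expone los mismos métodos básicos (info, predict, val, export...)
+modelos = {
+    'YOLOv8': YOLO('yolov8n.pt'),
+    'SAM': SAM('sam_b.pt'),
+    'RT-DETR': RTDETR('rtdetr-l.pt'),
+    'YOLO-NAS': NAS('yolo_nas_s.pt'),
+}
+
+for nombre, modelo in modelos.items():
+    print(f'--- {nombre} ---')
+    modelo.info()  # muestra la arquitectura y el número de parámetros
+```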
diff --git a/ultralytics/docs/es/models/index.md:Zone.Identifier b/ultralytics/docs/es/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/mobile-sam.md b/ultralytics/docs/es/models/mobile-sam.md new file mode 100755 index 0000000..bf68ab7 --- /dev/null +++ b/ultralytics/docs/es/models/mobile-sam.md @@ -0,0 +1,116 @@ +--- +comments: true +description: Obtรฉn mรกs informaciรณn sobre MobileSAM, su implementaciรณn, comparaciรณn con SAM original y cรณmo descargarlo y probarlo en el framework de Ultralytics. ยกMejora tus aplicaciones mรณviles hoy mismo! +keywords: MobileSAM, Ultralytics, SAM, aplicaciones mรณviles, Arxiv, GPU, API, codificador de imรกgenes, decodificador de mรกscaras, descarga de modelos, mรฉtodo de prueba +--- + +![Logotipo de MobileSAM](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# Segmentaciรณn Mรณvil de Cualquier Cosa (MobileSAM) + +El artรญculo de MobileSAM ahora estรก disponible en [arXiv](https://arxiv.org/pdf/2306.14289.pdf). + +Una demostraciรณn de MobileSAM funcionando en una CPU se puede acceder en este [enlace de demostraciรณn](https://huggingface.co/spaces/dhkim2810/MobileSAM). El rendimiento en una CPU Mac i5 tarda aproximadamente 3 segundos. En la demostraciรณn de Hugging Face, la interfaz y las CPUs de menor rendimiento contribuyen a una respuesta mรกs lenta, pero sigue funcionando de manera efectiva. + +MobileSAM se implementa en varios proyectos, incluyendo [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling) y [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D). + +MobileSAM se entrena en una sola GPU con un conjunto de datos de 100k (1% de las imรกgenes originales) en menos de un dรญa. El cรณdigo para este entrenamiento estarรก disponible en el futuro. + +## Modelos Disponibles, Tareas Admitidas y Modos de Operaciรณn + +Esta tabla presenta los modelos disponibles con sus pesos pre-entrenados especรญficos, las tareas que admiten y su compatibilidad con diferentes modos de operaciรณn como [Inference (Inferencia)](../modes/predict.md), [Validation (Validaciรณn)](../modes/val.md), [Training (Entrenamiento)](../modes/train.md) y [Export (Exportaciรณn)](../modes/export.md), indicados por emojis โœ… para los modos admitidos y emojis โŒ para los modos no admitidos. + +| Tipo de Modelo | Pesos Pre-entrenados | Tareas Admitidas | Inferencia | Validaciรณn | Entrenamiento | Exportaciรณn | +|----------------|----------------------|---------------------------------------------------|------------|------------|---------------|-------------| +| MobileSAM | `mobile_sam.pt` | [Segmentaciรณn de Instancias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## Adaptaciรณn de SAM a MobileSAM + +Dado que MobileSAM mantiene el mismo pipeline que SAM original, hemos incorporado el pre-procesamiento, post-procesamiento y todas las demรกs interfaces del original. En consecuencia, aquellos que actualmente utilizan SAM original pueden hacer la transiciรณn a MobileSAM con un esfuerzo mรญnimo. + +MobileSAM tiene un rendimiento comparable a SAM original y mantiene el mismo pipeline excepto por un cambio en el codificador de imรกgenes. Especรญficamente, reemplazamos el codificador de imรกgenes original ViT-H pesado (632M) por uno mรกs pequeรฑo, Tiny-ViT (5M). 
En una sola GPU, MobileSAM funciona a aproximadamente 12ms por imagen: 8ms en el codificador de imรกgenes y 4ms en el decodificador de mรกscaras. + +La siguiente tabla proporciona una comparaciรณn de los codificadores de imรกgenes basados en ViT: + +| Codificador de Imรกgenes | SAM Original | MobileSAM | +|-------------------------|--------------|-----------| +| Parรกmetros | 611M | 5M | +| Velocidad | 452ms | 8ms | + +Tanto SAM original como MobileSAM utilizan el mismo decodificador de mรกscaras guiado por instrucciones: + +| Decodificador de Mรกscaras | SAM Original | MobileSAM | +|---------------------------|--------------|-----------| +| Parรกmetros | 3.876M | 3.876M | +| Velocidad | 4ms | 4ms | + +Aquรญ estรก la comparaciรณn de todo el pipeline: + +| Pipeline Completo (Enc+Dec) | SAM Original | MobileSAM | +|-----------------------------|--------------|-----------| +| Parรกmetros | 615M | 9.66M | +| Velocidad | 456ms | 12ms | + +El rendimiento de MobileSAM y SAM original se demuestra utilizando tanto un punto como una caja como instrucciones. + +![Imagen con Punto como Instrucciรณn](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![Imagen con Caja como Instrucciรณn](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +Con su rendimiento superior, MobileSAM es aproximadamente 5 veces mรกs pequeรฑo y 7 veces mรกs rรกpido que el actual FastSAM. Mรกs detalles estรกn disponibles en la [pรกgina del proyecto de MobileSAM](https://github.com/ChaoningZhang/MobileSAM). + +## Probando MobileSAM en Ultralytics + +Al igual que SAM original, ofrecemos un mรฉtodo sencillo de prueba en Ultralytics, que incluye modos tanto para instrucciones de Punto como para Caja. + +### Descarga del Modelo + +Puedes descargar el modelo [aquรญ](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt). + +### Instrucciรณn de Punto + +!!! Example "Ejemplo" + + === "Python" + ```python + from ultralytics import SAM + + # Carga el modelo + model = SAM('mobile_sam.pt') + + # Predice un segmento basado en una instrucciรณn de punto + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### Instrucciรณn de Caja + +!!! Example "Ejemplo" + + === "Python" + ```python + from ultralytics import SAM + + # Carga el modelo + model = SAM('mobile_sam.pt') + + # Predice un segmento basado en una instrucciรณn de caja + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +Hemos implementado `MobileSAM` y `SAM` utilizando la misma API. Para obtener mรกs informaciรณn sobre cรณmo usarlo, consulta la [pรกgina de SAM](sam.md). + +## Citaciones y Reconocimientos + +Si encuentras รบtil MobileSAM en tu investigaciรณn o trabajo de desarrollo, considera citar nuestro artรญculo: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } + ``` diff --git a/ultralytics/docs/es/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/es/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/rtdetr.md b/ultralytics/docs/es/models/rtdetr.md new file mode 100755 index 0000000..fada0ae --- /dev/null +++ b/ultralytics/docs/es/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: Descubre las características y beneficios de RT-DETR, un eficiente y adaptable detector de objetos en tiempo real desarrollado por Baidu y potenciado por Vision Transformers, que incluye modelos pre-entrenados. +keywords: RT-DETR, Baidu, Vision Transformers, detección de objetos, rendimiento en tiempo real, CUDA, TensorRT, selección de consultas IoU, Ultralytics, API de Python, PaddlePaddle +--- + +# RT-DETR de Baidu: Un Detector de Objetos en Tiempo Real Basado en Vision Transformers + +## Resumen + +Real-Time Detection Transformer (RT-DETR), desarrollado por Baidu, es un avanzado detector de objetos de extremo a extremo que proporciona un rendimiento en tiempo real manteniendo una alta precisión. Utiliza la potencia de Vision Transformers (ViT) para procesar de manera eficiente características de múltiples escalas, desacoplando la interacción intra-escala y la fusión entre escalas. RT-DETR es altamente adaptable y permite ajustar de manera flexible la velocidad de inferencia utilizando diferentes capas de decodificador sin necesidad de volver a entrenar el modelo. El modelo se destaca en plataformas aceleradas como CUDA con TensorRT, superando a muchos otros detectores de objetos en tiempo real. + +![Ejemplo de imagen del modelo](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png) +**Resumen de RT-DETR de Baidu.** El diagrama de la arquitectura del modelo RT-DETR muestra las últimas tres etapas de la red troncal (S3, S4, S5) como entrada al codificador. El eficiente codificador híbrido transforma características de múltiples escalas en una secuencia de características de imagen a través del módulo de interacción de características intra-escala (AIFI) y el módulo de fusión de características entre escalas (CCFM). Se utiliza la selección de consultas IoU-aware para seleccionar un número fijo de características de imagen que servirán como consultas iniciales de objetos para el decodificador. Finalmente, el decodificador con cabeceras de predicción auxiliares optimiza iterativamente las consultas de objetos para generar cajas y puntuaciones de confianza ([fuente](https://arxiv.org/pdf/2304.08069.pdf)). + +### Características Clave + +- **Codificador Híbrido Eficiente:** RT-DETR de Baidu utiliza un codificador híbrido eficiente que procesa características de múltiples escalas desacoplando la interacción intra-escala y la fusión entre escalas. Este diseño único basado en Vision Transformers reduce los costos computacionales y permite la detección de objetos en tiempo real.
+- **Selección de Consultas IoU-aware:** RT-DETR de Baidu mejora la inicialización de las consultas de objetos utilizando la selección de consultas IoU-aware. Esto permite que el modelo se enfoque en los objetos más relevantes de la escena, mejorando la precisión en la detección. +- **Velocidad de Inferencia Adaptable:** RT-DETR de Baidu admite ajustes flexibles de la velocidad de inferencia utilizando diferentes capas de decodificador sin necesidad de volver a entrenar el modelo. Esta adaptabilidad facilita la aplicación práctica en diversos escenarios de detección de objetos en tiempo real. + +## Modelos Pre-entrenados + +La API de Python de Ultralytics proporciona modelos pre-entrenados de RT-DETR de PaddlePaddle en diferentes escalas: + +- RT-DETR-L: 53.0% AP en COCO val2017, 114 FPS en GPU T4 +- RT-DETR-X: 54.8% AP en COCO val2017, 74 FPS en GPU T4 + +## Ejemplos de Uso + +Esta sección proporciona ejemplos sencillos de entrenamiento e inferencia de RT-DETR. Para obtener una documentación completa sobre estos y otros [modos](../modes/index.md), consulta las páginas de documentación de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md). + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import RTDETR + + # Cargar un modelo RT-DETR-l pre-entrenado en COCO + model = RTDETR('rtdetr-l.pt') + + # Mostrar información del modelo (opcional) + model.info() + + # Entrenar el modelo en el conjunto de datos de ejemplo COCO8 durante 100 épocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Realizar inferencia con el modelo RT-DETR-l en la imagen 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ```bash + # Cargar un modelo RT-DETR-l pre-entrenado en COCO y entrenarlo en el conjunto de datos de ejemplo COCO8 durante 100 épocas + yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640 + + # Cargar un modelo RT-DETR-l pre-entrenado en COCO y realizar inferencia en la imagen 'bus.jpg' + yolo predict model=rtdetr-l.pt source=path/to/bus.jpg + ``` + +## Tareas y Modos Admitidos + +Esta tabla presenta los tipos de modelos, los pesos pre-entrenados específicos, las tareas admitidas por cada modelo y los diversos modos ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) admitidos, indicados por los emojis ✅. + +| Tipo de Modelo | Pesos Pre-entrenados | Tareas Admitidas | Inferencia | Validación | Entrenamiento | Exportación | +|---------------------|----------------------|--------------------------------------------|------------|------------|---------------|-------------| +| RT-DETR Large | `rtdetr-l.pt` | [Detección de Objetos](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | +| RT-DETR Extra-Large | `rtdetr-x.pt` | [Detección de Objetos](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | + +## Citaciones y Agradecimientos + +Si utilizas RT-DETR de Baidu en tu investigación o trabajo de desarrollo, por favor cita el [artículo original](https://arxiv.org/abs/2304.08069): + +!!!
Quote "" + + === "BibTeX" + + ```bibtex + @misc{lv2023detrs, + title={DETRs Beat YOLOs on Real-time Object Detection}, + author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu}, + year={2023}, + eprint={2304.08069}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +Nos gustaría agradecer a Baidu y al equipo de [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) por crear y mantener este valioso recurso para la comunidad de visión por computadora. Apreciamos enormemente su contribución al campo con el desarrollo del detector de objetos en tiempo real basado en Vision Transformers, RT-DETR. + +*keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, detección de objetos en tiempo real, detección de objetos basada en Vision Transformers, modelos pre-entrenados PaddlePaddle RT-DETR, uso de RT-DETR de Baidu, API de Python de Ultralytics* diff --git a/ultralytics/docs/es/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/es/models/rtdetr.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/rtdetr.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/sam.md b/ultralytics/docs/es/models/sam.md new file mode 100755 index 0000000..966c5be --- /dev/null +++ b/ultralytics/docs/es/models/sam.md @@ -0,0 +1,226 @@ +--- +comments: true +description: Explora el revolucionario Segment Anything Model (SAM) de Ultralytics que permite la segmentación de imágenes en tiempo real. Aprende sobre su segmentación por indicación, rendimiento en la transferencia sin entrenamiento y cómo usarlo. +keywords: Ultralytics, segmentación de imágenes, Segment Anything Model, SAM, SA-1B dataset, rendimiento en tiempo real, transferencia sin entrenamiento, detección de objetos, análisis de imágenes, aprendizaje automático +--- + +# Segment Anything Model (SAM) + +Bienvenido a la frontera de la segmentación de imágenes con el Segment Anything Model, o SAM. Este modelo revolucionario ha cambiado el juego al introducir la segmentación de imágenes por indicación con rendimiento en tiempo real, estableciendo nuevos estándares en el campo. + +## Introducción a SAM: Segment Anything Model + +El Segment Anything Model, o SAM, es un modelo de segmentación de imágenes de vanguardia que permite la segmentación por indicación, ofreciendo una versatilidad sin igual en las tareas de análisis de imágenes. SAM forma el corazón de la iniciativa Segment Anything, un proyecto innovador que presenta un modelo, una tarea y un conjunto de datos nuevos para la segmentación de imágenes. + +El diseño avanzado de SAM le permite adaptarse a nuevas distribuciones y tareas de imágenes sin conocimientos previos, una característica conocida como transferencia sin entrenamiento (*zero-shot*). Entrenado en el extenso [conjunto de datos SA-1B](https://ai.facebook.com/datasets/segment-anything/), que contiene más de mil millones de máscaras distribuidas en once millones de imágenes seleccionadas cuidadosamente, SAM ha demostrado un impresionante rendimiento en la transferencia sin entrenamiento, superando en muchos casos los resultados de supervisión completa anteriores.
+ +![Ejemplo de imagen del conjunto de datos](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +Imรกgenes de ejemplo con mรกscaras superpuestas de nuestro nuevo conjunto de datos, SA-1B. SA-1B contiene 11 millones de imรกgenes diversas de alta resoluciรณn, con licencia y protecciรณn de la privacidad, y 1.1 mil millones de mรกscaras de segmentaciรณn de alta calidad. Estas mรกscaras fueron anotadas completamente automรกticamente por SAM y, segรบn las calificaciones humanas y numerosos experimentos, tienen una alta calidad y diversidad. Las imรกgenes se agrupan por nรบmero de mรกscaras por imagen para su visualizaciรณn (hay aproximadamente 100 mรกscaras por imagen en promedio). + +## Caracterรญsticas clave del Segment Anything Model (SAM) + +- **Tarea de segmentaciรณn por indicaciรณn**: SAM fue diseรฑado teniendo en cuenta una tarea de segmentaciรณn por indicaciรณn, lo que le permite generar mรกscaras de segmentaciรณn vรกlidas a partir de cualquier indicaciรณn dada, como pistas espaciales o de texto que identifican un objeto. +- **Arquitectura avanzada**: El Segment Anything Model utiliza un potente codificador de imรกgenes, un codificador de indicaciones y un decodificador de mรกscaras ligero. Esta arquitectura รบnica permite la indicaciรณn flexible, el cรกlculo de mรกscaras en tiempo real y la conciencia de ambigรผedades en las tareas de segmentaciรณn. +- **El conjunto de datos SA-1B**: Introducido por el proyecto Segment Anything, el conjunto de datos SA-1B cuenta con mรกs de mil millones de mรกscaras en once millones de imรกgenes. Como el conjunto de datos de segmentaciรณn mรกs grande hasta la fecha, proporciona a SAM una fuente de datos de entrenamiento diversa y a gran escala. +- **Rendimiento en la transferencia sin entrenamiento**: SAM muestra un destacado rendimiento en la transferencia sin entrenamiento en diversas tareas de segmentaciรณn, lo que lo convierte en una herramienta lista para usar en diversas aplicaciones con una necesidad mรญnima de ingenierรญa de indicaciรณn. + +Para obtener una visiรณn mรกs detallada del Segment Anything Model y el conjunto de datos SA-1B, visita el [sitio web de Segment Anything](https://segment-anything.com) y consulta el artรญculo de investigaciรณn [Segment Anything](https://arxiv.org/abs/2304.02643). + +## Modelos disponibles, tareas admitidas y modos de funcionamiento + +Esta tabla muestra los modelos disponibles con sus pesos pre-entrenados especรญficos, las tareas que admiten y su compatibilidad con diferentes modos de funcionamiento como [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) y [Export](../modes/export.md), indicados con emojis โœ… para los modos admitidos y emojis โŒ para los modos no admitidos. + +| Tipo de modelo | Pesos pre-entrenados | Tareas admitidas | Inference | Validation | Training | Export | +|----------------|----------------------|---------------------------------------------------|-----------|------------|----------|--------| +| SAM base | `sam_b.pt` | [Segmentaciรณn de instancias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| SAM large | `sam_l.pt` | [Segmentaciรณn de instancias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## Cรณmo usar SAM: Versatilidad y potencia en la segmentaciรณn de imรกgenes + +El Segment Anything Model se puede utilizar para una multitud de tareas posteriores que van mรกs allรก de sus datos de entrenamiento. 
Esto incluye detección de bordes, generación de propuestas de objetos, segmentación de instancias y predicción preliminar de texto a máscara. Con la ingeniería de indicación, SAM puede adaptarse rápidamente a nuevas tareas y distribuciones de datos de manera *zero-shot* (sin entrenamiento adicional), estableciéndose como una herramienta versátil y potente para todas tus necesidades de segmentación de imágenes. + +### Ejemplo de predicción con SAM + +!!! Example "Segmentar con indicaciones" + + Segmenta la imagen con las indicaciones proporcionadas. + + === "Python" + + ```python + from ultralytics import SAM + + # Cargar un modelo + modelo = SAM('sam_b.pt') + + # Mostrar información del modelo (opcional) + modelo.info() + + # Ejecutar inferencia con indicaciones de bboxes + modelo('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # Ejecutar inferencia con indicaciones de puntos + modelo('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "Segmentar todo" + + Segmenta toda la imagen. + + === "Python" + + ```python + from ultralytics import SAM + + # Cargar un modelo + modelo = SAM('sam_b.pt') + + # Mostrar información del modelo (opcional) + modelo.info() + + # Ejecutar inferencia + modelo('ruta/hacia/imagen.jpg') + ``` + + === "CLI" + + ```bash + # Ejecutar inferencia con un modelo SAM + yolo predict model=sam_b.pt source=ruta/hacia/imagen.jpg + ``` + +- La lógica aquí es segmentar toda la imagen si no se proporcionan indicaciones (bboxes/puntos/máscaras). + +!!! Example "Ejemplo de SAMPredictor" + + De esta manera, puedes configurar una imagen una vez y ejecutar inferencia con múltiples indicaciones sin ejecutar el codificador de imágenes varias veces. + + === "Inferencia con indicaciones" + + ```python + import cv2 + + from ultralytics.models.sam import Predictor as SAMPredictor + + # Crear SAMPredictor (el argumento de la clase se llama `overrides`) + opciones = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=opciones) + + # Establecer imagen + predictor.set_image("ultralytics/assets/zidane.jpg") # establecer con archivo de imagen + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # establecer con np.ndarray + resultados = predictor(bboxes=[439, 437, 524, 709]) + resultados = predictor(points=[900, 370], labels=[1]) + + # Restablecer imagen + predictor.reset_image() + ``` + + Segmentar todo con argumentos adicionales. + + === "Segmentar todo" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # Crear SAMPredictor + opciones = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=opciones) + + # Segmentar con argumentos adicionales + resultados = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64) + ``` + +- Más argumentos adicionales para `Segmentar todo` en [`Referencia de Predictor/generate`](../../../reference/models/sam/predict.md).
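+
+Como complemento, un boceto mínimo de cómo inspeccionar las máscaras devueltas por las llamadas anteriores, asumiendo la API estándar de objetos `Results` del paquete `ultralytics`:
+
+```python
+from ultralytics import SAM
+
+modelo = SAM('sam_b.pt')
+
+# Inferencia con una indicación de caja
+resultados = modelo('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+
+# Cada resultado expone las máscaras predichas como un tensor (n_mascaras, alto, ancho)
+for r in resultados:
+    if r.masks is not None:
+        print(r.masks.data.shape)
+```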
+ +## SAM comparado con YOLOv8 + +Aquí comparamos el modelo SAM más pequeño de Meta, SAM-b, con el modelo de segmentación más pequeño de Ultralytics, [YOLOv8n-seg](../tasks/segment.md): + +| Modelo | Tamaño | Parámetros | Velocidad (CPU) | +|-------------------------------------------------|-------------------------------------|------------------------------|-------------------------------------| +| SAM-b de Meta | 358 MB | 94.7 M | 51096 ms/im | +| [MobileSAM](mobile-sam.md) | 40.7 MB | 10.1 M | 46122 ms/im | +| [FastSAM-s](fast-sam.md) con backbone YOLOv8 | 23.7 MB | 11.8 M | 115 ms/im | +| YOLOv8n-seg de Ultralytics | **6.7 MB** (53.4 veces más pequeño) | **3.4 M** (27.9 veces menos) | **59 ms/im** (866 veces más rápido) | + +Esta comparación muestra las diferencias de órdenes de magnitud en los tamaños y velocidades de los modelos. Si bien SAM presenta capacidades únicas para la segmentación automática, no es un competidor directo de los modelos de segmentación YOLOv8, que son más pequeños, más rápidos y más eficientes. + +Las pruebas se realizaron en una MacBook Apple M2 de 2023 con 16 GB de RAM. Para reproducir esta prueba: + +!!! Example "Ejemplo" + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # Perfil del modelo SAM-b + modelo = SAM('sam_b.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil de MobileSAM + modelo = SAM('mobile_sam.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil de FastSAM-s + modelo = FastSAM('FastSAM-s.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil de YOLOv8n-seg + modelo = YOLO('yolov8n-seg.pt') + modelo.info() + modelo('ultralytics/assets') + ``` + +## Auto-anotación: un camino rápido hacia conjuntos de datos de segmentación + +La auto-anotación es una característica clave de SAM que permite a los usuarios generar un [conjunto de datos de segmentación](https://docs.ultralytics.com/datasets/segment) utilizando un modelo de detección pre-entrenado. Esta función permite una anotación rápida y precisa de un gran número de imágenes, evitando la necesidad del etiquetado manual, que consume mucho tiempo. + +### Generar tu conjunto de datos de segmentación utilizando un modelo de detección + +Para auto-anotar tu conjunto de datos con el marco de trabajo de Ultralytics, utiliza la función `auto_annotate` como se muestra a continuación: + +!!! Example "Ejemplo" + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="ruta/a/las/imagenes", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| Argumento | Tipo | Descripción | Predeterminado | +|------------|---------------------|-----------------------------------------------------------------------------------------------------------------------|----------------| +| data | str | Ruta a una carpeta que contiene las imágenes a anotar. | | +| det_model | str, opcional | Modelo de detección YOLO pre-entrenado. Por defecto, 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | str, opcional | Modelo de segmentación SAM pre-entrenado. Por defecto, 'sam_b.pt'. | 'sam_b.pt' | +| device | str, opcional | Dispositivo en el que ejecutar los modelos. Por defecto, una cadena vacía (CPU o GPU, si está disponible). | | +| output_dir | str, None, opcional | Directorio para guardar los resultados anotados. Por defecto, una carpeta 'labels' en el mismo directorio que 'data'. | None |
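+
+A modo de ilustración, un boceto que pasa explícitamente todos los argumentos de la tabla anterior (las rutas son hipotéticas; los valores coinciden con los predeterminados documentados):
+
+```python
+from ultralytics.data.annotator import auto_annotate
+
+# Auto-anotación con todos los argumentos de la tabla, explícitos
+auto_annotate(
+    data='ruta/a/las/imagenes',  # carpeta con las imágenes a anotar (ruta hipotética)
+    det_model='yolov8x.pt',      # detector YOLO pre-entrenado
+    sam_model='sam_b.pt',        # modelo de segmentación SAM pre-entrenado
+    device='',                   # '' = CPU, o GPU si está disponible
+    output_dir=None,             # None = carpeta 'labels' junto a 'data'
+)
+```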
+ +La función `auto_annotate` toma la ruta de tus imágenes, con argumentos opcionales para especificar los modelos de detección y segmentación SAM pre-entrenados, el dispositivo en el que ejecutar los modelos y el directorio de salida para guardar los resultados anotados. + +La auto-anotación con modelos pre-entrenados puede reducir drásticamente el tiempo y el esfuerzo requeridos para crear conjuntos de datos de segmentación de alta calidad. Esta característica es especialmente beneficiosa para investigadores y desarrolladores que trabajan con grandes colecciones de imágenes, ya que les permite centrarse en el desarrollo y la evaluación de modelos en lugar de en la anotación manual. + +## Citas y agradecimientos + +Si encuentras útil SAM en tu trabajo de investigación o desarrollo, considera citar nuestro artículo: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +Nos gustaría expresar nuestro agradecimiento a Meta AI por crear y mantener este valioso recurso para la comunidad de visión por computadora. + +*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, segmentación de imágenes, segmentación por indicación, rendimiento en la transferencia sin entrenamiento, conjunto de datos SA-1B, arquitectura avanzada, auto-anotación, Ultralytics, modelos pre-entrenados, SAM base, SAM large, segmentación de instancias, visión por computadora, IA, inteligencia artificial, aprendizaje automático, anotación de datos, máscaras de segmentación, modelo de detección, modelo de detección YOLO, bibtex, Meta AI.* diff --git a/ultralytics/docs/es/models/sam.md:Zone.Identifier b/ultralytics/docs/es/models/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolo-nas.md b/ultralytics/docs/es/models/yolo-nas.md new file mode 100755 index 0000000..3b25a60 --- /dev/null +++ b/ultralytics/docs/es/models/yolo-nas.md @@ -0,0 +1,121 @@ +--- +comments: true +description: Explora la documentación detallada de YOLO-NAS, un modelo de detección de objetos superior. Aprende sobre sus características, modelos pre-entrenados, uso con la API de Ultralytics Python, y más. +keywords: YOLO-NAS, Deci AI, detección de objetos, aprendizaje profundo, búsqueda de arquitectura neural, API de Ultralytics Python, modelo YOLO, modelos pre-entrenados, cuantización, optimización, COCO, Objects365, Roboflow 100 +--- + +# YOLO-NAS + +## Visión general + +Desarrollado por Deci AI, YOLO-NAS es un modelo revolucionario de detección de objetos. Es el producto de una avanzada tecnología de Búsqueda de Arquitectura Neural (NAS), meticulosamente diseñada para abordar las limitaciones de los modelos YOLO anteriores. Con mejoras significativas en el soporte de cuantización y en el equilibrio entre precisión y latencia, YOLO-NAS representa un gran avance en la detección de objetos.
+ +![Ejemplo de imagen del modelo](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**Visión general de YOLO-NAS.** YOLO-NAS utiliza bloques conscientes de la cuantización y cuantización selectiva para un rendimiento óptimo. El modelo, cuando se convierte a su versión cuantizada INT8, experimenta una caída mínima de precisión, una mejora significativa en comparación con otros modelos. Estos avances culminan en una arquitectura superior con capacidades de detección de objetos sin precedentes y un rendimiento sobresaliente. + +### Características clave + +- **Bloque básico compatible con cuantización:** YOLO-NAS introduce un nuevo bloque básico que es compatible con la cuantización, abordando una de las limitaciones significativas de los modelos YOLO anteriores. +- **Entrenamiento sofisticado y cuantización:** YOLO-NAS utiliza esquemas avanzados de entrenamiento y cuantización posterior al entrenamiento para mejorar el rendimiento. +- **Optimización AutoNAC y pre-entrenamiento:** YOLO-NAS utiliza la optimización AutoNAC y se pre-entrena en conjuntos de datos prominentes como COCO, Objects365 y Roboflow 100. Este pre-entrenamiento lo hace extremadamente adecuado para tareas de detección de objetos en entornos de producción. + +## Modelos pre-entrenados + +Experimenta el poder de la detección de objetos de próxima generación con los modelos pre-entrenados de YOLO-NAS proporcionados por Ultralytics. Estos modelos están diseñados para ofrecer un rendimiento de primera clase tanto en velocidad como en precisión. Elige entre una variedad de opciones adaptadas a tus necesidades específicas: + +| Modelo | mAP | Latencia (ms) | +|------------------|-------|---------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +Cada variante del modelo está diseñada para ofrecer un equilibrio entre la precisión media promedio (mAP, del inglés *mean Average Precision*) y la latencia, ayudándote a optimizar tus tareas de detección de objetos en términos de rendimiento y velocidad. + +## Ejemplos de uso + +Ultralytics ha facilitado la integración de los modelos YOLO-NAS en tus aplicaciones de Python a través de nuestro paquete `ultralytics`. El paquete proporciona una API de Python fácil de usar para agilizar el proceso. + +Los siguientes ejemplos muestran cómo usar los modelos YOLO-NAS con el paquete `ultralytics` para inferencia y validación: + +### Ejemplos de inferencia y validación + +En este ejemplo validamos YOLO-NAS-s en el conjunto de datos COCO8. + +!!! Example "Ejemplo" + + Este ejemplo proporciona un código simple de inferencia y validación para YOLO-NAS. Para manejar los resultados de la inferencia, consulta el modo [Predict](../modes/predict.md). Para usar YOLO-NAS con modos adicionales, consulta [Val](../modes/val.md) y [Export](../modes/export.md). El paquete `ultralytics` para YOLO-NAS no admite entrenamiento.
+ + === "Python" + + Los archivos de modelos pre-entrenados `*.pt` de PyTorch se pueden pasar a la clase `NAS()` para crear una instancia del modelo en Python: + + ```python + from ultralytics import NAS + + # Carga un modelo YOLO-NAS-s pre-entrenado en COCO + modelo = NAS('yolo_nas_s.pt') + + # Muestra información del modelo (opcional) + modelo.info() + + # Valida el modelo en el conjunto de datos de ejemplo COCO8 + resultados = modelo.val(data='coco8.yaml') + + # Ejecuta inferencia con el modelo YOLO-NAS-s en la imagen 'bus.jpg' + resultados = modelo('path/to/bus.jpg') + ``` + + === "CLI" + + Los comandos CLI están disponibles para ejecutar directamente los modelos: + + ```bash + # Carga un modelo YOLO-NAS-s pre-entrenado en COCO y valida su rendimiento en el conjunto de datos de ejemplo COCO8 + yolo val model=yolo_nas_s.pt data=coco8.yaml + + # Carga un modelo YOLO-NAS-s pre-entrenado en COCO y ejecuta inferencia en la imagen 'bus.jpg' + yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg + ``` + +## Tareas y modos compatibles + +Ofrecemos tres variantes de los modelos YOLO-NAS: Small (s), Medium (m) y Large (l). Cada variante está diseñada para satisfacer diferentes necesidades computacionales y de rendimiento: + +- **YOLO-NAS-s**: Optimizado para entornos donde los recursos computacionales son limitados pero la eficiencia es clave. +- **YOLO-NAS-m**: Ofrece un enfoque equilibrado, adecuado para la detección de objetos de propósito general con mayor precisión. +- **YOLO-NAS-l**: Adaptado a escenarios que requieren la mayor precisión, donde los recursos computacionales son menos restrictivos. + +A continuación se muestra una descripción detallada de cada modelo, incluyendo enlaces a sus pesos pre-entrenados, las tareas que admiten y su compatibilidad con diferentes modos de funcionamiento. + +| Tipo de modelo | Pesos pre-entrenados | Tareas admitidas | Inferencia | Validación | Entrenamiento | Exportación | +|----------------|-----------------------------------------------------------------------------------------------|--------------------------------------------|------------|------------|---------------|-------------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [Detección de objetos](../tasks/detect.md) | ✅ | ✅ | ❌ | ✅ | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [Detección de objetos](../tasks/detect.md) | ✅ | ✅ | ❌ | ✅ | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [Detección de objetos](../tasks/detect.md) | ✅ | ✅ | ❌ | ✅ | + +## Citaciones y agradecimientos + +Si utilizas YOLO-NAS en tu investigación o trabajo de desarrollo, por favor cita SuperGradients: + +!!!
Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +Agradecemos al equipo de [SuperGradients](https://github.com/Deci-AI/super-gradients/) de Deci AI por sus esfuerzos en la creaciรณn y mantenimiento de este valioso recurso para la comunidad de visiรณn por computadora. Creemos que YOLO-NAS, con su arquitectura innovadora y sus capacidades de detecciรณn de objetos superiores, se convertirรก en una herramienta fundamental tanto para desarrolladores como para investigadores. + +*keywords: YOLO-NAS, Deci AI, detecciรณn de objetos, aprendizaje profundo, bรบsqueda de arquitectura neural, API de Ultralytics Python, modelo YOLO, SuperGradients, modelos pre-entrenados, bloque bรกsico compatible con cuantizaciรณn, esquemas avanzados de entrenamiento, cuantizaciรณn posterior, optimizaciรณn AutoNAC, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/es/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/es/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov3.md b/ultralytics/docs/es/models/yolov3.md new file mode 100755 index 0000000..1990e5b --- /dev/null +++ b/ultralytics/docs/es/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Obtรฉn una descripciรณn general de YOLOv3, YOLOv3-Ultralytics y YOLOv3u. Aprende sobre sus caracterรญsticas clave, uso y tareas admitidas para la detecciรณn de objetos. +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, Detecciรณn de objetos, Inferencia, Entrenamiento, Ultralytics +--- + +# YOLOv3, YOLOv3-Ultralytics y YOLOv3u + +## Descripciรณn general + +Este documento presenta una descripciรณn general de tres modelos de detecciรณn de objetos estrechamente relacionados, conocidos como [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) y [YOLOv3u](https://github.com/ultralytics/ultralytics). + +1. **YOLOv3:** Esta es la tercera versiรณn del algoritmo de detecciรณn de objetos You Only Look Once (YOLO). Originalmente desarrollado por Joseph Redmon, YOLOv3 mejorรณ a sus predecesores al introducir caracterรญsticas como predicciones multiescala y tres tamaรฑos diferentes de nรบcleos de detecciรณn. + +2. **YOLOv3-Ultralytics:** Esta es la implementaciรณn de YOLOv3 realizada por Ultralytics. Reproduce la arquitectura original de YOLOv3 y ofrece funcionalidades adicionales, como soporte para mรกs modelos pre-entrenados y opciones de personalizaciรณn mรกs fรกciles. + +3. **YOLOv3u:** Esta es una versiรณn actualizada de YOLOv3-Ultralytics que incorpora la cabeza dividida sin anclaje y sin objeto utilizada en los modelos YOLOv8. YOLOv3u mantiene la misma arquitectura de columna vertebral y cuello que YOLOv3, pero con la cabeza de detecciรณn actualizada de YOLOv8. 
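+
+Las tres variantes comparten la misma interfaz en el paquete `ultralytics`. Como anticipo de la sección de ejemplos de uso más abajo, un boceto mínimo (asumiendo los pesos `yolov3n.pt` que se usan en dicha sección) que ilustra el modo de exportación recogido en la tabla de modos posterior:
+
+```python
+from ultralytics import YOLO
+
+# Cargar pesos pre-entrenados y exportar a ONNX (modo Export)
+modelo = YOLO('yolov3n.pt')
+ruta_onnx = modelo.export(format='onnx')  # devuelve la ruta del modelo exportado
+print(ruta_onnx)
+```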
+ +![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png) + +## Caracterรญsticas clave + +- **YOLOv3:** Introdujo el uso de tres escalas diferentes para la detecciรณn, aprovechando tres tamaรฑos diferentes de nรบcleos de detecciรณn: 13x13, 26x26 y 52x52. Esto mejorรณ significativamente la precisiรณn de detecciรณn para objetos de diferentes tamaรฑos. Ademรกs, YOLOv3 aรฑadiรณ caracterรญsticas como predicciones con mรบltiples etiquetas para cada cuadro delimitador y una mejor red extractora de caracterรญsticas. + +- **YOLOv3-Ultralytics:** La implementaciรณn de Ultralytics de YOLOv3 proporciona el mismo rendimiento que el modelo original, pero cuenta con soporte adicional para mรกs modelos pre-entrenados, mรฉtodos de entrenamiento adicionales y opciones de personalizaciรณn mรกs fรกciles. Esto lo hace mรกs versรกtil y fรกcil de usar para aplicaciones prรกcticas. + +- **YOLOv3u:** Este modelo actualizado incorpora la cabeza dividida sin anclaje y sin objeto de YOLOv8. Al eliminar la necesidad de cajas de anclaje predefinidas y puntuaciones de objeto, este diseรฑo de cabeza de detecciรณn puede mejorar la capacidad del modelo para detectar objetos de diferentes tamaรฑos y formas. Esto hace que YOLOv3u sea mรกs robusto y preciso para tareas de detecciรณn de objetos. + +## Tareas y modos admitidos + +La serie YOLOv3, que incluye YOLOv3, YOLOv3-Ultralytics y YOLOv3u, estรก diseรฑada especรญficamente para tareas de detecciรณn de objetos. Estos modelos son reconocidos por su eficacia en diversos escenarios del mundo real, equilibrando precisiรณn y velocidad. Cada variante ofrece caracterรญsticas y optimizaciones รบnicas, lo que los hace adecuados para una variedad de aplicaciones. + +Los tres modelos admiten un conjunto completo de modos, asegurando versatilidad en diversas etapas del despliegue y desarrollo del modelo. Estos modos incluyen [Inferencia](../modes/predict.md), [Validaciรณn](../modes/val.md), [Entrenamiento](../modes/train.md) y [Exportaciรณn](../modes/export.md), proporcionando a los usuarios un conjunto completo de herramientas para una detecciรณn de objetos efectiva. + +| Tipo de modelo | Tareas admitidas | Inferencia | Validaciรณn | Entrenamiento | Exportaciรณn | +|--------------------|--------------------------------------------|------------|------------|---------------|-------------| +| YOLOv3 | [Detecciรณn de objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3-Ultralytics | [Detecciรณn de objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3u | [Detecciรณn de objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabla proporciona una visiรณn rรกpida de las capacidades de cada variante de YOLOv3, destacando su versatilidad y aptitud para diversas tareas y modos operativos en flujos de trabajo de detecciรณn de objetos. + +## Ejemplos de uso + +Este ejemplo proporciona ejemplos sencillos de entrenamiento e inferencia de YOLOv3. Para obtener documentaciรณn completa sobre estos y otros [modos](../modes/index.md), consulta las pรกginas de documentaciรณn de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md). + +!!! 
Example "Ejemplo" + + === "Python" + + Los modelos pre-entrenados de PyTorch en archivos `*.pt`, asรญ como los archivos de configuraciรณn `*.yaml`, se pueden pasar a la clase `YOLO()` para crear una instancia del modelo en Python: + + ```python + from ultralytics import YOLO + + # Cargar un modelo YOLOv3n pre-entrenado en COCO + model = YOLO('yolov3n.pt') + + # Mostrar informaciรณn del modelo (opcional) + model.info() + + # Entrenar el modelo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Ejecutar inferencia con el modelo YOLOv3n en la imagen 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + Hay comandos de CLI disponibles para ejecutar directamente los modelos: + + ```bash + # Cargar un modelo YOLOv3n pre-entrenado en COCO y entrenarlo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Cargar un modelo YOLOv3n pre-entrenado en COCO y ejecutar inferencia en la imagen 'bus.jpg' + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## Citaciones y agradecimientos + +Si utilizas YOLOv3 en tu investigaciรณn, por favor, cita los artรญculos originales de YOLO y el repositorio de YOLOv3 de Ultralytics: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Gracias a Joseph Redmon y Ali Farhadi por desarrollar YOLOv3 original. diff --git a/ultralytics/docs/es/models/yolov3.md:Zone.Identifier b/ultralytics/docs/es/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov4.md b/ultralytics/docs/es/models/yolov4.md new file mode 100755 index 0000000..05bd43c --- /dev/null +++ b/ultralytics/docs/es/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: Explora nuestra detallada guรญa sobre YOLOv4, un detector de objetos en tiempo real de vanguardia. Comprende sus aspectos arquitectรณnicos destacados, caracterรญsticas innovadoras y ejemplos de aplicaciรณn. +keywords: ultralytics, YOLOv4, detecciรณn de objetos, red neuronal, detecciรณn en tiempo real, detector de objetos, aprendizaje automรกtico +--- + +# YOLOv4: Detecciรณn de objetos rรกpida y precisa + +Bienvenido a la pรกgina de documentaciรณn de Ultralytics para YOLOv4, un detector de objetos en tiempo real de vanguardia lanzado en 2020 por Alexey Bochkovskiy en [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 estรก diseรฑado para ofrecer un equilibrio รณptimo entre velocidad y precisiรณn, lo que lo convierte en una excelente opciรณn para muchas aplicaciones. + +![Diagrama de arquitectura de YOLOv4](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**Diagrama de arquitectura de YOLOv4**. Muestra el intrincado diseรฑo de red de YOLOv4, incluyendo los componentes backbone, neck y head, y sus capas interconectadas para una detecciรณn de objetos en tiempo real รณptima. + +## Introducciรณn + +YOLOv4 significa You Only Look Once versiรณn 4. 
Es un modelo de detección de objetos en tiempo real desarrollado para abordar las limitaciones de versiones anteriores de YOLO como [YOLOv3](yolov3.md) y otros modelos de detección de objetos. A diferencia de otros detectores de objetos basados en redes neuronales convolucionales (CNN), YOLOv4 no solo es aplicable para sistemas de recomendación, sino también para la gestión de procesos independientes y la reducción de la entrada humana. Su funcionamiento en unidades de procesamiento de gráficos (GPU) convencionales permite su uso masivo a un precio asequible, y está diseñado para funcionar en tiempo real en una GPU convencional, siendo necesaria una sola GPU para el entrenamiento.
+
+## Arquitectura
+
+YOLOv4 utiliza varias características innovadoras que trabajan juntas para optimizar su rendimiento. Estas incluyen Conexiones Residuales Ponderadas (WRC), Conexiones Parciales Cruzadas en Etapas (CSP), Normalización Cruzada de Mini-Batch (CmBN), Entrenamiento Autoadversarial (SAT), Activación Mish, Aumento de Datos Mosaico, Regularización DropBlock y Pérdida CIoU. Estas características se combinan para lograr resultados de vanguardia.
+
+Un detector de objetos típico está compuesto por varias partes, incluyendo la entrada, el backbone (espinazo), el neck (cuello) y el head (cabeza). El backbone de YOLOv4 está pre-entrenado en ImageNet, y sobre sus mapas de características el modelo predice después las clases y las cajas delimitadoras de los objetos. El backbone puede ser de varios modelos, incluyendo VGG, ResNet, ResNeXt o DenseNet. La parte del neck del detector se utiliza para recolectar mapas de características de diferentes etapas y generalmente incluye varias rutas de abajo hacia arriba y varias rutas de arriba hacia abajo. La parte de la cabeza es la que se utiliza para realizar las detecciones y clasificaciones finales de objetos.
+
+## Bolsa de regalos
+
+YOLOv4 también utiliza métodos conocidos como "bolsa de regalos" (bag of freebies), que son técnicas que mejoran la precisión del modelo durante el entrenamiento sin aumentar el costo de la inferencia. La ampliación de datos es una técnica común de la bolsa de regalos utilizada en la detección de objetos, que aumenta la variabilidad de las imágenes de entrada para mejorar la robustez del modelo. Algunos ejemplos de ampliación de datos incluyen distorsiones fotométricas (ajuste del brillo, contraste, matiz, saturación y ruido de una imagen) y distorsiones geométricas (agregar escalado, recorte, volteo y rotación aleatorios). Estas técnicas ayudan al modelo a generalizar mejor para diferentes tipos de imágenes.
+
+## Características y rendimiento
+
+YOLOv4 está diseñado para obtener una velocidad y precisión óptimas en la detección de objetos. La arquitectura de YOLOv4 incluye CSPDarknet53 como backbone, PANet como neck y YOLOv3 como cabeza de detección. Este diseño permite que YOLOv4 realice la detección de objetos a una velocidad impresionante, lo que lo hace adecuado para aplicaciones en tiempo real. YOLOv4 también sobresale en precisión, logrando resultados de vanguardia en los benchmarks de detección de objetos.
+
+## Ejemplos de uso
+
+En el momento de escribir este documento, Ultralytics no admite modelos YOLOv4. Por lo tanto, cualquier usuario interesado en usar YOLOv4 deberá consultar directamente el repositorio de YOLOv4 en GitHub para obtener instrucciones de instalación y uso.
+
+Aquí hay un resumen breve de los pasos típicos que podrías seguir para usar YOLOv4:
+
+1. 
Visita el repositorio de YOLOv4 en GitHub: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. Sigue las instrucciones proporcionadas en el archivo README para la instalaciรณn. Esto generalmente implica clonar el repositorio, instalar las dependencias necesarias y configurar las variables de entorno necesarias. + +3. Una vez que la instalaciรณn estรฉ completa, puedes entrenar y usar el modelo segรบn las instrucciones de uso proporcionadas en el repositorio. Esto normalmente implica preparar tu conjunto de datos, configurar los parรกmetros del modelo, entrenar el modelo y luego usar el modelo entrenado para realizar la detecciรณn de objetos. + +Ten en cuenta que los pasos especรญficos pueden variar dependiendo de tu caso de uso especรญfico y del estado actual del repositorio de YOLOv4. Por lo tanto, se recomienda encarecidamente consultar directamente las instrucciones proporcionadas en el repositorio de YOLOv4 en GitHub. + +Lamentamos cualquier inconveniente que esto pueda causar y nos esforzaremos por actualizar este documento con ejemplos de uso para Ultralytics una vez que se implemente el soporte para YOLOv4. + +## Conclusiรณn + +YOLOv4 es un modelo de detecciรณn de objetos potente y eficiente que logra un equilibrio entre velocidad y precisiรณn. Su uso de caracterรญsticas รบnicas y tรฉcnicas de bolsa de regalos durante el entrenamiento le permite realizar un excelente desempeรฑo en tareas de detecciรณn de objetos en tiempo real. YOLOv4 puede ser entrenado y utilizado por cualquier persona con una GPU convencional, lo que lo hace accesible y prรกctico para una amplia gama de aplicaciones. + +## Citaciones y agradecimientos + +Nos gustarรญa reconocer a los autores de YOLOv4 por sus importantes contribuciones en el campo de la detecciรณn de objetos en tiempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +El artรญculo original de YOLOv4 se puede encontrar en [arXiv](https://arxiv.org/abs/2004.10934). Los autores han puesto su trabajo a disposiciรณn del pรบblico, y el cรณdigo se puede acceder en [GitHub](https://github.com/AlexeyAB/darknet). Apreciamos sus esfuerzos en el avance del campo y en hacer que su trabajo sea accesible para la comunidad en general. diff --git a/ultralytics/docs/es/models/yolov4.md:Zone.Identifier b/ultralytics/docs/es/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov5.md b/ultralytics/docs/es/models/yolov5.md new file mode 100755 index 0000000..66adf22 --- /dev/null +++ b/ultralytics/docs/es/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: Descubra YOLOv5u, una versiรณn mejorada del modelo YOLOv5 con un mejor equilibrio entre precisiรณn y velocidad, y numerosos modelos pre-entrenados para diversas tareas de detecciรณn de objetos. +keywords: YOLOv5u, detecciรณn de objetos, modelos pre-entrenados, Ultralytics, Inferencia, Validaciรณn, YOLOv5, YOLOv8, sin anclas, sin atenciรณn al objeto, aplicaciones en tiempo real, aprendizaje automรกtico +--- + +# YOLOv5 + +## Resumen + +YOLOv5u representa un avance en las metodologรญas de detecciรณn de objetos. 
Originado a partir de la arquitectura fundamental del modelo [YOLOv5](https://github.com/ultralytics/yolov5) desarrollado por Ultralytics, YOLOv5u integra la cabeza dividida Ultralytics sin anclas y sin atención al objeto, una característica introducida previamente en los modelos [YOLOv8](yolov8.md). Esta adaptación perfecciona la arquitectura del modelo, resultando en un mejor equilibrio entre precisión y velocidad en tareas de detección de objetos. Con base en los resultados empíricos y sus características derivadas, YOLOv5u proporciona una alternativa eficiente para aquellos que buscan soluciones robustas tanto en investigación como en aplicaciones prácticas.
+
+![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png)
+
+## Características clave
+
+- **Cabeza dividida Ultralytics sin anclas:** Los modelos tradicionales de detección de objetos dependen de cajas de anclaje predefinidas para predecir la ubicación de los objetos. Sin embargo, YOLOv5u moderniza este enfoque. Al adoptar una cabeza Ultralytics dividida sin anclas, se garantiza un mecanismo de detección más flexible y adaptable, lo que en consecuencia mejora el rendimiento en diversos escenarios.
+
+- **Equilibrio óptimo entre precisión y velocidad:** La velocidad y la precisión suelen estar contrapuestas, pero YOLOv5u desafía este compromiso. Ofrece un balance calibrado, garantizando detecciones en tiempo real sin comprometer la precisión. Esta característica es especialmente valiosa para aplicaciones que requieren respuestas rápidas, como vehículos autónomos, robótica y análisis de video en tiempo real.
+
+- **Variedad de modelos pre-entrenados:** Entendiendo que diferentes tareas requieren diferentes herramientas, YOLOv5u proporciona una gran cantidad de modelos pre-entrenados. Ya sea que te enfoques en Inferencia, Validación o Entrenamiento, hay un modelo a la medida esperándote. Esta variedad asegura que no estés utilizando una solución genérica, sino un modelo específicamente ajustado para tu desafío único.
+
+## Tareas y Modos Soportados
+
+Los modelos YOLOv5u, con diferentes pesos pre-entrenados, sobresalen en las tareas de [Detección de Objetos](../tasks/detect.md). Soportan una amplia gama de modos que los hacen adecuados para diversas aplicaciones, desde el desarrollo hasta la implementación.
+
+| Tipo de Modelo | Pesos Pre-entrenados                                                                                                        | Tarea                                      | Inferencia | Validación | Entrenamiento | Exportación |
+|----------------|-----------------------------------------------------------------------------------------------------------------------------|--------------------------------------------|------------|------------|---------------|-------------|
+| YOLOv5u        | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u`   | [Detección de Objetos](../tasks/detect.md) | ✅          | ✅          | ✅             | ✅           |
+
+Esta tabla proporciona una descripción detallada de las variantes de modelos YOLOv5u, destacando su aplicabilidad en tareas de detección de objetos y el soporte para varios modos operativos como [Inferencia](../modes/predict.md), [Validación](../modes/val.md), [Entrenamiento](../modes/train.md) y [Exportación](../modes/export.md). Este soporte integral asegura que los usuarios puedan aprovechar al máximo las capacidades de los modelos YOLOv5u en una amplia gama de escenarios de detección de objetos.
+
+## Métricas de Rendimiento
+
+!!! 
Rendimiento + + === "Detecciรณn" + + Consulta la [Documentaciรณn de Detecciรณn](https://docs.ultralytics.com/tasks/detect/) para obtener ejemplos de uso con estos modelos entrenados en [COCO](https://docs.ultralytics.com/datasets/detect/coco/), los cuales incluyen 80 clases pre-entrenadas. + + | Modelo | YAML | tamaรฑo
(pรญxeles) | mAPval
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | + |---------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|-----------------------|----------------------|--------------------------------|-------------------------------------|--------------------|-------------------| + | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 | + | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 | + | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 | + | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 | + | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 | + | | | | | | | | | + | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 | + | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 | + | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 | + | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 | + | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 | + +## Ejemplos de Uso + +Este ejemplo proporciona ejemplos sencillos de entrenamiento e inferencia de YOLOv5. Para obtener documentaciรณn completa sobre estos y otros [modos](../modes/index.md), consulta las pรกginas de documentaciรณn de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md). + +!!! 
Example "Ejemplo" + + === "Python" + + Los modelos pre-entrenados `*.pt` de PyTorch, asรญ como los archivos de configuraciรณn `*.yaml`, se pueden pasar a la clase `YOLO()` para crear una instancia de modelo en Python: + + ```python + from ultralytics import YOLO + + # Cargar un modelo YOLOv5n pre-entrenado en COCO + modelo = YOLO('yolov5n.pt') + + # Mostrar informaciรณn del modelo (opcional) + modelo.info() + + # Entrenar el modelo con el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + resultados = modelo.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Ejecutar inferencia con el modelo YOLOv5n en la imagen 'bus.jpg' + resultados = modelo('path/to/bus.jpg') + ``` + + === "CLI" + + Hay comandos de CLI disponibles para ejecutar directamente los modelos: + + ```bash + # Cargar un modelo YOLOv5n pre-entrenado en COCO y entrenarlo con el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Cargar un modelo YOLOv5n pre-entrenado en COCO y ejecutar inferencia en la imagen 'bus.jpg' + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## Citaciones y Reconocimientos + +Si utilizas YOLOv5 o YOLOv5u en tu investigaciรณn, por favor cita el repositorio de Ultralytics YOLOv5 de la siguiente manera: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Ten en cuenta que los modelos YOLOv5 se proporcionan bajo las licencias [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) y [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/es/models/yolov5.md:Zone.Identifier b/ultralytics/docs/es/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov6.md b/ultralytics/docs/es/models/yolov6.md new file mode 100755 index 0000000..f65f37f --- /dev/null +++ b/ultralytics/docs/es/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: Explora Meituan YOLOv6, un modelo de detecciรณn de objetos de รบltima generaciรณn que logra un equilibrio entre velocidad y precisiรณn. Sumรฉrgete en caracterรญsticas, modelos pre-entrenados y el uso de Python. +keywords: Meituan YOLOv6, detecciรณn de objetos, Ultralytics, documentaciรณn de YOLOv6, Concatenaciรณn Bidireccional, Entrenamiento con Anclas, modelos pre-entrenados, aplicaciones en tiempo real +--- + +# Meituan YOLOv6 + +## Visiรณn general + +[Meituan](https://about.meituan.com/) YOLOv6 es un detector de objetos de รบltima generaciรณn que ofrece un notable equilibrio entre velocidad y precisiรณn, lo que lo convierte en una opciรณn popular para aplicaciones en tiempo real. Este modelo presenta varias mejoras notables en su arquitectura y esquema de entrenamiento, que incluyen la implementaciรณn de un mรณdulo de Concatenaciรณn Bidireccional (BiC), una estrategia de entrenamiento con anclas (AAT) y un diseรฑo de columna vertebral y cuello mejorado para lograr una precisiรณn de รบltima generaciรณn en el conjunto de datos COCO. 
+
+![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png)
+![Ejemplo de imagen del modelo](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png)
+**Visión general de YOLOv6.** Diagrama de la arquitectura del modelo que muestra los componentes de la red rediseñados y las estrategias de entrenamiento que han llevado a mejoras significativas en el rendimiento. (a) El cuello de YOLOv6 (se muestran N y S). Cabe señalar que, en M/L, RepBlocks se reemplaza por CSPStackRep. (b) La estructura de un módulo BiC. (c) Un bloque SimCSPSPPF. ([fuente](https://arxiv.org/pdf/2301.05586.pdf)).
+
+### Características clave
+
+- **Módulo de Concatenación Bidireccional (BiC):** YOLOv6 introduce un módulo BiC en el cuello del detector, que mejora las señales de localización y ofrece mejoras de rendimiento con una degradación de velocidad despreciable.
+- **Estrategia de Entrenamiento con Anclas (AAT):** Este modelo propone AAT para disfrutar de los beneficios de los paradigmas basados en anclas y sin anclas sin comprometer la eficiencia de inferencia.
+- **Diseño de Columna Vertebral y Cuello Mejorado:** Al profundizar en YOLOv6 para incluir otra etapa en la columna vertebral y el cuello, este modelo logra un rendimiento de última generación en el conjunto de datos COCO con una entrada de alta resolución.
+- **Estrategia de Auto-Destilación:** Se implementa una nueva estrategia de auto-destilación para mejorar el rendimiento de los modelos más pequeños de YOLOv6, reforzando la rama de regresión auxiliar durante el entrenamiento y eliminándola durante la inferencia para evitar una marcada disminución de velocidad.
+
+## Métricas de rendimiento
+
+YOLOv6 proporciona varios modelos pre-entrenados con diferentes escalas:
+
+- YOLOv6-N: 37.5% de precisión promedio (AP) en COCO val2017 a 1187 FPS con la GPU NVIDIA Tesla T4.
+- YOLOv6-S: 45.0% de AP a 484 FPS.
+- YOLOv6-M: 50.0% de AP a 226 FPS.
+- YOLOv6-L: 52.8% de AP a 116 FPS.
+- YOLOv6-L6: Precisión de última generación en tiempo real.
+
+YOLOv6 también proporciona modelos cuantizados para diferentes precisiones y modelos optimizados para plataformas móviles.
+
+## Ejemplos de uso
+
+Esta sección proporciona ejemplos sencillos de entrenamiento e inferencia con YOLOv6. Para obtener documentación completa sobre estos y otros [modos](../modes/index.md), consulta las páginas de documentación de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md).
+
+!!! 
Example "Ejemplo" + + === "Python" + + Los modelos pre-entrenados en `*.pt` de PyTorch, asรญ como los archivos de configuraciรณn `*.yaml`, se pueden pasar a la clase `YOLO()` para crear una instancia del modelo en Python: + + ```python + from ultralytics import YOLO + + # Construir un modelo YOLOv6n desde cero + modelo = YOLO('yolov6n.yaml') + + # Mostrar informaciรณn del modelo (opcional) + modelo.info() + + # Entrenar el modelo en el conjunto de datos de ejemplo COCO8 durante 100 epochs + resultados = modelo.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Ejecutar inferencia con el modelo YOLOv6n en la imagen 'bus.jpg' + resultados = modelo('path/to/bus.jpg') + ``` + + === "CLI" + + Se dispone de comandos de lรญnea de comandos (CLI) para ejecutar directamente los modelos: + + ```bash + # Construir un modelo YOLOv6n desde cero y entrenarlo en el conjunto de datos de ejemplo COCO8 durante 100 epochs + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Construir un modelo YOLOv6n desde cero y ejecutar inferencia en la imagen 'bus.jpg' + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## Tareas y Modos Soportados + +La serie YOLOv6 ofrece una variedad de modelos, cada uno optimizado para [Detecciรณn de Objetos](../tasks/detect.md) de alto rendimiento. Estos modelos se adaptan a distintas necesidades computacionales y requisitos de precisiรณn, lo que los hace versรกtiles para una amplia gama de aplicaciones. + +| Tipo de Modelo | Pesos Pre-entrenados | Tareas Soportadas | Inferencia | Validaciรณn | Entrenamiento | Exportaciรณn | +|----------------|----------------------|--------------------------------------------|------------|------------|---------------|-------------| +| YOLOv6-N | `yolov6-n.pt` | [Detecciรณn de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Detecciรณn de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Detecciรณn de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Detecciรณn de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Detecciรณn de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabla proporciona una descripciรณn detallada de las variantes del modelo YOLOv6, destacando sus capacidades en tareas de detecciรณn de objetos y su compatibilidad con varios modos operativos como [Inferencia](../modes/predict.md), [Validaciรณn](../modes/val.md), [Entrenamiento](../modes/train.md) y [Exportaciรณn](../modes/export.md). Este soporte integral garantiza que los usuarios puedan aprovechar al mรกximo las capacidades de los modelos YOLOv6 en una amplia gama de escenarios de detecciรณn de objetos. + +## Citaciones y Agradecimientos + +Nos gustarรญa agradecer a los autores por sus importantes contribuciones en el campo de la detecciรณn de objetos en tiempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + + Se puede encontrar el artรญculo original de YOLOv6 en [arXiv](https://arxiv.org/abs/2301.05586). Los autores han puesto su trabajo a disposiciรณn del pรบblico y el cรณdigo fuente se puede acceder en [GitHub](https://github.com/meituan/YOLOv6). 
Agradecemos sus esfuerzos en avanzar en el campo y hacer que su trabajo sea accesible para la comunidad en general. diff --git a/ultralytics/docs/es/models/yolov6.md:Zone.Identifier b/ultralytics/docs/es/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov7.md b/ultralytics/docs/es/models/yolov7.md new file mode 100755 index 0000000..6452043 --- /dev/null +++ b/ultralytics/docs/es/models/yolov7.md @@ -0,0 +1,66 @@ +--- +comments: true +description: Explora el YOLOv7, un detector de objetos en tiempo real. Comprende su velocidad superior, precisiรณn impresionante y enfoque รบnico en la optimizaciรณn de entrenamiento de bolsas de caracterรญsticas entrenables. +keywords: YOLOv7, detector de objetos en tiempo real, estado del arte, Ultralytics, conjunto de datos MS COCO, re-parametrizaciรณn del modelo, asignaciรณn dinรกmica de etiquetas, escalado extendido, escalado compuesto +--- + +# YOLOv7: Bolsa de Caracterรญsticas Entrenable + +YOLOv7 es un detector de objetos en tiempo real de รบltima generaciรณn que supera a todos los detectores de objetos conocidos tanto en velocidad como en precisiรณn en el rango de 5 FPS a 160 FPS. Tiene la mayor precisiรณn (56.8% AP) entre todos los detectores de objetos en tiempo real conocidos con una velocidad de 30 FPS o superior en la GPU V100. Ademรกs, YOLOv7 supera a otros detectores de objetos como YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5 y muchos otros en cuanto a velocidad y precisiรณn. El modelo se entrena desde cero utilizando el conjunto de datos MS COCO sin utilizar ningรบn otro conjunto de datos o pesos pre-entrenados. El cรณdigo fuente de YOLOv7 estรก disponible en GitHub. + +![Comparaciรณn de YOLOv7 con detectores de objetos SOTA](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**Comparaciรณn de los detectores de objetos de estado del arte. +** Segรบn los resultados en la Tabla 2, sabemos que el mรฉtodo propuesto tiene el mejor equilibrio entre velocidad y precisiรณn de manera integral. Si comparamos YOLOv7-tiny-SiLU con YOLOv5-N (r6.1), nuestro mรฉtodo es 127 fps mรกs rรกpido y un 10.7% mรกs preciso en AP. Ademรกs, YOLOv7 tiene un AP del 51.4% a una velocidad de cuadro de 161 fps, mientras que PPYOLOE-L con el mismo AP tiene solo una velocidad de cuadro de 78 fps. En tรฉrminos de uso de parรกmetros, YOLOv7 utiliza un 41% menos que PPYOLOE-L. Si comparamos YOLOv7-X con una velocidad de inferencia de 114 fps con YOLOv5-L (r6.1) con una velocidad de inferencia de 99 fps, YOLOv7-X puede mejorar el AP en un 3.9%. Si se compara YOLOv7-X con YOLOv5-X (r6.1) de una escala similar, la velocidad de inferencia de YOLOv7-X es 31 fps mรกs rรกpida. Ademรกs, en tรฉrminos de cantidad de parรกmetros y cรกlculos, YOLOv7-X reduce un 22% de los parรกmetros y un 8% de los cรกlculos en comparaciรณn con YOLOv5-X (r6.1), pero mejora el AP en un 2.2% ([Fuente](https://arxiv.org/pdf/2207.02696.pdf)). + +## Descripciรณn general + +La detecciรณn de objetos en tiempo real es un componente importante en muchos sistemas de visiรณn por computadora, incluyendo el seguimiento de mรบltiples objetos, conducciรณn autรณnoma, robรณtica y anรกlisis de imรกgenes mรฉdicas. 
En los รบltimos aรฑos, el desarrollo de la detecciรณn de objetos en tiempo real se ha centrado en el diseรฑo de arquitecturas eficientes y en la mejora de la velocidad de inferencia de diversas CPUs, GPUs y unidades de procesamiento neural (NPUs). YOLOv7 es compatible tanto con GPU para dispositivos mรณviles como con GPU para dispositivos de escritorio, desde el borde hasta la nube. + +A diferencia de los detectores de objetos en tiempo real tradicionales que se centran en la optimizaciรณn de la arquitectura, YOLOv7 introduce un enfoque en la optimizaciรณn del proceso de entrenamiento. Esto incluye mรณdulos y mรฉtodos de optimizaciรณn diseรฑados para mejorar la precisiรณn de la detecciรณn de objetos sin aumentar el costo de inferencia, un concepto conocido como "bolsas de caracterรญsticas entrenables". + +## Caracterรญsticas clave + +YOLOv7 introduce varias caracterรญsticas clave: + +1. **Re-parametrizaciรณn del modelo**: YOLOv7 propone un modelo re-parametrizado planificado, que es una estrategia aplicable a capas en diferentes redes con el concepto de propagaciรณn del gradiente. + +2. **Asignaciรณn dinรกmica de etiquetas**: El entrenamiento del modelo con mรบltiples capas de salida presenta un nuevo problema: "ยฟCรณmo asignar objetivos dinรกmicos para las salidas de diferentes ramas?" Para resolver este problema, YOLOv7 introduce un nuevo mรฉtodo de asignaciรณn de etiquetas llamado asignaciรณn de etiquetas guiadas de manera gruesa a fina. + +3. **Escalado extendido y compuesto**: YOLOv7 propone mรฉtodos de "escalado extendido" y "escalado compuesto" para el detector de objetos en tiempo real que pueden utilizar eficazmente los parรกmetros y cรกlculos. + +4. **Eficiencia**: El mรฉtodo propuesto por YOLOv7 puede reducir eficazmente aproximadamente el 40% de los parรกmetros y el 50% de los cรกlculos del detector de objetos en tiempo real de รบltima generaciรณn y tiene una velocidad de inferencia mรกs rรกpida y una mayor precisiรณn de detecciรณn. + +## Ejemplos de uso + +Hasta la fecha de redacciรณn de este documento, Ultralytics no admite actualmente modelos YOLOv7. Por lo tanto, los usuarios interesados en utilizar YOLOv7 deberรกn consultar directamente el repositorio de GitHub de YOLOv7 para obtener instrucciones de instalaciรณn y uso. + +Aquรญ hay un resumen breve de los pasos tรญpicos que podrรญas seguir para usar YOLOv7: + +1. Visita el repositorio de GitHub de YOLOv7: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7). + +2. Sigue las instrucciones proporcionadas en el archivo README para la instalaciรณn. Esto generalmente implica clonar el repositorio, instalar las dependencias necesarias y configurar las variables de entorno necesarias. + +3. Una vez que la instalaciรณn estรฉ completa, puedes entrenar y utilizar el modelo segรบn las instrucciones de uso proporcionadas en el repositorio. Esto generalmente implica preparar tu conjunto de datos, configurar los parรกmetros del modelo, entrenar el modelo y luego utilizar el modelo entrenado para realizar la detecciรณn de objetos. + +Ten en cuenta que los pasos especรญficos pueden variar segรบn tu caso de uso especรญfico y el estado actual del repositorio YOLOv7. Por lo tanto, se recomienda encarecidamente consultar directamente las instrucciones proporcionadas en el repositorio de GitHub de YOLOv7. + +Lamentamos cualquier inconveniente que esto pueda causar y nos esforzaremos por actualizar este documento con ejemplos de uso para Ultralytics una vez que se implemente el soporte para YOLOv7. 
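+
+A modo ilustrativo, los pasos anteriores podrían verse así en la práctica. Ten en cuenta que este es solo un esbozo hipotético: los nombres de scripts y archivos (`requirements.txt`, `detect.py`, `yolov7.pt`) reflejan la estructura habitual del repositorio y pueden cambiar, por lo que conviene verificar el README actual antes de ejecutarlos.
+
+```bash
+# Clonar el repositorio oficial de YOLOv7 e instalar sus dependencias
+git clone https://github.com/WongKinYiu/yolov7
+cd yolov7
+pip install -r requirements.txt
+
+# Ejecutar inferencia con pesos pre-entrenados sobre una imagen de ejemplo
+# ('detect.py' y 'yolov7.pt' son nombres supuestos; consulta el README)
+python detect.py --weights yolov7.pt --source path/to/bus.jpg
+```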
+ +## Citaciones y Agradecimientos + +Nos gustarรญa agradecer a los autores de YOLOv7 por sus importantes contribuciones en el campo de la detecciรณn de objetos en tiempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +El artรญculo original de YOLOv7 se puede encontrar en [arXiv](https://arxiv.org/pdf/2207.02696.pdf). Los autores han hecho su trabajo pรบblicamente disponible y el cรณdigo se puede acceder en [GitHub](https://github.com/WongKinYiu/yolov7). Agradecemos sus esfuerzos en el avance del campo y en hacer su trabajo accesible a la comunidad en general. diff --git a/ultralytics/docs/es/models/yolov7.md:Zone.Identifier b/ultralytics/docs/es/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/models/yolov8.md b/ultralytics/docs/es/models/yolov8.md new file mode 100755 index 0000000..7617460 --- /dev/null +++ b/ultralytics/docs/es/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: ยกExplora las emocionantes caracterรญsticas de YOLOv8, la รบltima versiรณn de nuestro detector de objetos en tiempo real! Aprende cรณmo las arquitecturas avanzadas, los modelos preentrenados y el equilibrio รณptimo entre precisiรณn y velocidad hacen de YOLOv8 la elecciรณn perfecta para tus tareas de detecciรณn de objetos. +keywords: YOLOv8, Ultralytics, detector de objetos en tiempo real, modelos preentrenados, documentaciรณn, detecciรณn de objetos, serie YOLO, arquitecturas avanzadas, precisiรณn, velocidad +--- + +# YOLOv8 + +## Descripciรณn general + +YOLOv8 es la รบltima versiรณn de la serie YOLO de detectores de objetos en tiempo real, ofreciendo un rendimiento de vanguardia en tรฉrminos de precisiรณn y velocidad. Basรกndose en los avances de las versiones anteriores de YOLO, YOLOv8 presenta nuevas caracterรญsticas y optimizaciones que lo convierten en una opciรณn ideal para diversas tareas de detecciรณn de objetos en una amplia gama de aplicaciones. + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## Caracterรญsticas principales + +- **Arquitecturas avanzadas de columna vertebral y cuello:** YOLOv8 utiliza arquitecturas de columna vertebral y cuello de รบltima generaciรณn, lo que resulta en una mejor extracciรณn de caracterรญsticas y rendimiento de detecciรณn de objetos. +- **Cabeza Ultralytics dividida sin anclaje:** YOLOv8 adopta una cabeza Ultralytics dividida sin anclaje, lo que contribuye a una mejor precisiรณn y a un proceso de detecciรณn mรกs eficiente en comparaciรณn con los enfoques basados en anclaje. +- **Equilibrio optimizado entre precisiรณn y velocidad:** Con un enfoque en mantener un equilibrio รณptimo entre precisiรณn y velocidad, YOLOv8 es adecuado para tareas de detecciรณn de objetos en tiempo real en diversas รกreas de aplicaciรณn. +- **Variedad de modelos preentrenados:** YOLOv8 ofrece una variedad de modelos preentrenados para adaptarse a diversas tareas y requisitos de rendimiento, lo que facilita encontrar el modelo adecuado para tu caso de uso especรญfico. 
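+
+Como ilustración de esta variedad, cada variante específica de tarea se utiliza con el mismo patrón de comandos; solo cambia el sufijo del archivo de pesos. Un esbozo mínimo (se asume una imagen local `bus.jpg` como entrada de ejemplo):
+
+```bash
+# La misma interfaz cubre las cuatro tareas; solo cambia el modelo preentrenado
+yolo predict model=yolov8n.pt source=bus.jpg       # detección de objetos
+yolo predict model=yolov8n-seg.pt source=bus.jpg   # segmentación de instancias
+yolo predict model=yolov8n-pose.pt source=bus.jpg  # pose/puntos clave
+yolo predict model=yolov8n-cls.pt source=bus.jpg   # clasificación de imágenes
+```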
+ +## Tareas y modos compatibles + +La serie YOLOv8 ofrece una amplia gama de modelos, cada uno especializado en tareas especรญficas en visiรณn por computadora. Estos modelos estรกn diseรฑados para adaptarse a diversos requisitos, desde la detecciรณn de objetos hasta tareas mรกs complejas como la segmentaciรณn de instancias, la detecciรณn de poses/puntos clave y la clasificaciรณn. + +Cada variante de la serie YOLOv8 estรก optimizada para su respectiva tarea, garantizando un alto rendimiento y precisiรณn. Ademรกs, estos modelos son compatibles con varios modos operativos, incluyendo [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) y [Export](../modes/export.md), lo que facilita su uso en diferentes etapas de implementaciรณn y desarrollo. + +| Modelo | Nombres de archivo | Tarea | Inferencia | Validaciรณn | Entrenamiento | Exportaciรณn | +|-------------|----------------------------------------------------------------------------------------------------------------|---------------------------------------------------|------------|------------|---------------|-------------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [Detecciรณn](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [Segmentaciรณn de instancias](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [Pose/Puntos clave](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [Clasificaciรณn](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabla proporciona una descripciรณn general de las variantes de modelos YOLOv8, resaltando su aplicabilidad en tareas especรญficas y su compatibilidad con varios modos operativos como Inferencia, Validaciรณn, Entrenamiento y Exportaciรณn. Muestra la versatilidad y robustez de la serie YOLOv8, haciรฉndolos adecuados para una variedad de aplicaciones en visiรณn por computadora. + +## Mรฉtricas de rendimiento + +!!! Rendimiento + + === "Detecciรณn (COCO)" + + Consulta la [documentaciรณn de Detecciรณn](https://docs.ultralytics.com/tasks/detect/) para ejemplos de uso con estos modelos entrenados en [COCO](https://docs.ultralytics.com/datasets/detect/coco/), que incluyen 80 clases preentrenadas. + + | Modelo | tamaรฑo
(pรญxeles) | mAPval
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | + | ------------------------------------------------------------------------------------ | ----------------------- | --------------------- | ------------------------------ | --------------------------------------- | ---------------------- | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "Detecciรณn (Open Images V7)" + + Consulta la [documentaciรณn de Detecciรณn](https://docs.ultralytics.com/tasks/detect/) para ejemplos de uso con estos modelos entrenados en [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), que incluyen 600 clases preentrenadas. + + | Modelo | tamaรฑo
(pรญxeles) | mAPval
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | + | ----------------------------------------------------------------------------------------- | ----------------------- | --------------------- | -------------------------------- | --------------------------------------- | ---------------------- | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "Segmentaciรณn (COCO)" + + Consulta la [documentaciรณn de Segmentaciรณn](https://docs.ultralytics.com/tasks/segment/) para ejemplos de uso con estos modelos entrenados en [COCO](https://docs.ultralytics.com/datasets/segment/coco/), que incluyen 80 clases preentrenadas. + + | Modelo | tamaรฑo
(pรญxeles) | mAPcaja
50-95 | mAPmรกscara
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | + | -------------------------------------------------------------------------------------------- | ----------------------- | ---------------------- | ----------------------- | -------------------------------- | --------------------------------------- | ---------------------- | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "Clasificaciรณn (ImageNet)" + + Consulta la [documentaciรณn de Clasificaciรณn](https://docs.ultralytics.com/tasks/classify/) para ejemplos de uso con estos modelos entrenados en [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), que incluyen 1000 clases preentrenadas. + + | Modelo | tamaรฑo
(pรญxeles) | acc
top1 | acc
top5 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) a 640 |
+        | -------------------------------------------------------------------------------------------- | ----------------------- | ---------------- | ---------------- | -------------------------------- | --------------------------------------- | ---------------------- | ------------------------ |
+        | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+        | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+        | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+        | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+        | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+    === "Pose (COCO)"
+
+        Consulta la [documentación de Estimación de Poses](https://docs.ultralytics.com/tasks/pose/) para ejemplos de uso con estos modelos entrenados en [COCO](https://docs.ultralytics.com/datasets/pose/coco/), que incluyen 1 clase preentrenada, 'person'.
+
+        | Modelo | tamaño<br>
(pรญxeles) | mAPpose
50-95 | mAPpose
50 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | + | ---------------------------------------------------------------------------------------------------- | ----------------------- | --------------------- | ------------------ | -------------------------------- | --------------------------------------- | ---------------------- | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## Ejemplos de uso + +Este ejemplo proporciona ejemplos sencillos de entrenamiento e inferencia con YOLOv8. Para obtener documentaciรณn completa sobre estos y otros [modos](../modes/index.md), consulta las pรกginas de documentaciรณn de [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) y [Export](../modes/export.md). + +Ten en cuenta que el siguiente ejemplo es para modelos de detecciรณn YOLOv8. Para ver las tareas adicionales compatibles, consulta la documentaciรณn de [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) y [Pose](../tasks/pose.md). + +!!! Example "Ejemplo" + + === "Python" + + Los modelos preentrenados en PyTorch `*.pt`, asรญ como los archivos de configuraciรณn `*.yaml`, se pueden pasar a la clase `YOLO()` para crear una instancia del modelo en Python: + + ```python + from ultralytics import YOLO + + # Carga un modelo YOLOv8n preentrenado en COCO + model = YOLO('yolov8n.pt') + + # Muestra informaciรณn del modelo (opcional) + model.info() + + # Entrena el modelo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Realiza inferencia con el modelo YOLOv8n en la imagen 'bus.jpg' + results = model('ruta/a/bus.jpg') + ``` + + === "CLI" + + Hay comandos de CLI disponibles para ejecutar directamente los modelos: + + ```bash + # Carga un modelo YOLOv8n preentrenado en COCO y entrรฉnalo en el conjunto de datos de ejemplo COCO8 durante 100 รฉpocas + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Carga un modelo YOLOv8n preentrenado en COCO y realiza inferencia en la imagen 'bus.jpg' + yolo predict model=yolov8n.pt source=ruta/a/bus.jpg + ``` + +## Citas y reconocimientos + +Si utilizas el modelo YOLOv8 u otro software de este repositorio en tu trabajo, por favor cรญtalo utilizando el siguiente formato: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + + Ten en cuenta que el DOI estรก pendiente y se agregarรก a la cita una vez que estรฉ disponible. Los modelos de YOLOv8 se proporcionan bajo las licencias [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) y [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/es/models/yolov8.md:Zone.Identifier b/ultralytics/docs/es/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/modes/benchmark.md b/ultralytics/docs/es/modes/benchmark.md new file mode 100755 index 0000000..3a165da --- /dev/null +++ b/ultralytics/docs/es/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Aprenda cรณmo perfilar la velocidad y exactitud de YOLOv8 en varios formatos de exportaciรณn; obtenga perspectivas sobre las mรฉtricas mAP50-95, accuracy_top5 y mรกs. +keywords: Ultralytics, YOLOv8, benchmarking, perfilado de velocidad, perfilado de exactitud, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, formatos de exportaciรณn YOLO +--- + +# Model Benchmarking con Ultralytics YOLO + +Ecosistema e integraciones de Ultralytics YOLO + +## Introducciรณn + +Una vez que su modelo estรก entrenado y validado, el siguiente paso lรณgico es evaluar su rendimiento en varios escenarios del mundo real. El modo benchmark en Ultralytics YOLOv8 cumple con este propรณsito proporcionando un marco sรณlido para valorar la velocidad y exactitud de su modelo a travรฉs de una gama de formatos de exportaciรณn. + +## ยฟPor Quรฉ Es Crucial el Benchmarking? + +- **Decisiones Informadas:** Obtenga perspectivas sobre el equilibrio entre velocidad y precisiรณn. +- **Asignaciรณn de Recursos:** Entienda cรณmo diferentes formatos de exportaciรณn se desempeรฑan en diferentes hardware. +- **Optimizaciรณn:** Aprenda cuรกl formato de exportaciรณn ofrece el mejor rendimiento para su caso de uso especรญfico. +- **Eficiencia de Costo:** Haga un uso mรกs eficiente de los recursos de hardware basado en los resultados del benchmark. + +### Mรฉtricas Clave en el Modo Benchmark + +- **mAP50-95:** Para detecciรณn de objetos, segmentaciรณn y estimaciรณn de pose. +- **accuracy_top5:** Para clasificaciรณn de imรกgenes. +- **Tiempo de Inferencia:** Tiempo tomado para cada imagen en milisegundos. + +### Formatos de Exportaciรณn Soportados + +- **ONNX:** Para un rendimiento รณptimo de CPU +- **TensorRT:** Para la mรกxima eficiencia de GPU +- **OpenVINO:** Para la optimizaciรณn en hardware de Intel +- **CoreML, TensorFlow SavedModel y Mรกs:** Para necesidades de despliegue diversas. + +!!! Tip "Consejo" + + * Exporte a ONNX o OpenVINO para acelerar la velocidad de CPU hasta 3 veces. + * Exporte a TensorRT para acelerar la velocidad de GPU hasta 5 veces. + +## Ejemplos de Uso + +Ejecute benchmarks de YOLOv8n en todos los formatos de exportaciรณn soportados incluyendo ONNX, TensorRT, etc. Vea la secciรณn de Argumentos a continuaciรณn para una lista completa de argumentos de exportaciรณn. + +!!! 
Example "Ejemplo" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark en GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Argumentos + +Argumentos como `model`, `data`, `imgsz`, `half`, `device`, y `verbose` proporcionan a los usuarios la flexibilidad de ajustar los benchmarks a sus necesidades especรญficas y comparar el rendimiento de diferentes formatos de exportaciรณn con facilidad. + +| Clave | Valor | Descripciรณn | +|-----------|---------|----------------------------------------------------------------------------------------------------------| +| `model` | `None` | ruta al archivo del modelo, es decir, yolov8n.pt, yolov8n.yaml | +| `data` | `None` | ruta a YAML que referencia el conjunto de datos de benchmarking (bajo la etiqueta `val`) | +| `imgsz` | `640` | tamaรฑo de imagen como escalar o lista (h, w), es decir, (640, 480) | +| `half` | `False` | cuantificaciรณn FP16 | +| `int8` | `False` | cuantificaciรณn INT8 | +| `device` | `None` | dispositivo en el que se ejecutarรก, es decir, dispositivo cuda=0 o dispositivo=0,1,2,3 o dispositivo=cpu | +| `verbose` | `False` | no continuar en caso de error (bool), o umbral de piso de valor (float) | + +## Formatos de Exportaciรณn + +Los benchmarks intentarรกn ejecutarse automรกticamente en todos los posibles formatos de exportaciรณn a continuaciรณn. + +| Formato | Argumento `format` | Modelo | Metadatos | Argumentos | +|--------------------------------------------------------------------|--------------------|---------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Vea los detalles completos de `export` en la pรกgina [Export](https://docs.ultralytics.com/modes/export/). 
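+
+Conceptualmente, el modo benchmark equivale a exportar el modelo a cada formato de la tabla anterior y medir después su inferencia. Un esbozo mínimo para reproducir a mano un solo caso (se asume un modelo `yolov8n.pt` y una imagen local de prueba; las cifras exactas dependerán de tu hardware):
+
+```bash
+# Exportar el modelo a ONNX (ver la tabla de Formatos de Exportación)
+yolo export model=yolov8n.pt format=onnx
+
+# Ejecutar inferencia con el modelo exportado; la salida registra la velocidad por imagen
+yolo predict model=yolov8n.onnx source=path/to/bus.jpg
+```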
diff --git a/ultralytics/docs/es/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/es/modes/benchmark.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/modes/benchmark.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/modes/export.md b/ultralytics/docs/es/modes/export.md
new file mode 100755
index 0000000..e701ccb
--- /dev/null
+++ b/ultralytics/docs/es/modes/export.md
@@ -0,0 +1,108 @@
+---
+comments: true
+description: Guía paso a paso sobre cómo exportar sus modelos YOLOv8 a varios formatos como ONNX, TensorRT, CoreML y más para su despliegue. ¡Explora ahora!
+keywords: YOLO, YOLOv8, Ultralytics, Exportación de modelos, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, exportar modelo
+---
+
+# Exportación de Modelos con Ultralytics YOLO
+
+Ecosistema de Ultralytics YOLO e integraciones
+
+## Introducción
+
+El objetivo final de entrenar un modelo es desplegarlo para aplicaciones en el mundo real. El modo exportación en Ultralytics YOLOv8 ofrece una gama versátil de opciones para exportar tu modelo entrenado a diferentes formatos, haciéndolo desplegable en varias plataformas y dispositivos. Esta guía integral pretende guiarte a través de los matices de la exportación de modelos, mostrando cómo lograr la máxima compatibilidad y rendimiento.
+
+Ver: Cómo Exportar un Modelo Entrenado Personalizado de Ultralytics YOLOv8 y Ejecutar Inferencia en Vivo en la Webcam.
+ +## ยฟPor Quรฉ Elegir el Modo Exportaciรณn de YOLOv8? + +- **Versatilidad:** Exporta a mรบltiples formatos incluyendo ONNX, TensorRT, CoreML y mรกs. +- **Rendimiento:** Acelera hasta 5 veces la velocidad en GPU con TensorRT y 3 veces en CPU con ONNX o OpenVINO. +- **Compatibilidad:** Hacer que tu modelo sea universalmente desplegable en numerosos entornos de hardware y software. +- **Facilidad de Uso:** Interfaz de lรญnea de comandos simple y API de Python para una exportaciรณn de modelos rรกpida y sencilla. + +### Caracterรญsticas Clave del Modo de Exportaciรณn + +Aquรญ tienes algunas de las funcionalidades destacadas: + +- **Exportaciรณn con Un Solo Clic:** Comandos simples para exportar a diferentes formatos. +- **Exportaciรณn por Lotes:** Exporta modelos capaces de inferencia por lotes. +- **Inferencia Optimizada:** Los modelos exportados estรกn optimizados para tiempos de inferencia mรกs rรกpidos. +- **Vรญdeos Tutoriales:** Guรญas y tutoriales en profundidad para una experiencia de exportaciรณn fluida. + +!!! Tip "Consejo" + + * Exporta a ONNX u OpenVINO para acelerar la CPU hasta 3 veces. + * Exporta a TensorRT para acelerar la GPU hasta 5 veces. + +## Ejemplos de Uso + +Exporta un modelo YOLOv8n a un formato diferente como ONNX o TensorRT. Consulta la secciรณn Argumentos mรกs abajo para una lista completa de argumentos de exportaciรณn. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carga un modelo + model = YOLO('yolov8n.pt') # carga un modelo oficial + model = YOLO('path/to/best.pt') # carga un modelo entrenado personalizado + + # Exporta el modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # exporta modelo oficial + yolo export model=path/to/best.pt format=onnx # exporta modelo entrenado personalizado + ``` + +## Argumentos + +Los ajustes de exportaciรณn para modelos YOLO se refieren a las diversas configuraciones y opciones utilizadas para guardar o exportar el modelo para su uso en otros entornos o plataformas. Estos ajustes pueden afectar el rendimiento del modelo, su tamaรฑo y su compatibilidad con diferentes sistemas. Algunos ajustes comunes de exportaciรณn de YOLO incluyen el formato del archivo del modelo exportado (p. ej., ONNX, TensorFlow SavedModel), el dispositivo en el que se ejecutarรก el modelo (p. ej., CPU, GPU) y la presencia de caracterรญsticas adicionales como mรกscaras o mรบltiples etiquetas por caja. Otros factores que pueden afectar el proceso de exportaciรณn incluyen la tarea especรญfica para la que se estรก utilizando el modelo y los requisitos o limitaciones del entorno o plataforma objetivo. Es importante considerar y configurar cuidadosamente estos ajustes para asegurar que el modelo exportado estรก optimizado para el caso de uso previsto y se pueda utilizar eficazmente en el entorno objetivo. + +| Llave | Valor | Descripciรณn | +|-------------|-----------------|-----------------------------------------------------------------| +| `format` | `'torchscript'` | formato al que exportar | +| `imgsz` | `640` | tamaรฑo de imagen como escalar o lista (h, w), p. ej. 
(640, 480) |
+| `keras` | `False` | usa Keras para la exportación de TF SavedModel |
+| `optimize` | `False` | TorchScript: optimizar para móvil |
+| `half` | `False` | cuantificación FP16 |
+| `int8` | `False` | cuantificación INT8 |
+| `dynamic` | `False` | ONNX/TensorRT: ejes dinámicos |
+| `simplify` | `False` | ONNX/TensorRT: simplificar modelo |
+| `opset` | `None` | ONNX: versión de opset (opcional, por defecto la más reciente) |
+| `workspace` | `4` | TensorRT: tamaño del espacio de trabajo (GB) |
+| `nms` | `False` | CoreML: añadir NMS |
+
+## Formatos de Exportación
+
+Los formatos de exportación disponibles de YOLOv8 están en la tabla a continuación. Puedes exportar a cualquier formato usando el argumento `format`, por ejemplo, `format='onnx'` o `format='engine'`.
+
+| Formato | Argumento `format` | Modelo | Metadatos | Argumentos |
+|---------|--------------------|--------|-----------|------------|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | ✅ | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | ✅ | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | ✅ | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | ✅ | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | ❌ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | ✅ | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | ✅ | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | ✅ | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | ✅ | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | ✅ | `imgsz`, `half` |
diff --git a/ultralytics/docs/es/modes/export.md:Zone.Identifier b/ultralytics/docs/es/modes/export.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/modes/export.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/modes/index.md b/ultralytics/docs/es/modes/index.md
new file mode 100755
index 0000000..1c9b408
--- /dev/null
+++ b/ultralytics/docs/es/modes/index.md
@@ -0,0 +1,73 @@
+---
+comments: true
+description: Desde el entrenamiento hasta el seguimiento, aprovecha al máximo YOLOv8 con Ultralytics. Obtén información y ejemplos para cada modo compatible, incluyendo validación, exportación y evaluación comparativa.
+keywords: Ultralytics, YOLOv8, Aprendizaje Automรกtico, Detecciรณn de Objetos, Entrenamiento, Validaciรณn, Predicciรณn, Exportaciรณn, Seguimiento, Benchmarking +--- + +# Modos de Ultralytics YOLOv8 + +Ecosistema Ultralytics YOLO e integraciones + +## Introducciรณn + +Ultralytics YOLOv8 no es solo otro modelo de detecciรณn de objetos; es un marco de trabajo versรกtil diseรฑado para cubrir todo el ciclo de vida de los modelos de aprendizaje automรกtico, desde la ingesta de datos y el entrenamiento del modelo hasta la validaciรณn, implementaciรณn y seguimiento en el mundo real. Cada modo sirve para un propรณsito especรญfico y estรก diseรฑado para ofrecerte la flexibilidad y eficiencia necesarias para diferentes tareas y casos de uso. + +

+<p align="center">
+  <strong>Mira:</strong> Tutorial de Modos Ultralytics: Entrenar, Validar, Predecir, Exportar y Hacer Benchmarking.
+</p>

+ +### Modos a Primera Vista + +Comprender los diferentes **modos** que soporta Ultralytics YOLOv8 es crรญtico para sacar el mรกximo provecho a tus modelos: + +- **Modo Entrenar (Train)**: Afina tu modelo en conjuntos de datos personalizados o pre-cargados. +- **Modo Validar (Val)**: Un punto de control post-entrenamiento para validar el rendimiento del modelo. +- **Modo Predecir (Predict)**: Libera el poder predictivo de tu modelo en datos del mundo real. +- **Modo Exportar (Export)**: Prepara tu modelo para la implementaciรณn en varios formatos. +- **Modo Seguir (Track)**: Extiende tu modelo de detecciรณn de objetos a aplicaciones de seguimiento en tiempo real. +- **Modo Benchmark (Benchmark)**: Analiza la velocidad y precisiรณn de tu modelo en diversos entornos de implementaciรณn. + +Esta guรญa completa tiene como objetivo proporcionarte una visiรณn general y conocimientos prรกcticos de cada modo, ayudรกndote a aprovechar todo el potencial de YOLOv8. + +## [Entrenar (Train)](train.md) + +El modo Entrenar se utiliza para entrenar un modelo YOLOv8 en un conjunto de datos personalizado. En este modo, el modelo se entrena utilizando el conjunto de datos y los hiperparรกmetros especificados. El proceso de entrenamiento implica optimizar los parรกmetros del modelo para que pueda predecir con precisiรณn las clases y ubicaciones de los objetos en una imagen. + +[Ejemplos de Entrenamiento](train.md){ .md-button } + +## [Validar (Val)](val.md) + +El modo Validar se usa para validar un modelo YOLOv8 despuรฉs de haber sido entrenado. En este modo, el modelo se evalรบa en un conjunto de validaciรณn para medir su precisiรณn y rendimiento de generalizaciรณn. Este modo se puede usar para ajustar los hiperparรกmetros del modelo y mejorar su rendimiento. + +[Ejemplos de Validaciรณn](val.md){ .md-button } + +## [Predecir (Predict)](predict.md) + +El modo Predecir se utiliza para realizar predicciones usando un modelo YOLOv8 entrenado en imรกgenes o videos nuevos. En este modo, el modelo se carga desde un archivo de punto de control, y el usuario puede proporcionar imรกgenes o videos para realizar inferencias. El modelo predice las clases y ubicaciones de los objetos en las imรกgenes o videos de entrada. + +[Ejemplos de Predicciรณn](predict.md){ .md-button } + +## [Exportar (Export)](export.md) + +El modo Exportar se utiliza para exportar un modelo YOLOv8 a un formato que se pueda usar para la implementaciรณn. En este modo, el modelo se convierte a un formato que puede ser utilizado por otras aplicaciones de software o dispositivos de hardware. Este modo es รบtil al implementar el modelo en entornos de producciรณn. + +[Ejemplos de Exportaciรณn](export.md){ .md-button } + +## [Seguir (Track)](track.md) + +El modo Seguir se usa para rastrear objetos en tiempo real utilizando un modelo YOLOv8. En este modo, el modelo se carga desde un archivo de punto de control, y el usuario puede proporcionar un flujo de video en vivo para realizar seguimiento de objetos en tiempo real. Este modo es รบtil para aplicaciones como sistemas de vigilancia o coches autรณnomos. + +[Ejemplos de Seguimiento](track.md){ .md-button } + +## [Benchmark (Benchmark)](benchmark.md) + +El modo Benchmark se utiliza para perfilar la velocidad y precisiรณn de varios formatos de exportaciรณn de YOLOv8. 
Los benchmarks proporcionan informaciรณn sobre el tamaรฑo del formato de exportaciรณn, sus mรฉtricas de `mAP50-95` (para detecciรณn de objetos, segmentaciรณn y pose) o mรฉtricas de `accuracy_top5` (para clasificaciรณn), y el tiempo de inferencia en milisegundos por imagen a travรฉs de varios formatos de exportaciรณn como ONNX, OpenVINO, TensorRT y otros. Esta informaciรณn puede ayudar a los usuarios a elegir el formato de exportaciรณn รณptimo para su caso de uso especรญfico, basado en sus requerimientos de velocidad y precisiรณn. + +[Ejemplos de Benchmarking](benchmark.md){ .md-button } diff --git a/ultralytics/docs/es/modes/index.md:Zone.Identifier b/ultralytics/docs/es/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/modes/predict.md b/ultralytics/docs/es/modes/predict.md new file mode 100755 index 0000000..0c1751a --- /dev/null +++ b/ultralytics/docs/es/modes/predict.md @@ -0,0 +1,226 @@ +--- +comments: true +description: Descubra cรณmo utilizar el modo predictivo de YOLOv8 para diversas tareas. Aprenda acerca de diferentes fuentes de inferencia como imรกgenes, videos y formatos de datos. +keywords: Ultralytics, YOLOv8, modo predictivo, fuentes de inferencia, tareas de predicciรณn, modo de transmisiรณn, procesamiento de imรกgenes, procesamiento de videos, aprendizaje automรกtico, IA +--- + +# Predicciรณn del Modelo con YOLO de Ultralytics + +Ecosistema de YOLO de Ultralytics e integraciones + +## Introducciรณn + +En el mundo del aprendizaje automรกtico y la visiรณn por computadora, el proceso de dar sentido a los datos visuales se denomina 'inferencia' o 'predicciรณn'. YOLOv8 de Ultralytics ofrece una caracterรญstica poderosa conocida como **modo predictivo** que estรก diseรฑada para inferencias de alto rendimiento y en tiempo real en una amplia gama de fuentes de datos. + +

+<p align="center">
+  <strong>Ver:</strong> Cómo Extraer las Salidas del Modelo YOLOv8 de Ultralytics para Proyectos Personalizados.
+</p>

+ +## Aplicaciones en el Mundo Real + +| Manufactura | Deportes | Seguridad | +|:-----------------------------------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------------------------------:| +| ![Detecciรณn de Repuestos de Vehรญculos](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Detecciรณn de Jugadores de Fรบtbol](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![Detecciรณn de Caรญdas de Personas](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| Detecciรณn de Repuestos de Vehรญculos | Detecciรณn de Jugadores de Fรบtbol | Detecciรณn de Caรญdas de Personas | + +## ยฟPor Quรฉ Utilizar YOLO de Ultralytics para la Inferencia? + +Estas son algunas razones para considerar el modo predictivo de YOLOv8 para sus necesidades de inferencia: + +- **Versatilidad:** Capaz de realizar inferencias en imรกgenes, videos e incluso transmisiones en vivo. +- **Rendimiento:** Diseรฑado para procesamiento en tiempo real y de alta velocidad sin sacrificar precisiรณn. +- **Facilidad de Uso:** Interfaces de Python y CLI intuitivas para una rรกpida implementaciรณn y pruebas. +- **Alta Personalizaciรณn:** Diversos ajustes y parรกmetros para afinar el comportamiento de inferencia del modelo segรบn sus requisitos especรญficos. + +### Caracterรญsticas Principales del Modo Predictivo + +El modo predictivo de YOLOv8 estรก diseรฑado para ser robusto y versรกtil, y cuenta con: + +- **Compatibilidad con Mรบltiples Fuentes de Datos:** Ya sea que sus datos estรฉn en forma de imรกgenes individuales, una colecciรณn de imรกgenes, archivos de video o transmisiones de video en tiempo real, el modo predictivo le tiene cubierto. +- **Modo de Transmisiรณn:** Utilice la funciรณn de transmisiรณn para generar un generador eficiente de memoria de objetos `Results`. Active esto configurando `stream=True` en el mรฉtodo de llamada del predictor. +- **Procesamiento por Lotes:** La capacidad de procesar mรบltiples imรกgenes o fotogramas de video en un solo lote, acelerando aรบn mรกs el tiempo de inferencia. +- **Amigable para la Integraciรณn:** Se integra fรกcilmente con pipelines de datos existentes y otros componentes de software, gracias a su API flexible. + +Los modelos YOLO de Ultralytics devuelven ya sea una lista de objetos `Results` de Python, o un generador de objetos `Results` de Python eficiente en memoria cuando se pasa `stream=True` al modelo durante la inferencia: + +!!! 
Example "Predict"

+
+    === "Devolver una lista con `stream=False`"
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n.pt')  # modelo YOLOv8n preentrenado
+
+        # Ejecutar inferencia por lotes en una lista de imágenes
+        results = model(['im1.jpg', 'im2.jpg'])  # devuelve una lista de objetos Results
+
+        # Procesar lista de resultados
+        for result in results:
+            boxes = result.boxes  # Objeto Boxes para salidas de bbox
+            masks = result.masks  # Objeto Masks para salidas de máscaras de segmentación
+            keypoints = result.keypoints  # Objeto Keypoints para salidas de postura
+            probs = result.probs  # Objeto Probs para salidas de clasificación
+        ```
+
+    === "Devolver un generador con `stream=True`"
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n.pt')  # modelo YOLOv8n preentrenado
+
+        # Ejecutar inferencia por lotes en una lista de imágenes
+        results = model(['im1.jpg', 'im2.jpg'], stream=True)  # devuelve un generador de objetos Results
+
+        # Procesar generador de resultados
+        for result in results:
+            boxes = result.boxes  # Objeto Boxes para salidas de bbox
+            masks = result.masks  # Objeto Masks para salidas de máscaras de segmentación
+            keypoints = result.keypoints  # Objeto Keypoints para salidas de postura
+            probs = result.probs  # Objeto Probs para salidas de clasificación
+        ```
+
+## Fuentes de Inferencia
+
+YOLOv8 puede procesar diferentes tipos de fuentes de entrada para la inferencia, como se muestra en la tabla a continuación. Las fuentes incluyen imágenes estáticas, transmisiones de video y varios formatos de datos. La tabla también indica si cada fuente se puede utilizar en modo de transmisión con el argumento `stream=True` ✅. El modo de transmisión es beneficioso para procesar videos o transmisiones en vivo, ya que crea un generador de resultados en lugar de cargar todos los fotogramas en la memoria.
+
+!!! Tip "Consejo"
+
+    Utilice `stream=True` para procesar videos largos o conjuntos de datos grandes y gestionar la memoria de forma eficiente. Cuando `stream=False`, los resultados de todos los fotogramas o puntos de datos se almacenan en la memoria, lo que puede acumularse rápidamente y causar errores de memoria insuficiente con entradas grandes. En contraste, `stream=True` utiliza un generador que solo mantiene en memoria los resultados del fotograma o punto de datos actual, reduciendo significativamente el consumo de memoria y previniendo problemas de falta de memoria.
+
+| Fuente | Argumento | Tipo | Notas |
+|---------------------|--------------------------------------------|----------------|------------------------------------------------------------------|
+| imagen | `'image.jpg'` | `str` o `Path` | Archivo único de imagen. |
+| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL a una imagen. |
+| captura de pantalla | `'screen'` | `str` | Captura una captura de pantalla. |
+| PIL | `Image.open('im.jpg')` | `PIL.Image` | Formato HWC con canales RGB. |
+| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | Formato HWC con canales BGR `uint8 (0-255)`. |
+| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | Formato HWC con canales BGR `uint8 (0-255)`. |
+| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | Formato BCHW con canales RGB `float32 (0.0-1.0)`. |
+| CSV | `'sources.csv'` | `str` o `Path` | Archivo CSV que contiene rutas a imágenes, videos o directorios.
| +| video โœ… | `'video.mp4'` | `str` o `Path` | Archivo de video en formatos como MP4, AVI, etc. | +| directorio โœ… | `'path/'` | `str` o `Path` | Ruta a un directorio que contiene imรกgenes o videos. | +| glob โœ… | `'path/*.jpg'` | `str` | Patrรณn glob para coincidir con mรบltiples archivos. Utilice el carรกcter `*` como comodรญn. | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | URL a un video de YouTube. | +| transmisiรณn โœ… | `'rtsp://example.com/media.mp4'` | `str` | URL para protocolos de transmisiรณn como RTSP, RTMP, TCP o una direcciรณn IP. | +| multi-transmisiรณn โœ… | `'list.streams'` | `str` o `Path` | Archivo de texto `*.streams` con una URL de transmisiรณn por fila, es decir, 8 transmisiones se ejecutarรกn con tamaรฑo de lote 8. | + +A continuaciรณn se muestran ejemplos de cรณdigo para usar cada tipo de fuente: + +!!! Example "Fuentes de predicciรณn" + + === "imagen" + Ejecute inferencia en un archivo de imagen. + ```python + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Definir la ruta al archivo de imagen + source = 'ruta/a/imagen.jpg' + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "captura de pantalla" + Ejecute inferencia en el contenido actual de la pantalla como captura de pantalla. + ```python + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Definir captura de pantalla actual como fuente + source = 'screen' + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "URL" + Ejecute inferencia en una imagen o video alojados remotamente a travรฉs de URL. + ```python + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Definir URL remota de imagen o video + source = 'https://ultralytics.com/images/bus.jpg' + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "PIL" + Ejecute inferencia en una imagen abierta con la Biblioteca de Imรกgenes de Python (PIL). + ```python + from PIL import Image + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Abrir una imagen usando PIL + source = Image.open('ruta/a/imagen.jpg') + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "OpenCV" + Ejecute inferencia en una imagen leรญda con OpenCV. + ```python + import cv2 + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Leer una imagen usando OpenCV + source = cv2.imread('ruta/a/imagen.jpg') + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "numpy" + Ejecute inferencia en una imagen representada como un array de numpy. + ```python + import numpy as np + from ultralytics import YOLO + + # Cargar el modelo YOLOv8n preentrenado + model = YOLO('yolov8n.pt') + + # Crear un array aleatorio de numpy con forma HWC (640, 640, 3) con valores en rango [0, 255] y tipo uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # Ejecutar inferencia en la fuente + results = model(source) # lista de objetos Results + ``` + + === "torch" + Ejecute inferencia en una imagen representada como un tensor de PyTorch. 
+        ```python
+        import torch
+        from ultralytics import YOLO
+
+        # Cargar el modelo YOLOv8n preentrenado
+        model = YOLO('yolov8n.pt')
+
+        # Crear un tensor aleatorio de torch con forma BCHW (1, 3, 640, 640) con valores en rango [0, 1] y tipo float32
+        source = torch.rand(1, 3, 640, 640, dtype=torch.float32)
+
+        # Ejecutar inferencia en la fuente
+        results = model(source)  # lista de objetos Results
+        ```
diff --git a/ultralytics/docs/es/modes/predict.md:Zone.Identifier b/ultralytics/docs/es/modes/predict.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/modes/predict.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/modes/track.md b/ultralytics/docs/es/modes/track.md
new file mode 100755
index 0000000..e7b514d
--- /dev/null
+++ b/ultralytics/docs/es/modes/track.md
@@ -0,0 +1,200 @@
+---
+comments: true
+description: Aprende a utilizar Ultralytics YOLO para el seguimiento de objetos en flujos de video. Guías para usar diferentes rastreadores y personalizar la configuración del rastreador.
+keywords: Ultralytics, YOLO, seguimiento de objetos, flujos de video, BoT-SORT, ByteTrack, guía de Python, guía de CLI
+---
+
+# Seguimiento de Múltiples Objetos con Ultralytics YOLO
+
+Ejemplos de seguimiento de múltiples objetos
+
+El seguimiento de objetos en el ámbito del análisis de video es una tarea crítica que no solo identifica la ubicación y clase de los objetos dentro del cuadro, sino que también mantiene un ID único para cada objeto detectado a medida que avanza el video. Las aplicaciones son ilimitadas, desde la vigilancia y la seguridad hasta el análisis deportivo en tiempo real.
+
+## ¿Por Qué Elegir Ultralytics YOLO para el Seguimiento de Objetos?
+
+La salida de los rastreadores de Ultralytics es consistente con la detección de objetos estándar, pero con el valor añadido de los IDs de objetos. Esto facilita el seguimiento de objetos en flujos de video y la realización de análisis posteriores. Aquí tienes algunas razones por las que deberías considerar usar Ultralytics YOLO para tus necesidades de seguimiento de objetos:
+
+- **Eficiencia:** Procesa flujos de video en tiempo real sin comprometer la precisión.
+- **Flexibilidad:** Soporta múltiples algoritmos de seguimiento y configuraciones.
+- **Facilidad de Uso:** API simple de Python y opciones CLI para una rápida integración y despliegue.
+- **Personalización:** Fácil de usar con modelos YOLO entrenados a medida, permitiendo la integración en aplicaciones específicas del dominio.
+

+<p align="center">
+  <strong>Ver:</strong> Detección de Objetos y Seguimiento con Ultralytics YOLOv8.
+</p>

+ +## Aplicaciones en el Mundo Real + +| Transporte | Venta al por Menor | Acuicultura | +|:------------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------------------:| +| ![Seguimiento de Vehรญculos](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![Seguimiento de Personas](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![Seguimiento de Peces](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) | +| Seguimiento de Vehรญculos | Seguimiento de Personas | Seguimiento de Peces | + +## Caracterรญsticas a Simple Vista + +Ultralytics YOLO extiende sus caracterรญsticas de detecciรณn de objetos para proporcionar un seguimiento de objetos robusto y versรกtil: + +- **Seguimiento en Tiempo Real:** Rastrea sin problemas los objetos en videos de alta frecuencia de cuadros. +- **Soporte de Mรบltiples Rastreadores:** Elige entre una variedad de algoritmos de seguimiento establecidos. +- **Configuraciones de Rastreador Personalizables:** Adapta el algoritmo de seguimiento para satisfacer requisitos especรญficos ajustando diversos parรกmetros. + +## Rastreadores Disponibles + +Ultralytics YOLO soporta los siguientes algoritmos de seguimiento. Pueden ser habilitados pasando el archivo de configuraciรณn YAML relevante como `tracker=tracker_type.yaml`: + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Usa `botsort.yaml` para habilitar este rastreador. +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Usa `bytetrack.yaml` para habilitar este rastreador. + +El rastreador predeterminado es BoT-SORT. + +## Seguimiento + +Para ejecutar el rastreador en flujos de video, usa un modelo Detect, Segment o Pose entrenado tales como YOLOv8n, YOLOv8n-seg y YOLOv8n-pose. + +!!! 
Example "Ejemplo"

+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo oficial o personalizado
+        model = YOLO('yolov8n.pt')  # Cargar un modelo oficial Detect
+        model = YOLO('yolov8n-seg.pt')  # Cargar un modelo oficial Segment
+        model = YOLO('yolov8n-pose.pt')  # Cargar un modelo oficial Pose
+        model = YOLO('path/to/best.pt')  # Cargar un modelo entrenado a medida
+
+        # Realizar el seguimiento con el modelo
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # Seguimiento con el rastreador predeterminado
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # Seguimiento con el rastreador ByteTrack
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Realizar seguimiento con varios modelos usando la interfaz de línea de comandos
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # Modelo oficial Detect
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # Modelo oficial Segment
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # Modelo oficial Pose
+        yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4"  # Modelo entrenado a medida
+
+        # Realizar seguimiento usando el rastreador ByteTrack
+        yolo track model=path/to/best.pt tracker="bytetrack.yaml"
+        ```
+
+Como se puede ver en el uso anterior, el seguimiento está disponible para todos los modelos Detect, Segment y Pose ejecutados en videos o fuentes de transmisión.
+
+## Configuración
+
+### Argumentos de Seguimiento
+
+La configuración de seguimiento comparte propiedades con el modo Predict, como `conf`, `iou` y `show`. Para configuraciones adicionales, consulta la página del modo [Predict](https://docs.ultralytics.com/modes/predict/).
+
+!!! Example "Ejemplo"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Configurar los parámetros de seguimiento y ejecutar el rastreador
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Configurar parámetros de seguimiento y ejecutar el rastreador usando la interfaz de línea de comandos
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### Selección de Rastreador
+
+Ultralytics también te permite usar un archivo de configuración de rastreador modificado. Para hacerlo, simplemente haz una copia de un archivo de configuración de rastreador (por ejemplo, `custom_tracker.yaml`) de [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) y modifica cualquier configuración (excepto el `tracker_type`) según tus necesidades.
+
+!!! Example "Ejemplo"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar el modelo y ejecutar el rastreador con un archivo de configuración personalizado
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Cargar el modelo y ejecutar el rastreador con un archivo de configuración personalizado usando la interfaz de línea de comandos
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml'
+        ```
+
+Para obtener una lista completa de los argumentos de seguimiento, consulta la página [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers).
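+
+A modo de referencia, así podría verse un `custom_tracker.yaml` mínimo basado en ByteTrack. Es un boceto ilustrativo: los nombres de campo y los valores por defecto mostrados pueden variar según la versión de Ultralytics instalada, por lo que conviene partir siempre de una copia del archivo original del repositorio.
+
+```yaml
+# custom_tracker.yaml -- boceto ilustrativo basado en bytetrack.yaml
+tracker_type: bytetrack  # tipo de rastreador (no modificar al personalizar una copia)
+track_high_thresh: 0.5   # umbral de confianza para la primera asociación
+track_low_thresh: 0.1    # umbral de confianza para la segunda asociación
+new_track_thresh: 0.6    # confianza mínima para iniciar un nuevo track
+track_buffer: 30         # fotogramas a conservar antes de descartar un track perdido
+match_thresh: 0.8        # umbral de coincidencia para la asociación de tracks
+```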
+ +## Ejemplos en Python + +### Bucle de Seguimiento Persistente + +Aquรญ hay un script en Python que utiliza OpenCV (`cv2`) y YOLOv8 para ejecutar el seguimiento de objetos en fotogramas de video. Este script aรบn asume que ya has instalado los paquetes necesarios (`opencv-python` y `ultralytics`). El argumento `persist=True` le indica al rastreador que la imagen o fotograma actual es el siguiente en una secuencia y que espera rastros de la imagen anterior en la imagen actual. + +!!! Example "Bucle de transmisiรณn en vivo con seguimiento" + + ```python + import cv2 + from ultralytics import YOLO + + # Cargar el modelo YOLOv8 + model = YOLO('yolov8n.pt') + + # Abrir el archivo de video + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # Bucle a travรฉs de los fotogramas del video + while cap.isOpened(): + # Leer un fotograma del video + success, frame = cap.read() + + if success: + # Ejecutar seguimiento YOLOv8 en el fotograma, persistiendo los rastreos entre fotogramas + results = model.track(frame, persist=True) + + # Visualizar los resultados en el fotograma + annotated_frame = results[0].plot() + + # Mostrar el fotograma anotado + cv2.imshow("Seguimiento YOLOv8", annotated_frame) + + # Romper el bucle si se presiona 'q' + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # Romper el bucle si se alcanza el final del video + break + + # Liberar el objeto de captura de video y cerrar la ventana de visualizaciรณn + cap.release() + cv2.destroyAllWindows() + ``` + +Toma en cuenta el cambio de `model(frame)` a `model.track(frame)`, que habilita el seguimiento de objetos en lugar de simplemente la detecciรณn. Este script modificado ejecutarรก el rastreador en cada fotograma del video, visualizarรก los resultados y los mostrarรก en una ventana. El bucle puede ser terminado presionando 'q'. + +## Contribuir con Nuevos Rastreadores + +ยฟEres experto en seguimiento de mรบltiples objetos y has implementado o adaptado exitosamente un algoritmo de seguimiento con Ultralytics YOLO? Te invitamos a contribuir en nuestra secciรณn de Rastreadores en [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! Tus aplicaciones en el mundo real y soluciones podrรญan ser invaluables para los usuarios que trabajan en tareas de seguimiento. + +Al contribuir en esta secciรณn, ayudarรกs a ampliar el alcance de las soluciones de seguimiento disponibles dentro del marco de trabajo de Ultralytics YOLO, aรฑadiendo otra capa de funcionalidad y utilidad para la comunidad. + +Para iniciar tu contribuciรณn, por favor consulta nuestra [Guรญa de Contribuciรณn](https://docs.ultralytics.com/help/contributing) para obtener instrucciones completas sobre cรณmo enviar una Solicitud de Extracciรณn (PR) ๐Ÿ› ๏ธ. ยกEstamos emocionados de ver lo que traes a la mesa! + +Juntos, vamos a mejorar las capacidades de seguimiento del ecosistema Ultralytics YOLO ๐Ÿ™! 
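+
+Como nota práctica final, si necesitas los IDs asignados por el rastreador para análisis posteriores, un boceto mínimo podría ser el siguiente (se asume que `result.boxes.id` está disponible en tu versión; puede ser `None` en fotogramas sin tracks confirmados):
+
+```python
+from ultralytics import YOLO
+
+# Cargar el modelo y ejecutar el seguimiento sobre un video en modo de transmisión
+model = YOLO('yolov8n.pt')
+results = model.track(source="path/to/video.mp4", stream=True)
+
+for result in results:
+    # boxes.id contiene el ID de seguimiento de cada detección del fotograma
+    if result.boxes.id is not None:
+        ids = result.boxes.id.int().tolist()
+        print(f"IDs de seguimiento en este fotograma: {ids}")
+```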
diff --git a/ultralytics/docs/es/modes/track.md:Zone.Identifier b/ultralytics/docs/es/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/modes/train.md b/ultralytics/docs/es/modes/train.md new file mode 100755 index 0000000..6cb798a --- /dev/null +++ b/ultralytics/docs/es/modes/train.md @@ -0,0 +1,206 @@ +--- +comments: true +description: Guรญa paso a paso para entrenar modelos YOLOv8 con Ultralytics YOLO incluyendo ejemplos de entrenamiento con una sola GPU y mรบltiples GPUs +keywords: Ultralytics, YOLOv8, YOLO, detecciรณn de objetos, modo de entrenamiento, conjunto de datos personalizado, entrenamiento GPU, multi-GPU, hiperparรกmetros, ejemplos CLI, ejemplos Python +--- + +# Entrenamiento de Modelos con Ultralytics YOLO + +Ecosistema e integraciones de Ultralytics YOLO + +## Introducciรณn + +Entrenar un modelo de aprendizaje profundo implica alimentarlo con datos y ajustar sus parรกmetros para que pueda hacer predicciones precisas. El modo de entrenamiento en Ultralytics YOLOv8 estรก diseรฑado para un entrenamiento efectivo y eficiente de modelos de detecciรณn de objetos, aprovechando al mรกximo las capacidades del hardware moderno. Esta guรญa tiene como objetivo cubrir todos los detalles que necesita para comenzar a entrenar sus propios modelos utilizando el robusto conjunto de caracterรญsticas de YOLOv8. + +

+<p align="center">
+  <strong>Ver:</strong> Cómo Entrenar un modelo YOLOv8 en Tu Conjunto de Datos Personalizado en Google Colab.
+</p>

+ +## ยฟPor Quรฉ Elegir Ultralytics YOLO para Entrenamiento? + +Aquรญ hay algunas razones convincentes para optar por el modo Entrenamiento de YOLOv8: + +- **Eficiencia:** Aprovecha al mรกximo tu hardware, ya sea en una configuraciรณn de una sola GPU o escalando entre mรบltiples GPUs. +- **Versatilidad:** Entrena con conjuntos de datos personalizados ademรกs de los ya disponibles como COCO, VOC e ImageNet. +- **Amigable al Usuario:** Interfaces CLI y Python simples pero potentes para una experiencia de entrenamiento sencilla. +- **Flexibilidad de Hiperparรกmetros:** Una amplia gama de hiperparรกmetros personalizables para ajustar el rendimiento del modelo. + +### Caracterรญsticas Clave del Modo Entrenamiento + +Las siguientes son algunas caracterรญsticas notables del modo Entrenamiento de YOLOv8: + +- **Descarga Automรกtica de Conjuntos de Datos:** Conjuntos de datos estรกndar como COCO, VOC e ImageNet se descargan automรกticamente en el primer uso. +- **Soporte Multi-GPU:** Escala tus esfuerzos de entrenamiento sin problemas en mรบltiples GPUs para acelerar el proceso. +- **Configuraciรณn de Hiperparรกmetros:** La opciรณn de modificar hiperparรกmetros a travรฉs de archivos de configuraciรณn YAML o argumentos CLI. +- **Visualizaciรณn y Monitoreo:** Seguimiento en tiempo real de mรฉtricas de entrenamiento y visualizaciรณn del proceso de aprendizaje para una mejor comprensiรณn. + +!!! Tip "Consejo" + + * Los conjuntos de datos de YOLOv8 como COCO, VOC, ImageNet y muchos otros se descargan automรกticamente en el primer uso, es decir, `yolo train data=coco.yaml` + +## Ejemplos de Uso + +Entrena YOLOv8n en el conjunto de datos COCO128 durante 100 รฉpocas con un tamaรฑo de imagen de 640. El dispositivo de entrenamiento se puede especificar usando el argumento `device`. Si no se pasa ningรบn argumento, se usarรก la GPU `device=0` si estรก disponible; de lo contrario, se usarรก `device=cpu`. Consulta la secciรณn de Argumentos a continuaciรณn para una lista completa de argumentos de entrenamiento. + +!!! Example "Ejemplo de Entrenamiento con una sola GPU y CPU" + + El dispositivo se determina automรกticamente. Si hay una GPU disponible, se usarรก; de lo contrario, el entrenamiento comenzarรก en la CPU. + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n.yaml') # construir un modelo nuevo desde YAML + model = YOLO('yolov8n.pt') # cargar un modelo preentrenado (recomendado para entrenamiento) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # construir desde YAML y transferir pesos + + # Entrenar el modelo + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "CLI" + + ```bash + # Construir un modelo nuevo desde YAML y comenzar el entrenamiento desde cero + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # Comenzar el entrenamiento desde un modelo preentrenado *.pt + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # Construir un modelo nuevo desde YAML, transferir pesos preentrenados a รฉl y comenzar el entrenamiento + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### Entrenamiento Multi-GPU + +El entrenamiento Multi-GPU permite una utilizaciรณn mรกs eficiente de los recursos de hardware disponibles, distribuyendo la carga de entrenamiento en varias GPUs. Esta caracterรญstica estรก disponible tanto a travรฉs de la API de Python como de la interfaz de lรญnea de comandos. 
Para habilitar el entrenamiento Multi-GPU, especifica los IDs de los dispositivos GPU que deseas usar.

+
+!!! Example "Ejemplo de Entrenamiento Multi-GPU"
+
+    Para entrenar con 2 GPUs, dispositivos CUDA 0 y 1, usa los siguientes comandos. Amplía a GPUs adicionales según sea necesario.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n.pt')  # cargar un modelo preentrenado (recomendado para entrenamiento)
+
+        # Entrenar el modelo con 2 GPUs
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1])
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Comenzar el entrenamiento desde un modelo preentrenado *.pt usando las GPUs 0 y 1
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1
+        ```
+
+### Entrenamiento con Apple M1 y M2 MPS
+
+Con el soporte para los chips Apple M1 y M2 integrado en los modelos Ultralytics YOLO, ahora es posible entrenar tus modelos en dispositivos que utilizan el potente marco Metal Performance Shaders (MPS). MPS ofrece una forma de alto rendimiento de ejecutar tareas de cálculo y procesamiento de imágenes en el silicio personalizado de Apple.
+
+Para habilitar el entrenamiento en chips Apple M1 y M2, debes especificar 'mps' como tu dispositivo al iniciar el proceso de entrenamiento. A continuación se muestra un ejemplo de cómo podrías hacerlo en Python y a través de la línea de comandos:
+
+!!! Example "Ejemplo de Entrenamiento MPS"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n.pt')  # cargar un modelo preentrenado (recomendado para entrenamiento)
+
+        # Entrenar el modelo con MPS
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Comenzar el entrenamiento desde un modelo preentrenado *.pt usando MPS
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps
+        ```
+
+Al aprovechar el poder computacional de los chips M1/M2, esto permite un procesamiento más eficiente de las tareas de entrenamiento. Para obtener una guía más detallada y opciones de configuración avanzadas, consulta la [documentación de PyTorch MPS](https://pytorch.org/docs/stable/notes/mps.html).
+
+## Registros (Logging)
+
+Al entrenar un modelo YOLOv8, puedes encontrar valioso llevar un registro del rendimiento del modelo a lo largo del tiempo. Aquí es donde entra en juego el registro. Ultralytics YOLO ofrece soporte para tres tipos de registradores: Comet, ClearML y TensorBoard.
+
+Para usar un registrador, selecciónalo en el menú desplegable del fragmento de código a continuación y ejecútalo. El registrador elegido se instalará e inicializará.
+
+### Comet
+
+[Comet](https://www.comet.ml/site/) es una plataforma que permite a los científicos de datos y desarrolladores rastrear, comparar, explicar y optimizar experimentos y modelos. Ofrece funcionalidades como métricas en tiempo real, diferencias de código y seguimiento de hiperparámetros.
+
+Para usar Comet:
+
+!!! Example "Ejemplo"
+
+    === "Python"
+        ```python
+        # pip install comet_ml
+        import comet_ml
+
+        comet_ml.init()
+        ```
+
+Recuerda iniciar sesión en tu cuenta de Comet en su sitio web y obtener tu clave API. Necesitarás agregarla a tus variables de entorno o a tu script para registrar tus experimentos.
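+
+Por ejemplo, una forma habitual de hacerlo (asumiendo que tu versión de Comet lee la variable de entorno `COMET_API_KEY`, como es habitual en `comet_ml`) sería:
+
+```bash
+# Definir la clave API de Comet como variable de entorno antes de entrenar
+export COMET_API_KEY=<tu_clave_api>
+
+# Entrenar normalmente; las métricas del experimento se registrarán en Comet
+yolo train data=coco128.yaml model=yolov8n.pt epochs=10
+```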
+
+### ClearML
+
+[ClearML](https://www.clear.ml/) es una plataforma de código abierto que automatiza el seguimiento de experimentos y ayuda a compartir recursos de manera eficiente. Está diseñada para ayudar a los equipos a gestionar, ejecutar y reproducir su trabajo de ML de manera más eficiente.
+
+Para usar ClearML:
+
+!!! Example "Ejemplo"
+
+    === "Python"
+        ```python
+        # pip install clearml
+        import clearml
+
+        clearml.browser_login()
+        ```
+
+Después de ejecutar este script, necesitarás iniciar sesión en tu cuenta de ClearML en el navegador y autenticar tu sesión.
+
+### TensorBoard
+
+[TensorBoard](https://www.tensorflow.org/tensorboard) es una herramienta de visualización para TensorFlow. Te permite visualizar tu grafo de TensorFlow, trazar métricas cuantitativas sobre la ejecución de tu grafo y mostrar datos adicionales, como las imágenes que lo atraviesan.
+
+Para usar TensorBoard en [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb):
+
+!!! Example "Ejemplo"
+
+    === "CLI"
+        ```bash
+        %load_ext tensorboard
+        %tensorboard --logdir ultralytics/runs  # reemplazar con el directorio 'runs'
+        ```
+
+Para usar TensorBoard localmente, ejecuta el siguiente comando y visualiza los resultados en http://localhost:6006/.
+
+!!! Example "Ejemplo"
+
+    === "CLI"
+        ```bash
+        tensorboard --logdir ultralytics/runs  # reemplazar con el directorio 'runs'
+        ```
+
+Esto cargará TensorBoard y lo dirigirá al directorio donde se guardan tus registros de entrenamiento.
+
+Después de configurar tu registrador, puedes proceder con el entrenamiento de tu modelo. Todas las métricas de entrenamiento se registrarán automáticamente en la plataforma elegida y podrás acceder a estos registros para monitorear el rendimiento de tu modelo a lo largo del tiempo, comparar diferentes modelos e identificar áreas de mejora.
diff --git a/ultralytics/docs/es/modes/train.md:Zone.Identifier b/ultralytics/docs/es/modes/train.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/modes/train.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/modes/val.md b/ultralytics/docs/es/modes/val.md
new file mode 100755
index 0000000..ee81404
--- /dev/null
+++ b/ultralytics/docs/es/modes/val.md
@@ -0,0 +1,86 @@
+---
+comments: true
+description: Guía para validar modelos YOLOv8. Aprenda a evaluar el rendimiento de sus modelos YOLO utilizando configuraciones y métricas de validación con ejemplos en Python y CLI.
+keywords: Ultralytics, Documentación YOLO, YOLOv8, validación, evaluación de modelos, hiperparámetros, precisión, métricas, Python, CLI
+---
+
+# Validación de modelos con Ultralytics YOLO
+
+Ecosistema e integraciones de Ultralytics YOLO
+
+## Introducción
+
+La validación es un paso crítico en el flujo de trabajo de aprendizaje automático, permitiéndole evaluar la calidad de sus modelos entrenados. El modo Val en Ultralytics YOLOv8 proporciona un robusto conjunto de herramientas y métricas para evaluar el rendimiento de sus modelos de detección de objetos. Esta guía sirve como un recurso completo para comprender cómo utilizar efectivamente el modo Val para asegurar que sus modelos sean precisos y confiables.
+
+## ¿Por qué validar con Ultralytics YOLO?
+
+Estas son las ventajas de usar el modo Val de YOLOv8:
+
+- **Precisión:** Obtenga métricas precisas como mAP50, mAP75 y mAP50-95 para evaluar de manera integral su modelo.
+- **Comodidad:** Utilice funciones integradas que recuerdan los ajustes de entrenamiento, simplificando el proceso de validaciรณn. +- **Flexibilidad:** Valide su modelo con el mismo conjunto de datos o diferentes conjuntos de datos y tamaรฑos de imagen. +- **Ajuste de Hiperparรกmetros:** Use las mรฉtricas de validaciรณn para ajustar su modelo y mejorar el rendimiento. + +### Caracterรญsticas principales del modo Val + +Estas son las funcionalidades notables ofrecidas por el modo Val de YOLOv8: + +- **Configuraciones Automatizadas:** Los modelos recuerdan sus configuraciones de entrenamiento para una validaciรณn sencilla. +- **Soporte de Mรบltiples Mรฉtricas:** Evalรบe su modelo basado en una gama de mรฉtricas de precisiรณn. +- **CLI y API de Python:** Elija entre la interfaz de lรญnea de comandos o API de Python basada en su preferencia para validaciรณn. +- **Compatibilidad de Datos:** Funciona sin problemas con conjuntos de datos utilizados durante la fase de entrenamiento asรญ como con conjuntos de datos personalizados. + +!!! Tip "Consejo" + + * Los modelos YOLOv8 recuerdan automรกticamente sus ajustes de entrenamiento, asรญ que puede validar un modelo en el mismo tamaรฑo de imagen y en el conjunto de datos original fรกcilmente con solo `yolo val model=yolov8n.pt` o `model('yolov8n.pt').val()` + +## Ejemplos de Uso + +Valide la precisiรณn del modelo YOLOv8n entrenado en el conjunto de datos COCO128. No es necesario pasar ningรบn argumento ya que el `modelo` retiene sus `datos` de entrenamiento y argumentos como atributos del modelo. Vea la secciรณn de Argumentos a continuaciรณn para una lista completa de argumentos de exportaciรณn. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n.pt') # cargar un modelo oficial + model = YOLO('ruta/a/best.pt') # cargar un modelo personalizado + + # Validar el modelo + metrics = model.val() # no se necesitan argumentos, el conjunto de datos y ajustes se recuerdan + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # una lista que contiene map50-95 de cada categorรญa + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # val model oficial + yolo detect val model=ruta/a/best.pt # val model personalizado + ``` + +## Argumentos + +Los ajustes de validaciรณn para modelos YOLO se refieren a los diversos hiperparรกmetros y configuraciones utilizados para evaluar el rendimiento del modelo en un conjunto de datos de validaciรณn. Estos ajustes pueden afectar el rendimiento, la velocidad y la precisiรณn del modelo. Algunos ajustes comunes de validaciรณn YOLO incluyen el tamaรฑo del lote, la frecuencia con la que se realiza la validaciรณn durante el entrenamiento y las mรฉtricas utilizadas para evaluar el rendimiento del modelo. Otros factores que pueden afectar el proceso de validaciรณn incluyen el tamaรฑo y la composiciรณn del conjunto de datos de validaciรณn y la tarea especรญfica para la que se utiliza el modelo. Es importante ajustar y experimentar cuidadosamente con estos ajustes para asegurarse de que el modelo estรฉ funcionando bien en el conjunto de datos de validaciรณn y para detectar y prevenir el sobreajuste. 
| Clave | Valor | Descripción |
+|---------------|---------|----------------------------------------------------------------------------------------------------|
+| `data` | `None` | ruta al archivo de datos, por ejemplo coco128.yaml |
+| `imgsz` | `640` | tamaño de las imágenes de entrada como entero |
+| `batch` | `16` | número de imágenes por lote (-1 para AutoBatch) |
+| `save_json` | `False` | guardar resultados en archivo JSON |
+| `save_hybrid` | `False` | guardar versión híbrida de las etiquetas (etiquetas + predicciones adicionales) |
+| `conf` | `0.001` | umbral de confianza del objeto para detección |
+| `iou` | `0.6` | umbral de Intersección sobre Unión (IoU) para NMS |
+| `max_det` | `300` | número máximo de detecciones por imagen |
+| `half` | `True` | usar media precisión en punto flotante (FP16) |
+| `device` | `None` | dispositivo en el que se ejecuta, por ejemplo dispositivo CUDA `device=0/1/2/3` o `device=cpu` |
+| `dnn` | `False` | utilizar OpenCV DNN para la inferencia ONNX |
+| `plots` | `False` | mostrar gráficos durante el entrenamiento |
+| `rect` | `False` | val rectangular con cada lote agrupado para el mínimo relleno |
+| `split` | `val` | división del conjunto de datos a utilizar para la validación, por ejemplo 'val', 'test' o 'train' |
diff --git a/ultralytics/docs/es/modes/val.md:Zone.Identifier b/ultralytics/docs/es/modes/val.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/modes/val.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/quickstart.md b/ultralytics/docs/es/quickstart.md
new file mode 100755
index 0000000..bc7b5e3
--- /dev/null
+++ b/ultralytics/docs/es/quickstart.md
@@ -0,0 +1,198 @@
+---
+comments: true
+description: Explore diversos métodos para instalar Ultralytics usando pip, conda, git y Docker. Aprende cómo usar Ultralytics con la interfaz de línea de comandos o dentro de tus proyectos de Python.
+keywords: instalación de Ultralytics, pip install Ultralytics, instalación de Docker Ultralytics, interfaz de línea de comandos de Ultralytics, interfaz de Python de Ultralytics
+---
+
+## Instalar Ultralytics
+
+Ultralytics ofrece varios métodos de instalación, incluyendo pip, conda y Docker. Instala YOLOv8 a través del paquete `ultralytics` de pip para la última versión estable, o clonando el [repositorio de GitHub de Ultralytics](https://github.com/ultralytics/ultralytics) para obtener la versión más actualizada. Docker puede utilizarse para ejecutar el paquete en un contenedor aislado, evitando la instalación local.
+
+!!! Example "Instalar"
+
+    === "Instalación con Pip (recomendado)"
+        Instala el paquete `ultralytics` usando pip o actualiza una instalación existente ejecutando `pip install -U ultralytics`. Visita el Índice de Paquetes de Python (PyPI) para más detalles sobre el paquete `ultralytics`: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/).
+
+        [![Versión en PyPI](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Descargas](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics)
+
+        ```bash
+        # Instalar el paquete ultralytics desde PyPI
+        pip install ultralytics
+        ```
+
+        También puedes instalar el paquete `ultralytics` directamente desde el [repositorio](https://github.com/ultralytics/ultralytics) de GitHub. Esto puede ser útil si quieres la última versión de desarrollo.
Asegรบrate de tener la herramienta de lรญnea de comandos Git instalada en tu sistema. El comando `@main` instala la rama `main` y puede modificarse a otra rama, es decir, `@my-branch`, o eliminarse por completo para volver por defecto a la rama `main`. + + ```bash + # Instalar el paquete ultralytics desde GitHub + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Instalaciรณn con Conda" + Conda es un gestor de paquetes alternativo a pip que tambiรฉn puede utilizarse para la instalaciรณn. Visita Anaconda para mรกs detalles en [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). El repositorio de paquetes de alimentaciรณn de Ultralytics para actualizar el paquete de conda estรก en [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/). + + + [![Receta de Conda](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Descargas de Conda](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Versiรณn de Conda](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Plataformas de Conda](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # Instalar el paquete ultralytics usando conda + conda install -c conda-forge ultralytics + ``` + + !!! Note "Nota" + + Si estรกs instalando en un entorno CUDA, la mejor prรกctica es instalar `ultralytics`, `pytorch` y `pytorch-cuda` en el mismo comando para permitir que el gestor de paquetes de conda resuelva cualquier conflicto, o en su defecto instalar `pytorch-cuda` al final para permitir que sobrescriba el paquete especรญfico de CPU `pytorch` si es necesario. + ```bash + # Instalar todos los paquetes juntos usando conda + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Imagen Docker de Conda + + Las imรกgenes Docker de Conda de Ultralytics tambiรฉn estรกn disponibles en [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). Estas imรกgenes estรกn basadas en [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) y son una manera simple de comenzar a usar `ultralytics` en un entorno Conda. + + ```bash + # Establecer el nombre de la imagen como una variable + t=ultralytics/ultralytics:latest-conda + + # Descargar la รบltima imagen de ultralytics de Docker Hub + sudo docker pull $t + + # Ejecutar la imagen de ultralytics en un contenedor con soporte para GPU + sudo docker run -it --ipc=host --gpus all $t # todas las GPUs + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # especificar GPUs + ``` + + === "Clonar con Git" + Clona el repositorio `ultralytics` si estรกs interesado en contribuir al desarrollo o deseas experimentar con el cรณdigo fuente mรกs reciente. Despuรฉs de clonar, navega al directorio e instala el paquete en modo editable `-e` usando pip. + ```bash + # Clonar el repositorio ultralytics + git clone https://github.com/ultralytics/ultralytics + + # Navegar al directorio clonado + cd ultralytics + + # Instalar el paquete en modo editable para desarrollo + pip install -e . + ``` + +Consulta el archivo [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) de `ultralytics` para ver una lista de dependencias. 
Ten en cuenta que todos los ejemplos anteriores instalan todas las dependencias requeridas. + +

+<p align="center">
+  <strong>Watch:</strong> Ultralytics YOLO Quick Start Guide
+</p>

+
+!!! Tip "Consejo"
+
+    Los requisitos de PyTorch varían según el sistema operativo y los requisitos de CUDA, por lo que se recomienda instalar primero PyTorch siguiendo las instrucciones en [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally).
+
+    Instrucciones de Instalación de PyTorch
+
+## Usar Ultralytics con CLI
+
+La interfaz de línea de comandos (CLI) de Ultralytics permite el uso de comandos simples de una sola línea sin la necesidad de un entorno de Python. La CLI no requiere personalización ni código Python. Puedes simplemente ejecutar todas las tareas desde el terminal con el comando `yolo`. Consulta la [Guía de CLI](/../usage/cli.md) para aprender más sobre el uso de YOLOv8 desde la línea de comandos.
+
+!!! Example "Ejemplo"
+
+    === "Sintaxis"
+
+        Los comandos `yolo` de Ultralytics usan la siguiente sintaxis:
+        ```bash
+        yolo TAREA MODO ARGUMENTOS
+
+        Donde TAREA (opcional) es uno de [detect, segment, classify]
+              MODO (requerido) es uno de [train, val, predict, export, track]
+              ARGUMENTOS (opcionales) son cualquier número de pares personalizados 'arg=valor' como 'imgsz=320' que sobrescriben los valores por defecto.
+        ```
+        Ver todos los ARGUMENTOS en la guía completa [Configuration Guide](/../usage/cfg.md) o con `yolo cfg`
+
+    === "Entrenar"
+
+        Entrenar un modelo de detección durante 10 épocas con una tasa de aprendizaje inicial de 0.01:
+        ```bash
+        yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
+        ```
+
+    === "Predecir"
+
+        Predecir un video de YouTube usando un modelo de segmentación preentrenado con un tamaño de imagen de 320:
+        ```bash
+        yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+        ```
+
+    === "Validar"
+
+        Validar un modelo de detección preentrenado con un tamaño de lote de 1 y un tamaño de imagen de 640:
+        ```bash
+        yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
+        ```
+
+    === "Exportar"
+
+        Exportar un modelo de clasificación YOLOv8n a formato ONNX con un tamaño de imagen de 224 por 128 (no se requiere TAREA):
+        ```bash
+        yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
+        ```
+
+    === "Especial"
+
+        Ejecutar comandos especiales para ver la versión, ver configuraciones, ejecutar chequeos y más:
+        ```bash
+        yolo help
+        yolo checks
+        yolo version
+        yolo settings
+        yolo copy-cfg
+        yolo cfg
+        ```
+
+!!! Warning "Advertencia"
+
+    Los argumentos deben pasarse como pares `arg=valor`, separados por un signo igual `=` y delimitados por espacios ` ` entre pares. No utilices prefijos de argumentos `--` ni comas `,` entre los argumentos.
+
+    - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` ✅
+    - `yolo predict model yolov8n.pt imgsz 640 conf 0.25` ❌
+    - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` ❌
+
+[Guía de CLI](/../usage/cli.md){.md-button .md-button--primary}
+
+## Usar Ultralytics con Python
+
+La interfaz de Python de YOLOv8 permite una integración perfecta en tus proyectos de Python, facilitando la carga, ejecución y procesamiento de la salida del modelo. Diseñada con la sencillez y facilidad de uso en mente, la interfaz de Python permite a los usuarios implementar rápidamente la detección de objetos, la segmentación y la clasificación en sus proyectos. Esto hace que la interfaz de Python de YOLOv8 sea una herramienta invaluable para cualquier persona que busque incorporar estas funcionalidades en sus proyectos de Python.
+ +Por ejemplo, los usuarios pueden cargar un modelo, entrenarlo, evaluar su rendimiento en un conjunto de validaciรณn e incluso exportarlo al formato ONNX con solo unas pocas lรญneas de cรณdigo. Consulta la [Guรญa de Python](/../usage/python.md) para aprender mรกs sobre el uso de YOLOv8 dentro de tus proyectos de Python. + +!!! Example "Ejemplo" + + ```python + from ultralytics import YOLO + + # Crear un nuevo modelo YOLO desde cero + model = YOLO('yolov8n.yaml') + + # Cargar un modelo YOLO preentrenado (recomendado para entrenamiento) + model = YOLO('yolov8n.pt') + + # Entrenar el modelo usando el conjunto de datos 'coco128.yaml' durante 3 รฉpocas + results = model.train(data='coco128.yaml', epochs=3) + + # Evaluar el rendimiento del modelo en el conjunto de validaciรณn + results = model.val() + + # Realizar detecciรณn de objetos en una imagen usando el modelo + results = model('https://ultralytics.com/images/bus.jpg') + + # Exportar el modelo al formato ONNX + success = model.export(format='onnx') + ``` + +[Guรญa de Python](/../usage/python.md){.md-button .md-button--primary} diff --git a/ultralytics/docs/es/quickstart.md:Zone.Identifier b/ultralytics/docs/es/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/tasks/classify.md b/ultralytics/docs/es/tasks/classify.md new file mode 100755 index 0000000..b9b45ff --- /dev/null +++ b/ultralytics/docs/es/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: Aprenda sobre los modelos de clasificaciรณn de imรกgenes YOLOv8 Classify. Obtenga informaciรณn detallada sobre la Lista de Modelos Preentrenados y cรณmo Entrenar, Validar, Predecir y Exportar modelos. +keywords: Ultralytics, YOLOv8, Clasificaciรณn de imรกgenes, Modelos preentrenados, YOLOv8n-cls, Entrenamiento, Validaciรณn, Predicciรณn, Exportaciรณn de modelos +--- + +# Clasificaciรณn de Imรกgenes + +Ejemplos de clasificaciรณn de imรกgenes + +La clasificaciรณn de imรกgenes es la tarea mรกs sencilla de las tres y consiste en clasificar una imagen completa en una de un conjunto de clases predefinidas. + +La salida de un clasificador de imรกgenes es una รบnica etiqueta de clase y una puntuaciรณn de confianza. La clasificaciรณn de imรกgenes es รบtil cuando solo necesita saber a quรฉ clase pertenece una imagen y no necesita conocer dรณnde estรกn ubicados los objetos de esa clase o cuรกl es su forma exacta. + +!!! Tip "Consejo" + + Los modelos YOLOv8 Classify utilizan el sufijo `-cls`, por ejemplo, `yolov8n-cls.pt` y estรกn preentrenados en [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +Los modelos Classify preentrenados YOLOv8 se muestran aquรญ. Los modelos Detect, Segment y Pose estรกn preentrenados en el conjunto de datos [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), mientras que los modelos Classify estรกn preentrenados en el conjunto de datos [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +Los [modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se descargan automรกticamente desde el รบltimo [lanzamiento](https://github.com/ultralytics/assets/releases) de Ultralytics en el primer uso. + +| Modelo | Tamaรฑo
(pรญxeles) | Exactitud
top1 | Exactitud
top5 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | Parรกmetros
(M) | FLOPs
(B) en 640 | +|----------------------------------------------------------------------------------------------|--------------------------|------------------------|------------------------|------------------------------------|-----------------------------------------|------------------------|--------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- Los valores de **Exactitud** son las precisiones de los modelos en el conjunto de datos de validaciรณn de [ImageNet](https://www.image-net.org/). +
Para reproducir usar `yolo val classify data=path/to/ImageNet device=0` +- **Velocidad** promediada sobre imรกgenes de validaciรณn de ImageNet usando una instancia de [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) +
Para reproducir usar `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## Entrenamiento + +Entrena el modelo YOLOv8n-cls en el conjunto de datos MNIST160 durante 100 รฉpocas con un tamaรฑo de imagen de 64. Para obtener una lista completa de argumentos disponibles, consulte la pรกgina de [Configuraciรณn](/../usage/cfg.md). + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-cls.yaml') # construir un nuevo modelo desde YAML + model = YOLO('yolov8n-cls.pt') # cargar un modelo preentrenado (recomendado para entrenamiento) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # construir desde YAML y transferir pesos + + # Entrenar el modelo + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # Construir un nuevo modelo desde YAML y empezar entrenamiento desde cero + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # Empezar entrenamiento desde un modelo *.pt preentrenado + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # Construir un nuevo modelo desde YAML, transferir pesos preentrenados e iniciar entrenamiento + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### Formato del conjunto de datos + +El formato del conjunto de datos de clasificaciรณn YOLO puede encontrarse en detalle en la [Guรญa de Conjuntos de Datos](../../../datasets/classify/index.md). + +## Validaciรณn + +Validar la exactitud del modelo YOLOv8n-cls entrenado en el conjunto de datos MNIST160. No es necesario pasar ningรบn argumento ya que el `modelo` retiene su `data` y argumentos como atributos del modelo. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-cls.pt') # cargar un modelo oficial + model = YOLO('path/to/best.pt') # cargar un modelo personalizado + + # Validar el modelo + metrics = model.val() # no se necesitan argumentos, el conjunto de datos y configuraciones se recuerdan + metrics.top1 # precisiรณn top1 + metrics.top5 # precisiรณn top5 + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # validar modelo oficial + yolo classify val model=path/to/best.pt # validar modelo personalizado + ``` + +## Predicciรณn + +Usar un modelo YOLOv8n-cls entrenado para realizar predicciones en imรกgenes. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-cls.pt') # cargar un modelo oficial + model = YOLO('path/to/best.pt') # cargar un modelo personalizado + + # Predecir con el modelo + results = model('https://ultralytics.com/images/bus.jpg') # predecir en una imagen + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo oficial + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo personalizado + ``` + +Ver detalles completos del modo `predict` en la pรกgina de [Predicciรณn](https://docs.ultralytics.com/modes/predict/). + +## Exportaciรณn + +Exportar un modelo YOLOv8n-cls a un formato diferente como ONNX, CoreML, etc. + +!!! 
Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-cls.pt') # cargar un modelo oficial + model = YOLO('path/to/best.pt') # cargar un modelo entrenado personalizado + + # Exportar el modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # exportar modelo oficial + yolo export model=path/to/best.pt format=onnx # exportar modelo entrenado personalizado + ``` + +Los formatos de exportaciรณn disponibles para YOLOv8-cls se encuentran en la tabla a continuaciรณn. Puede predecir o validar directamente en modelos exportados, por ejemplo, `yolo predict model=yolov8n-cls.onnx`. Ejemplos de uso se muestran para su modelo despuรฉs de que se completa la exportaciรณn. + +| Formato | Argumento `format` | Modelo | Metadatos | Argumentos | +|--------------------------------------------------------------------|--------------------|-------------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +Vea detalles completos de `exportaciรณn` en la pรกgina de [Exportaciรณn](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/es/tasks/classify.md:Zone.Identifier b/ultralytics/docs/es/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/es/tasks/detect.md b/ultralytics/docs/es/tasks/detect.md new file mode 100755 index 0000000..7779c78 --- /dev/null +++ b/ultralytics/docs/es/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Documentaciรณn oficial de YOLOv8 de Ultralytics. Aprende a entrenar, validar, predecir y exportar modelos en varios formatos. Incluyendo estadรญsticas detalladas de rendimiento. 
+keywords: YOLOv8, Ultralytics, detecciรณn de objetos, modelos preentrenados, entrenamiento, validaciรณn, predicciรณn, exportaciรณn de modelos, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# Detecciรณn de Objetos + +Ejemplos de detecciรณn de objetos + +La detecciรณn de objetos es una tarea que implica identificar la ubicaciรณn y clase de objetos en una imagen o flujo de video. + +La salida de un detector de objetos es un conjunto de cajas delimitadoras que encierran a los objetos en la imagen, junto con etiquetas de clase y puntajes de confianza para cada caja. La detecciรณn de objetos es una buena opciรณn cuando necesitas identificar objetos de interรฉs en una escena, pero no necesitas saber exactamente dรณnde se encuentra el objeto o su forma exacta. + +
+    [Video incrustado] Ver: Detecciรณn de Objetos con Modelo Preentrenado YOLOv8 de Ultralytics.
+ +!!! Tip "Consejo" + + Los modelos YOLOv8 Detect son los modelos predeterminados de YOLOv8, es decir, `yolov8n.pt` y estรกn preentrenados en [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +Los modelos preentrenados de YOLOv8 Detect se muestran aquรญ. Los modelos de Detect, Segment y Pose estรกn preentrenados en el conjunto de datos [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), mientras que los modelos de Classify estรกn preentrenados en el conjunto de datos [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +Los [modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se descargan automรกticamente desde el รบltimo lanzamiento de Ultralytics [release](https://github.com/ultralytics/assets/releases) en el primer uso. + +| Modelo | tamaรฑo
(pรญxeles) | mAPval
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | +|--------------------------------------------------------------------------------------|--------------------------|----------------------|------------------------------------|-----------------------------------------|------------------------|-------------------| +| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | +| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | +| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | +| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | +| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + +- Los valores de **mAPval** son para un solo modelo a una sola escala en el conjunto de datos [COCO val2017](http://cocodataset.org). +
Reproduce utilizando `yolo val detect data=coco.yaml device=0` +- La **Velocidad** es el promedio sobre las imรกgenes de COCO val utilizando una instancia [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
Reproduce utilizando `yolo val detect data=coco128.yaml batch=1 device=0|cpu` + +## Entrenamiento + +Entrena a YOLOv8n en el conjunto de datos COCO128 durante 100 รฉpocas a tamaรฑo de imagen 640. Para una lista completa de argumentos disponibles, consulta la pรกgina [Configuraciรณn](/../usage/cfg.md). + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n.yaml') # construye un nuevo modelo desde YAML + model = YOLO('yolov8n.pt') # carga un modelo preentrenado (recomendado para entrenamiento) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # construye desde YAML y transfiere los pesos + + # Entrenar el modelo + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construir un nuevo modelo desde YAML y comenzar entrenamiento desde cero + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # Comenzar entrenamiento desde un modelo *.pt preentrenado + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # Construir un nuevo modelo desde YAML, transferir pesos preentrenados y comenzar entrenamiento + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### Formato del conjunto de datos + +El formato del conjunto de datos de detecciรณn de YOLO se puede encontrar en detalle en la [Guรญa de Conjuntos de Datos](../../../datasets/detect/index.md). Para convertir tu conjunto de datos existente desde otros formatos (como COCO, etc.) al formato YOLO, por favor usa la herramienta [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) de Ultralytics. + +## Validaciรณn + +Valida la precisiรณn del modelo YOLOv8n entrenado en el conjunto de datos COCO128. No es necesario pasar ningรบn argumento, ya que el `modelo` retiene sus datos de `entrenamiento` y argumentos como atributos del modelo. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n.pt') # cargar un modelo oficial + model = YOLO('ruta/a/mejor.pt') # cargar un modelo personalizado + + # Validar el modelo + metrics = model.val() # sin argumentos necesarios, el conjunto de datos y configuraciones se recuerdan + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # una lista contiene map50-95 de cada categorรญa + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # validar modelo oficial + yolo detect val model=ruta/a/mejor.pt # validar modelo personalizado + ``` + +## Predicciรณn + +Utiliza un modelo YOLOv8n entrenado para realizar predicciones en imรกgenes. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n.pt') # cargar un modelo oficial + model = YOLO('ruta/a/mejor.pt') # cargar un modelo personalizado + + # Predecir con el modelo + results = model('https://ultralytics.com/images/bus.jpg') # predecir en una imagen + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo oficial + yolo detect predict model=ruta/a/mejor.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo personalizado + ``` + +Consulta los detalles completos del modo `predict` en la pรกgina [Predicciรณn](https://docs.ultralytics.com/modes/predict/). 
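+Como complemento, un esbozo mรญnimo de cรณmo guardar una imagen anotada a partir de los resultados: el mรฉtodo `plot()` de cada objeto de resultados devuelve la imagen con las cajas dibujadas como un arreglo NumPy en formato BGR (se asume OpenCV instalado, `pip install opencv-python`):
+
+```python
+import cv2
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# plot() dibuja cajas, etiquetas y confianzas sobre la imagen original
+annotated = results[0].plot()  # arreglo NumPy (BGR)
+
+# OpenCV tambiรฉn trabaja en BGR, por lo que se puede guardar directamente
+cv2.imwrite('bus_anotado.jpg', annotated)
+```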
+
+## Exportaciรณn
+
+Exporta un modelo YOLOv8n a un formato diferente como ONNX, CoreML, etc.
+
+!!! Example "Ejemplo"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n.pt')  # cargar un modelo oficial
+        model = YOLO('ruta/a/mejor.pt')  # cargar un modelo entrenado personalizado
+
+        # Exportar el modelo
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n.pt format=onnx  # exportar modelo oficial
+        yolo export model=ruta/a/mejor.pt format=onnx  # exportar modelo entrenado personalizado
+        ```
+
+Los formatos de exportaciรณn de YOLOv8 disponibles se encuentran en la tabla a continuaciรณn. Puedes predecir o validar directamente en modelos exportados, es decir, `yolo predict model=yolov8n.onnx`. Se muestran ejemplos de uso para tu modelo una vez completada la exportaciรณn.
+
+| Formato                                                            | Argumento `format` | Modelo                    | Metadatos | Argumentos                                          |
+|--------------------------------------------------------------------|--------------------|---------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                  | `yolov8n.pt`              | โœ…         | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`      | `yolov8n.torchscript`     | โœ…         | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`             | `yolov8n.onnx`            | โœ…         | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`         | `yolov8n_openvino_model/` | โœ…         | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`           | `yolov8n.engine`          | โœ…         | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`           | `yolov8n.mlpackage`       | โœ…         | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`      | `yolov8n_saved_model/`    | โœ…         | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`               | `yolov8n.pb`              | โŒ         | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`           | `yolov8n.tflite`          | โœ…         | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`          | `yolov8n_edgetpu.tflite`  | โœ…         | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`             | `yolov8n_web_model/`      | โœ…         | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`           | `yolov8n_paddle_model/`   | โœ…         | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`             | `yolov8n_ncnn_model/`     | โœ…         | `imgsz`, `half`                                     |
+
+Consulta los detalles completos del modo `export` en la pรกgina [Exportar](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/es/tasks/detect.md:Zone.Identifier b/ultralytics/docs/es/tasks/detect.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/tasks/detect.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/tasks/index.md b/ultralytics/docs/es/tasks/index.md
new file mode 100755
index 0000000..04d1b5a
--- /dev/null
+++ b/ultralytics/docs/es/tasks/index.md
@@ -0,0 +1,55 @@
+---
+comments: true
+description: Aprenda sobre las tareas fundamentales de visiรณn por computadora que YOLOv8 puede realizar, incluyendo detecciรณn, segmentaciรณn, clasificaciรณn y estimaciรณn de pose. Comprenda sus usos en sus proyectos de IA.
+keywords: Ultralytics, YOLOv8, Detecciรณn, Segmentaciรณn, Clasificaciรณn, Estimaciรณn de Pose, Marco de IA, Tareas de Visiรณn por Computadora +--- + +# Tareas de Ultralytics YOLOv8 + +
+Tareas soportadas por Ultralytics YOLO + +YOLOv8 es un marco de trabajo de IA que soporta mรบltiples **tareas** de visiรณn por computadora. El marco puede usarse para realizar [detecciรณn](detect.md), [segmentaciรณn](segment.md), [clasificaciรณn](classify.md) y estimaciรณn de [pose](pose.md). Cada una de estas tareas tiene un objetivo y caso de uso diferente. + +!!! Note "Nota" + + ๐Ÿšง Nuestra documentaciรณn multilenguaje estรก actualmente en construcciรณn y estamos trabajando arduamente para mejorarla. ยกGracias por su paciencia! ๐Ÿ™ + +
+    [Video incrustado] Mire: Explore las Tareas de Ultralytics YOLO: Detecciรณn de Objetos, Segmentaciรณn, Seguimiento y Estimaciรณn de Pose.
+
+## [Detecciรณn](detect.md)
+
+La detecciรณn es la tarea principal soportada por YOLOv8. Implica detectar objetos en una imagen o cuadro de video y dibujar cuadros delimitadores alrededor de ellos. Los objetos detectados se clasifican en diferentes categorรญas basadas en sus caracterรญsticas. YOLOv8 puede detectar mรบltiples objetos en una sola imagen o cuadro de video con alta precisiรณn y velocidad.
+
+[Ejemplos de Detecciรณn](detect.md){ .md-button }
+
+## [Segmentaciรณn](segment.md)
+
+La segmentaciรณn es una tarea que implica dividir una imagen en diferentes regiones segรบn su contenido, asignando a cada regiรณn una etiqueta. Esta tarea es รบtil en aplicaciones como la imagen mรฉdica. YOLOv8 realiza la segmentaciรณn de instancias con una cabeza de segmentaciรณn basada en mรกscaras prototipo (al estilo de YOLACT) sobre su propia arquitectura de detecciรณn.
+
+[Ejemplos de Segmentaciรณn](segment.md){ .md-button }
+
+## [Clasificaciรณn](classify.md)
+
+La clasificaciรณn es una tarea que implica asignar una imagen completa a una de varias categorรญas. YOLOv8 puede usarse para clasificar imรกgenes segรบn su contenido, utilizando el mismo backbone de YOLOv8 con una cabeza de clasificaciรณn dedicada.
+
+[Ejemplos de Clasificaciรณn](classify.md){ .md-button }
+
+## [Pose](pose.md)
+
+La detecciรณn de pose/puntos clave es una tarea que implica detectar puntos especรญficos en una imagen o cuadro de video. Estos puntos se conocen como puntos clave y se utilizan para rastrear el movimiento o estimar la pose. YOLOv8 puede detectar puntos clave en una imagen o cuadro de video con alta precisiรณn y velocidad.
+
+[Ejemplos de Pose](pose.md){ .md-button }
+
+## Conclusiรณn
+
+YOLOv8 soporta mรบltiples tareas, incluyendo detecciรณn, segmentaciรณn, clasificaciรณn y detecciรณn de puntos clave. Cada una de estas tareas tiene objetivos y casos de uso diferentes. Al entender las diferencias entre ellas, puede elegir la tarea adecuada para su aplicaciรณn de visiรณn por computadora.
diff --git a/ultralytics/docs/es/tasks/index.md:Zone.Identifier b/ultralytics/docs/es/tasks/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/tasks/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/tasks/pose.md b/ultralytics/docs/es/tasks/pose.md
new file mode 100755
index 0000000..5374f5f
--- /dev/null
+++ b/ultralytics/docs/es/tasks/pose.md
@@ -0,0 +1,185 @@
+---
+comments: true
+description: Aprende a utilizar Ultralytics YOLOv8 para tareas de estimaciรณn de pose. Encuentra modelos preentrenados, aprende a entrenar, validar, predecir y exportar tus propios modelos.
+keywords: Ultralytics, YOLO, YOLOv8, estimaciรณn de pose, detecciรณn de puntos clave, detecciรณn de objetos, modelos preentrenados, aprendizaje automรกtico, inteligencia artificial
+---
+
+# Estimaciรณn de Pose
+
+Ejemplos de estimaciรณn de pose
+
+La estimaciรณn de pose es una tarea que implica identificar la ubicaciรณn de puntos especรญficos en una imagen, comรบnmente referidos como puntos clave. Estos puntos clave pueden representar varias partes del objeto, como articulaciones, puntos de referencia u otras caracterรญsticas distintivas. La ubicaciรณn de los puntos clave generalmente se representa como un conjunto de coordenadas 2D `[x, y]` o 3D `[x, y, visible]`.
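+A modo de ilustraciรณn, y asumiendo la convenciรณn de anotaciรณn de COCO keypoints (la utilizada por los modelos preentrenados que se listan mรกs abajo), el tercer valor de un punto clave 3D codifica su visibilidad:
+
+```python
+# Punto clave en formato [x, y, visible] segรบn la convenciรณn de COCO:
+#   visible = 0 -> no etiquetado, 1 -> etiquetado pero no visible, 2 -> visible
+nariz = [241.5, 139.0, 2.0]
+
+# Un esqueleto COCO completo consta de 17 puntos clave por persona:
+# nariz, ojos, orejas, hombros, codos, muรฑecas, caderas, rodillas y tobillos.
+```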
+ +La salida de un modelo de estimaciรณn de pose es un conjunto de puntos que representan los puntos clave en un objeto de la imagen, generalmente junto con las puntuaciones de confianza para cada punto. La estimaciรณn de pose es una buena opciรณn cuando se necesita identificar partes especรญficas de un objeto en una escena y su ubicaciรณn relativa entre ellas. + +
+    [Video incrustado] Ver: Estimaciรณn de Pose con Ultralytics YOLOv8.
+ +!!! Tip "Consejo" + + Los modelos _pose_ YOLOv8 utilizan el sufijo `-pose`, por ejemplo, `yolov8n-pose.pt`. Estos modelos estรกn entrenados en el conjunto de datos [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) y son adecuados para una variedad de tareas de estimaciรณn de pose. + +## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +Aquรญ se muestran los modelos preentrenados de YOLOv8 Pose. Los modelos Detect, Segment y Pose estรกn preentrenados en el conjunto de datos [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), mientras que los modelos Classify estรกn preentrenados en el conjunto de datos [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +Los [modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se descargan automรกticamente desde el รบltimo lanzamiento de Ultralytics [release](https://github.com/ultralytics/assets/releases) en el primer uso. + +| Modelo | tamaรฑo
(pรญxeles) | mAPpose
50-95 | mAPpose
50 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | parรกmetros
(M) | FLOPs
(B) | +|------------------------------------------------------------------------------------------------------|--------------------------|-----------------------|--------------------|------------------------------------|-----------------------------------------|------------------------|-------------------| +| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | +| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | +| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | +| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | +| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | +| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +- Los valores de **mAPval** son para un solo modelo a una sola escala en el conjunto de datos [COCO Keypoints val2017](http://cocodataset.org). +
Reproducir con `yolo val pose data=coco-pose.yaml device=0` +- **Velocidad** promediada sobre imรกgenes COCO val usando una instancia [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
Reproducir con `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## Entrenar + +Entrena un modelo YOLOv8-pose en el conjunto de datos COCO128-pose. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-pose.yaml') # construir un nuevo modelo desde YAML + model = YOLO('yolov8n-pose.pt') # cargar un modelo preentrenado (recomendado para entrenar) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # construir desde YAML y transferir los pesos + + # Entrenar el modelo + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construir un nuevo modelo desde YAML y comenzar entrenamiento desde cero + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Empezar entrenamiento desde un modelo *.pt preentrenado + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Construir un nuevo modelo desde YAML, transferir pesos preentrenados y comenzar entrenamiento + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Formato del conjunto de datos + +El formato del conjunto de datos de pose de YOLO se puede encontrar en detalle en la [Guรญa de Conjuntos de Datos](../../../datasets/pose/index.md). Para convertir tu conjunto de datos existente de otros formatos (como COCO, etc.) al formato de YOLO, usa la herramienta [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) de Ultralytics. + +## Validar + +Valida la precisiรณn del modelo YOLOv8n-pose entrenado en el conjunto de datos COCO128-pose. No es necesario pasar ningรบn argumento ya que el `modelo` mantiene sus `datos` de entrenamiento y argumentos como atributos del modelo. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-pose.pt') # cargar un modelo oficial + model = YOLO('path/to/best.pt') # cargar un modelo personalizado + + # Validar el modelo + metrics = model.val() # no se necesitan argumentos, el conjunto de datos y configuraciones se recuerdan + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # una lista contiene map50-95 de cada categorรญa + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # modelo oficial de val + yolo pose val model=path/to/best.pt # modelo personalizado de val + ``` + +## Predecir + +Usa un modelo YOLOv8n-pose entrenado para realizar predicciones en imรกgenes. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-pose.pt') # cargar un modelo oficial + model = YOLO('path/to/best.pt') # cargar un modelo personalizado + + # Predecir con el modelo + results = model('https://ultralytics.com/images/bus.jpg') # predecir en una imagen + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo oficial + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predecir con modelo personalizado + ``` + +Consulta los detalles completos del modo `predict` en la pรกgina de [Predicciรณn](https://docs.ultralytics.com/modes/predict/). + +## Exportar + +Exporta un modelo YOLOv8n Pose a un formato diferente como ONNX, CoreML, etc. + +!!! 
Example "Ejemplo"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Cargar un modelo
+        model = YOLO('yolov8n-pose.pt')  # cargar un modelo oficial
+        model = YOLO('path/to/best.pt')  # cargar un modelo entrenado personalizado
+
+        # Exportar el modelo
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-pose.pt format=onnx  # exportar modelo oficial
+        yolo export model=path/to/best.pt format=onnx  # exportar modelo entrenado personalizado
+        ```
+
+Los formatos de exportaciรณn de YOLOv8-pose disponibles se muestran en la tabla a continuaciรณn. Puedes predecir o validar directamente en modelos exportados, por ejemplo, `yolo predict model=yolov8n-pose.onnx`. Se muestran ejemplos de uso para tu modelo una vez completada la exportaciรณn.
+
+| Formato                                                            | Argumento `format` | Modelo                         | Metadatos | Argumentos                                          |
+|--------------------------------------------------------------------|--------------------|--------------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/)                                    | -                  | `yolov8n-pose.pt`              | โœ…         | -                                                   |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html)            | `torchscript`      | `yolov8n-pose.torchscript`     | โœ…         | `imgsz`, `optimize`                                 |
+| [ONNX](https://onnx.ai/)                                           | `onnx`             | `yolov8n-pose.onnx`            | โœ…         | `imgsz`, `half`, `dynamic`, `simplify`, `opset`     |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html)             | `openvino`         | `yolov8n-pose_openvino_model/` | โœ…         | `imgsz`, `half`                                     |
+| [TensorRT](https://developer.nvidia.com/tensorrt)                  | `engine`           | `yolov8n-pose.engine`          | โœ…         | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools)                     | `coreml`           | `yolov8n-pose.mlpackage`       | โœ…         | `imgsz`, `half`, `int8`, `nms`                      |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model)      | `saved_model`      | `yolov8n-pose_saved_model/`    | โœ…         | `imgsz`, `keras`                                    |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb`               | `yolov8n-pose.pb`              | โŒ         | `imgsz`                                             |
+| [TF Lite](https://www.tensorflow.org/lite)                         | `tflite`           | `yolov8n-pose.tflite`          | โœ…         | `imgsz`, `half`, `int8`                             |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/)         | `edgetpu`          | `yolov8n-pose_edgetpu.tflite`  | โœ…         | `imgsz`                                             |
+| [TF.js](https://www.tensorflow.org/js)                             | `tfjs`             | `yolov8n-pose_web_model/`      | โœ…         | `imgsz`                                             |
+| [PaddlePaddle](https://github.com/PaddlePaddle)                    | `paddle`           | `yolov8n-pose_paddle_model/`   | โœ…         | `imgsz`                                             |
+| [ncnn](https://github.com/Tencent/ncnn)                            | `ncnn`             | `yolov8n-pose_ncnn_model/`     | โœ…         | `imgsz`, `half`                                     |
+
+Consulta los detalles completos del modo `export` en la pรกgina de [Exportaciรณn](https://docs.ultralytics.com/modes/export/).
diff --git a/ultralytics/docs/es/tasks/pose.md:Zone.Identifier b/ultralytics/docs/es/tasks/pose.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/es/tasks/pose.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/es/tasks/segment.md b/ultralytics/docs/es/tasks/segment.md
new file mode 100755
index 0000000..24de163
--- /dev/null
+++ b/ultralytics/docs/es/tasks/segment.md
@@ -0,0 +1,188 @@
+---
+comments: true
+description: Aprende a utilizar modelos de segmentaciรณn de instancias con Ultralytics YOLO. Instrucciones sobre entrenamiento, validaciรณn, predicciรณn de imรกgenes y exportaciรณn de modelos.
+keywords: yolov8, segmentaciรณn de instancias, Ultralytics, conjunto de datos COCO, segmentaciรณn de imรกgenes, detecciรณn de objetos, entrenamiento de modelos, validaciรณn de modelos, predicciรณn de imรกgenes, exportaciรณn de modelos. +--- + +# Segmentaciรณn de Instancias + +Ejemplos de segmentaciรณn de instancias + +La segmentaciรณn de instancias va un paso mรกs allรก de la detecciรณn de objetos e implica identificar objetos individuales en una imagen y segmentarlos del resto de la imagen. + +La salida de un modelo de segmentaciรณn de instancias es un conjunto de mรกscaras o contornos que delimitan cada objeto en la imagen, junto con etiquetas de clase y puntajes de confianza para cada objeto. La segmentaciรณn de instancias es รบtil cuando necesitas saber no solo dรณnde estรกn los objetos en una imagen, sino tambiรฉn cuรกl es su forma exacta. + +
+    [Video incrustado] Mira: Ejecuta la Segmentaciรณn con el Modelo Ultralytics YOLOv8 Preentrenado en Python.
+ +!!! Tip "Consejo" + + Los modelos YOLOv8 Segment utilizan el sufijo `-seg`, es decir, `yolov8n-seg.pt` y estรกn preentrenados en el [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +Aquรญ se muestran los modelos Segment preentrenados YOLOv8. Los modelos Detect, Segment y Pose estรกn preentrenados en el conjunto de datos [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), mientras que los modelos Classify estรกn preentrenados en el conjunto de datos [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +Los [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se descargan automรกticamente desde el รบltimo lanzamiento de Ultralytics [release](https://github.com/ultralytics/assets/releases) en su primer uso. + +| Modelo | Tamaรฑo
(pรญxeles) | mAPcaja
50-95 | mAPmรกscara
50-95 | Velocidad
CPU ONNX
(ms) | Velocidad
A100 TensorRT
(ms) | Parรกmetros
(M) | FLOPs
(B) | +|----------------------------------------------------------------------------------------------|--------------------------|-----------------------|--------------------------|------------------------------------|-----------------------------------------|------------------------|-------------------| +| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | +| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | +| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | +| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | +| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + +- Los valores **mAPval** son para un รบnico modelo a una รบnica escala en el conjunto de datos [COCO val2017](http://cocodataset.org). +
Reproducir utilizando `yolo val segment data=coco.yaml device=0` +- La **Velocidad** promediada sobre imรกgenes de COCO val utilizando una instancia de [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
Reproducir utilizando `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Entrenamiento + +Entrena el modelo YOLOv8n-seg en el conjunto de datos COCO128-seg durante 100 รฉpocas con tamaรฑo de imagen de 640. Para una lista completa de argumentos disponibles, consulta la pรกgina de [Configuraciรณn](/../usage/cfg.md). + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-seg.yaml') # construir un nuevo modelo desde YAML + model = YOLO('yolov8n-seg.pt') # cargar un modelo preentrenado (recomendado para entrenamiento) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # construir desde YAML y transferir pesos + + # Entrenar el modelo + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construir un nuevo modelo desde YAML y comenzar a entrenar desde cero + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Comenzar a entrenar desde un modelo *.pt preentrenado + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Construir un nuevo modelo desde YAML, transferir pesos preentrenados y comenzar a entrenar + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Formato del conjunto de datos + +El formato del conjunto de datos de segmentaciรณn YOLO puede encontrarse detallado en la [Guรญa de Conjuntos de Datos](../../../datasets/segment/index.md). Para convertir tu conjunto de datos existente de otros formatos (como COCO, etc.) al formato YOLO, utiliza la herramienta [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) de Ultralytics. + +## Validaciรณn + +Valida la precisiรณn del modelo YOLOv8n-seg entrenado en el conjunto de datos COCO128-seg. No es necesario pasar ningรบn argumento ya que el `modelo` retiene sus `datos` de entrenamiento y argumentos como atributos del modelo. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-seg.pt') # cargar un modelo oficial + model = YOLO('ruta/a/mejor.pt') # cargar un modelo personalizado + + # Validar el modelo + metrics = model.val() # no se necesitan argumentos, el conjunto de datos y configuraciones se recuerdan + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # una lista contiene map50-95(B) de cada categorรญa + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # una lista contiene map50-95(M) de cada categorรญa + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # validar el modelo oficial + yolo segment val model=ruta/a/mejor.pt # validar el modelo personalizado + ``` + +## Predicciรณn + +Usa un modelo YOLOv8n-seg entrenado para realizar predicciones en imรกgenes. + +!!! 
Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-seg.pt') # cargar un modelo oficial + model = YOLO('ruta/a/mejor.pt') # cargar un modelo personalizado + + # Predecir con el modelo + results = model('https://ultralytics.com/images/bus.jpg') # predecir en una imagen + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # predecir con el modelo oficial + yolo segment predict model=ruta/a/mejor.pt source='https://ultralytics.com/images/bus.jpg' # predecir con el modelo personalizado + ``` + +Consulta todos los detalles del modo `predict` en la pรกgina de [Predicciรณn](https://docs.ultralytics.com/modes/predict/). + +## Exportaciรณn + +Exporta un modelo YOLOv8n-seg a un formato diferente como ONNX, CoreML, etc. + +!!! Example "Ejemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Cargar un modelo + model = YOLO('yolov8n-seg.pt') # cargar un modelo oficial + model = YOLO('ruta/a/mejor.pt') # cargar un modelo entrenado personalizado + + # Exportar el modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # exportar el modelo oficial + yolo export model=ruta/a/mejor.pt format=onnx # exportar el modelo entrenado personalizado + ``` + +Los formatos disponibles para exportar YOLOv8-seg se muestran en la tabla a continuaciรณn. Puedes predecir o validar directamente en modelos exportados, es decir, `yolo predict model=yolov8n-seg.onnx`. Se muestran ejemplos de uso para tu modelo despuรฉs de que se completa la exportaciรณn. + +| Formato | Argumento `format` | Modelo | Metadatos | Argumentos | +|--------------------------------------------------------------------|--------------------|-------------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +Consulta todos los detalles del modo `export` en la pรกgina de 
[Exportaciรณn](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/es/tasks/segment.md:Zone.Identifier b/ultralytics/docs/es/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/es/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/index.md b/ultralytics/docs/fr/index.md new file mode 100755 index 0000000..dd8dcc1 --- /dev/null +++ b/ultralytics/docs/fr/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: Dรฉcouvrez un guide complet du modรจle Ultralytics YOLOv8, un modรจle de dรฉtection d'objets et de segmentation d'images ร  haute vitesse et haute prรฉcision. Tutoriels d'installation, de prรฉdiction, d'entraรฎnement et plus encore. +keywords: Ultralytics, YOLOv8, dรฉtection d'objets, segmentation d'images, apprentissage automatique, apprentissage profond, vision par ordinateur, installation de YOLOv8, prรฉdiction avec YOLOv8, entraรฎnement de YOLOv8, histoire de YOLO, licences de YOLO +--- + +
+    [Banniรจre Ultralytics YOLO avec liens sociaux (GitHub, LinkedIn, Twitter, YouTube, TikTok, Instagram, Discord) et badges : Intรฉgration continue, Couverture de code, Citation YOLOv8, Tรฉlรฉchargements Docker, Discord, Exรฉcuter sur Gradient, Ouvrir dans Colab, Ouvrir dans Kaggle]
+ +Prรฉsentation d'[Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), la derniรจre version du modรจle rรฉputรฉ de dรฉtection d'objets en temps rรฉel et de segmentation d'images. YOLOv8 est construit sur des avancรฉes de pointe en apprentissage profond et vision par ordinateur, offrant des performances inรฉgalรฉes en termes de vitesse et de prรฉcision. Sa conception รฉpurรฉe le rend adaptรฉ ร  diverses applications et facilement adaptable ร  diffรฉrentes plateformes matรฉrielles, des appareils de bord aux API cloud. + +Explorez les Docs YOLOv8, une ressource complรจte conรงue pour vous aider ร  comprendre et ร  utiliser ses fonctionnalitรฉs et capacitรฉs. Que vous soyez un praticien chevronnรฉ de l'apprentissage automatique ou nouveau dans le domaine, ce hub vise ร  maximiser le potentiel de YOLOv8 dans vos projets. + +!!! Note "Note" + + ๐Ÿšง Notre documentation multilingue est actuellement en construction et nous travaillons dur pour l'amรฉliorer. Merci de votre patience ! ๐Ÿ™ + +## Par oรน commencer + +- **Installer** `ultralytics` avec pip et dรฉmarrer en quelques minutes   [:material-clock-fast: Commencer](quickstart.md){ .md-button } +- **Prรฉdire** de nouvelles images et vidรฉos avec YOLOv8   [:octicons-image-16: Prรฉdire sur Images](modes/predict.md){ .md-button } +- **Entraรฎner** un nouveau modรจle YOLOv8 sur votre propre ensemble de donnรฉes customisรฉ   [:fontawesome-solid-brain: Entraรฎner un modรจle](modes/train.md){ .md-button } +- **Explorer** les tรขches YOLOv8 comme la segmentation, la classification, l'estimation de pose et le suivi   [:material-magnify-expand: Explorer les tรขches](tasks/index.md){ .md-button } + +
+    [Vidรฉo intรฉgrรฉe] Regarder : Comment entraรฎner un modรจle YOLOv8 sur votre ensemble de donnรฉes customisรฉ dans Google Colab.
+ +## YOLO : Un bref historique + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), un modรจle populaire de dรฉtection d'objets et de segmentation d'images, a รฉtรฉ dรฉveloppรฉ par Joseph Redmon et Ali Farhadi ร  l'Universitรฉ de Washington. Lancรฉ en 2015, YOLO a rapidement gagnรฉ en popularitรฉ pour sa vitesse et sa prรฉcision รฉlevรฉes. + +- [YOLOv2](https://arxiv.org/abs/1612.08242), publiรฉ en 2016, a amรฉliorรฉ le modรจle original en intรฉgrant la normalisation par lots, les boรฎtes d'ancrage et les clusters de dimensions. +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), lancรฉ en 2018, a davantage amรฉliorรฉ la performance du modรจle en utilisant un rรฉseau dorsal plus efficace, des ancres multiples et un pool pyramidal spatial. +- [YOLOv4](https://arxiv.org/abs/2004.10934) a รฉtรฉ publiรฉ en 2020, introduisant des innovations telles que l'augmentation de donnรฉes Mosaic, une nouvelle tรชte de dรฉtection sans ancre et une nouvelle fonction de perte. +- [YOLOv5](https://github.com/ultralytics/yolov5) a encore amรฉliorรฉ la performance du modรจle et a ajoutรฉ des fonctionnalitรฉs nouvelles telles que l'optimisation des hyperparamรจtres, le suivi intรฉgrรฉ des expรฉriences et l'export automatique vers des formats d'exportation populaires. +- [YOLOv6](https://github.com/meituan/YOLOv6) a รฉtรฉ rendu open-source par [Meituan](https://about.meituan.com/) en 2022 et est utilisรฉ dans de nombreux robots de livraison autonomes de l'entreprise. +- [YOLOv7](https://github.com/WongKinYiu/yolov7) a ajoutรฉ des tรขches supplรฉmentaires telles que l'estimation de pose sur le jeu de donnรฉes de points clรฉs COCO. +- [YOLOv8](https://github.com/ultralytics/ultralytics) est la derniรจre version de YOLO par Ultralytics. En tant que modรจle de pointe et dernier cri (state-of-the-art, SOTA), YOLOv8 s'appuie sur le succรจs des versions prรฉcรฉdentes, introduisant de nouvelles fonctionnalitรฉs et amรฉliorations pour des performances, une flexibilitรฉ et une efficacitรฉ renforcรฉes. YOLOv8 prend en charge une gamme complรจte de tรขches d'intelligence artificielle visuelle, y compris la [dรฉtection](tasks/detect.md), la [segmentation](tasks/segment.md), l'[estimation de pose](tasks/pose.md), le [suivi](modes/track.md) et la [classification](tasks/classify.md). Cette polyvalence permet aux utilisateurs de tirer parti des capacitรฉs de YOLOv8 dans diverses applications et domaines. + +## Licences YOLO : Comment est licenciรฉ Ultralytics YOLO ? + +Ultralytics offre deux options de licence pour rรฉpondre aux diffรฉrents cas d'utilisation : + +- **Licence AGPL-3.0** : Cette licence open source [approuvรฉe par OSI](https://opensource.org/licenses/) est idรฉale pour les รฉtudiants et les passionnรฉs, favorisant la collaboration ouverte et le partage des connaissances. Voir le fichier [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) pour plus de dรฉtails. +- **Licence Enterprise** : Conรงue pour un usage commercial, cette licence permet l'intรฉgration transparente des logiciels et modรจles d'IA Ultralytics dans des biens et services commerciaux, en contournant les exigences open source de l'AGPL-3.0. Si votre scรฉnario implique l'incorporation de nos solutions dans une offre commerciale, n'hรฉsitez pas ร  contacter [Ultralytics Licensing](https://ultralytics.com/license). + +Notre stratรฉgie de licence est conรงue pour garantir que toute amรฉlioration de nos projets open source soit restituรฉe ร  la communautรฉ. 
Nous tenons les principes de l'open source ร  cล“ur โค๏ธ, et notre mission est de garantir que nos contributions puissent รชtre utilisรฉes et dรฉveloppรฉes de maniรจre bรฉnรฉfique pour tous. diff --git a/ultralytics/docs/fr/index.md:Zone.Identifier b/ultralytics/docs/fr/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/models/fast-sam.md b/ultralytics/docs/fr/models/fast-sam.md new file mode 100755 index 0000000..f741b83 --- /dev/null +++ b/ultralytics/docs/fr/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: Dรฉcouvrez FastSAM, une solution basรฉe sur les rรฉseaux de neurones ร  convolution (CNN) pour la segmentation d'objets en temps rรฉel dans les images. Interaction utilisateur amรฉliorรฉe, efficacitรฉ computationnelle et adaptabilitรฉ ร  diffรฉrentes tรขches de vision. +keywords: FastSAM, apprentissage automatique, solution basรฉe sur les CNN, segmentation d'objets, solution en temps rรฉel, Ultralytics, tรขches de vision, traitement d'images, applications industrielles, interaction utilisateur +--- + +# Fast Segment Anything Model (FastSAM) + +Le Fast Segment Anything Model (FastSAM) est une solution basรฉe sur les rรฉseaux de neurones ร  convolution (CNN) en temps rรฉel pour la tรขche Segment Anything. Cette tรขche est conรงue pour segmenter n'importe quel objet dans une image en fonction de diffรฉrentes interactions utilisateur possibles. FastSAM rรฉduit considรฉrablement les demandes computationnelles tout en maintenant des performances compรฉtitives, ce qui en fait un choix pratique pour diverses tรขches de vision. + +![Vue d'ensemble de l'architecture du Fast Segment Anything Model (FastSAM)](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## Vue d'ensemble + +FastSAM est conรงu pour remรฉdier aux limitations du [Segment Anything Model (SAM)](sam.md), un modรจle Transformer lourd nรฉcessitant des ressources computationnelles importantes. FastSAM dรฉcoupe la tรขche de segmentation en deux รฉtapes sรฉquentielles : la segmentation de toutes les instances et la sรฉlection guidรฉe par une invitation. La premiรจre รฉtape utilise [YOLOv8-seg](../tasks/segment.md) pour produire les masques de segmentation de toutes les instances de l'image. Dans la deuxiรจme รฉtape, il gรฉnรจre la rรฉgion d'intรฉrรชt correspondant ร  l'invitation. + +## Fonctionnalitรฉs clรฉs + +1. **Solution en temps rรฉel :** En exploitant l'efficacitรฉ computationnelle des CNN, FastSAM fournit une solution en temps rรฉel pour la tรขche Segment Anything, ce qui en fait une solution prรฉcieuse pour les applications industrielles nรฉcessitant des rรฉsultats rapides. + +2. **Efficacitรฉ et performances :** FastSAM offre une rรฉduction significative des demandes computationnelles et des ressources sans compromettre la qualitรฉ des performances. Il atteint des performances comparables ร  SAM, mais avec une rรฉduction drastique des ressources computationnelles, ce qui permet une application en temps rรฉel. + +3. **Segmentation guidรฉe par une invitation :** FastSAM peut segmenter n'importe quel objet dans une image, guidรฉ par diffรฉrentes invitations d'interaction utilisateur possibles, offrant ainsi flexibilitรฉ et adaptabilitรฉ dans diffรฉrents scรฉnarios. + +4. 
**Basรฉ sur YOLOv8-seg :** FastSAM est basรฉ sur [YOLOv8-seg](../tasks/segment.md), un dรฉtecteur d'objets รฉquipรฉ d'une branche de segmentation d'instances. Cela lui permet de produire efficacement les masques de segmentation de toutes les instances dans une image. + +5. **Rรฉsultats concurrentiels sur les bancs d'essai :** Dans la tรขche de proposition d'objets sur MS COCO, FastSAM obtient des scores รฉlevรฉs ร  une vitesse significativement plus rapide que [SAM](sam.md) sur une seule NVIDIA RTX 3090, dรฉmontrant ainsi son efficacitรฉ et sa capacitรฉ. + +6. **Applications pratiques :** Cette approche propose une nouvelle solution pratique pour un grand nombre de tรขches de vision ร  une vitesse trรจs รฉlevรฉe, des dizaines ou des centaines de fois plus rapide que les mรฉthodes actuelles. + +7. **Faisabilitรฉ de la compression du modรจle :** FastSAM dรฉmontre la faisabilitรฉ d'une voie qui peut rรฉduire considรฉrablement l'effort computationnel en introduisant une contrainte artificielle dans la structure, ouvrant ainsi de nouvelles possibilitรฉs pour l'architecture de modรจles de grande taille pour les tรขches de vision gรฉnรฉrales. + +## Modรจles disponibles, tรขches prises en charge et modes d'exploitation + +Ce tableau prรฉsente les modรจles disponibles avec leurs poids prรฉ-entraรฎnรฉs spรฉcifiques, les tรขches qu'ils prennent en charge et leur compatibilitรฉ avec diffรฉrents modes d'exploitation tels que [Infรฉrence](../modes/predict.md), [Validation](../modes/val.md), [Entraรฎnement](../modes/train.md) et [Exportation](../modes/export.md), indiquรฉs par des emojis โœ… pour les modes pris en charge et des emojis โŒ pour les modes non pris en charge. + +| Type de modรจle | Poids prรฉ-entraรฎnรฉs | Tรขches prises en charge | Infรฉrence | Validation | Entraรฎnement | Exportation | +|----------------|---------------------|-------------------------------------------------|-----------|------------|--------------|-------------| +| FastSAM-s | `FastSAM-s.pt` | [Segmentation d'instances](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [Segmentation d'instances](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## Exemples d'utilisation + +Les modรจles FastSAM sont faciles ร  intรฉgrer dans vos applications Python. Ultralytics propose une API Python conviviale et des commandes CLI pour simplifier le dรฉveloppement. + +### Utilisation de la prรฉdiction + +Pour effectuer une dรฉtection d'objets sur une image, utilisez la mรฉthode `Predict` comme indiquรฉ ci-dessous : + +!!! 
Example "Exemple" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # Dรฉfinir une source d'infรฉrence + source = 'chemin/vers/bus.jpg' + + # Crรฉer un modรจle FastSAM + model = FastSAM('FastSAM-s.pt') # ou FastSAM-x.pt + + # Effectuer une infรฉrence sur une image + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # Prรฉparer un objet Processus Invitation + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # Invitation Everything + ann = prompt_process.everything_prompt() + + # Bbox shape par dรฉfaut [0,0,0,0] -> [x1,y1,x2,y2] + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # Invitation Text + ann = prompt_process.text_prompt(text='une photo d\'un chien') + + # Invitation Point + # points par dรฉfaut [[0,0]] [[x1,y1],[x2,y2]] + # point_label par dรฉfaut [0] [1,0] 0:fond, 1:premier plan + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # Charger un modรจle FastSAM et segmenter tout avec + yolo segment predict model=FastSAM-s.pt source=chemin/vers/bus.jpg imgsz=640 + ``` + +Cet exemple dรฉmontre la simplicitรฉ du chargement d'un modรจle prรฉ-entraรฎnรฉ et de l'exรฉcution d'une prรฉdiction sur une image. + +### Utilisation de la validation + +La validation du modรจle sur un ensemble de donnรฉes peut รชtre effectuรฉe de la maniรจre suivante : + +!!! Example "Exemple" + + === "Python" + ```python + from ultralytics import FastSAM + + # Crรฉer un modรจle FastSAM + model = FastSAM('FastSAM-s.pt') # ou FastSAM-x.pt + + # Valider le modรจle + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # Charger un modรจle FastSAM et le valider sur l'ensemble de donnรฉes d'exemple COCO8 avec une taille d'image de 640 pixels + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +Veuillez noter que FastSAM ne prend en charge que la dรฉtection et la segmentation d'une seule classe d'objet. Cela signifie qu'il reconnaรฎtra et segmentera tous les objets comme รฉtant de la mรชme classe. Par consรฉquent, lors de la prรฉparation de l'ensemble de donnรฉes, vous devez convertir tous les identifiants de catรฉgorie d'objet en 0. + +## Utilisation officielle de FastSAM + +FastSAM est รฉgalement disponible directement ร  partir du dรฉpรดt [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM). Voici un bref aperรงu des รฉtapes typiques que vous pourriez suivre pour utiliser FastSAM : + +### Installation + +1. Clonez le dรฉpรดt FastSAM : + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Crรฉez et activez un environnement Conda avec Python 3.9 : + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. Accรฉdez au dรฉpรดt clonรฉ et installez les packages requis : + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. Installez le modรจle CLIP : + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### Exemple d'utilisation + +1. Tรฉlรฉchargez un [point de contrรดle de modรจle](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing). + +2. Utilisez FastSAM pour l'infรฉrence. 
+
+## Official FastSAM Usage
+
+FastSAM is also available directly from the [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) repository. Here is a brief overview of the typical steps you might take to use FastSAM:
+
+### Installation
+
+1. Clone the FastSAM repository:
+   ```shell
+   git clone https://github.com/CASIA-IVA-Lab/FastSAM.git
+   ```
+
+2. Create and activate a Conda environment with Python 3.9:
+   ```shell
+   conda create -n FastSAM python=3.9
+   conda activate FastSAM
+   ```
+
+3. Navigate to the cloned repository and install the required packages:
+   ```shell
+   cd FastSAM
+   pip install -r requirements.txt
+   ```
+
+4. Install the CLIP model:
+   ```shell
+   pip install git+https://github.com/openai/CLIP.git
+   ```
+
+### Example Usage
+
+1. Download a [model checkpoint](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing).
+
+2. Use FastSAM for inference. Example commands:
+
+   - Segment everything in an image:
+     ```shell
+     python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg
+     ```
+
+   - Segment specific objects using a text prompt:
+     ```shell
+     python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog"
+     ```
+
+   - Segment objects within a bounding box (provide the box coordinates in xywh format):
+     ```shell
+     python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]"
+     ```
+
+   - Segment objects near specific points:
+     ```shell
+     python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]"
+     ```
+
+Additionally, you can try FastSAM through the [Colab demo](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) or the [HuggingFace web demo](https://huggingface.co/spaces/An-619/FastSAM) for a visual experience.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the FastSAM authors for their significant contributions in the field of real-time instance segmentation:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @misc{zhao2023fast,
+            title={Fast Segment Anything},
+            author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang},
+            year={2023},
+            eprint={2306.12156},
+            archivePrefix={arXiv},
+            primaryClass={cs.CV}
+      }
+      ```
+
+The original FastSAM paper can be found on [arXiv](https://arxiv.org/abs/2306.12156). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/fr/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/fr/models/fast-sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/fast-sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/index.md b/ultralytics/docs/fr/models/index.md
new file mode 100755
index 0000000..abf329d
--- /dev/null
+++ b/ultralytics/docs/fr/models/index.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Explore the diverse range of YOLO family, SAM, MobileSAM, FastSAM, YOLO-NAS, and RT-DETR models supported by Ultralytics. Get started with examples for both CLI and Python usage.
+keywords: Ultralytics, documentation, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, models, architectures, Python, CLI
+---
+
+# Models Supported by Ultralytics
+
+Welcome to Ultralytics' model documentation! We offer support for a wide range of models, each tailored to specific tasks like [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you're interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../../help/contributing.md).
+
+!!! Note "Note"
+
+    🚧 Our documentation in different languages is currently under construction, and we're working hard to improve it. Thank you for your patience! 🙏
+
+## Featured Models
+
+Here are some of the key models supported:
+
+1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family, originally by Joseph Redmon, known for its efficient real-time object detection capabilities.
+2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020.
+3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions.
+4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022, and in use in many of the company's autonomous delivery robots.
+5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4.
+6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoints estimation, and classification.
+7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM).
+8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University.
+9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by the Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences.
+10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
+11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models.
+
+**Watch:** Run Ultralytics YOLO models in just a few lines of code.
+
+## Getting Started: Usage Examples
+
+This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+Note the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pre-trained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()`, and `RTDETR()` classes to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
+
+## Contributing New Models
+
+Interested in contributing your model to Ultralytics? Great! We're always open to expanding our model portfolio.
+
+1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics).
+
+2. **Clone Your Fork**: Clone your fork to your local machine and create a new branch to work on (a minimal sketch follows at the end of this section).
+
+3. **Implement Your Model**: Add your model following the coding standards and guidelines provided in our [Contributing Guide](../../help/contributing.md).
+
+4. **Test Thoroughly**: Make sure to test your model rigorously, both in isolation and as part of the pipeline.
+
+5. **Create a Pull Request**: Once you're satisfied with your model, open a pull request to the main repository for review.
+
+6. **Code Review & Merging**: After review, if your model meets our criteria, it will be merged into the main repository.
+
+For detailed steps, consult our [Contributing Guide](../../help/contributing.md).
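+
+As a rough, illustrative sketch of steps 1, 2, and 5 (assuming a fork under a hypothetical `<your-username>` GitHub account), the local Git workflow could look like this:
+
+```bash
+# Clone your fork (replace <your-username> with your GitHub handle)
+git clone https://github.com/<your-username>/ultralytics.git
+cd ultralytics
+
+# Create a feature branch for your model
+git checkout -b add-my-model
+
+# After implementing and testing, push the branch and open a pull request on GitHub
+git push -u origin add-my-model
+```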
diff --git a/ultralytics/docs/fr/models/index.md:Zone.Identifier b/ultralytics/docs/fr/models/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/mobile-sam.md b/ultralytics/docs/fr/models/mobile-sam.md
new file mode 100755
index 0000000..d011b03
--- /dev/null
+++ b/ultralytics/docs/fr/models/mobile-sam.md
@@ -0,0 +1,116 @@
+---
+comments: true
+description: Learn more about MobileSAM, its implementation, comparison with the original SAM, and how to download and test it within the Ultralytics framework. Improve your mobile applications today.
+keywords: MobileSAM, Ultralytics, SAM, mobile applications, Arxiv, GPU, API, image encoder, mask decoder, model download, testing method
+---
+
+![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true)
+
+# Mobile Segment Anything (MobileSAM)
+
+The MobileSAM paper is now available on [arXiv](https://arxiv.org/pdf/2306.14289.pdf).
+
+A demonstration of MobileSAM running on a CPU can be accessed at this [demo link](https://huggingface.co/spaces/dhkim2810/MobileSAM). It takes approximately 3 seconds on a Mac i5 CPU. On the Hugging Face demo, the interface along with lower-performance CPUs contribute to a slower response, but it continues to function effectively.
+
+MobileSAM is implemented in various projects, including [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling), and [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D).
+
+MobileSAM is trained on a single GPU with a 100k-image dataset (1% of the original images) in less than a day. Code for this training will be made available in the future.
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| MobileSAM  | `mobile_sam.pt`     | [Instance Segmentation](../tasks/segment.md) | ✅         | ❌          | ❌        | ✅      |
+
+## Adapting from SAM to MobileSAM
+
+Since MobileSAM retains the same pipeline as the original SAM, we have incorporated the original's pre-processing, post-processing, and all other interfaces. Consequently, those currently using the original SAM can switch to MobileSAM with minimal effort.
+
+MobileSAM performs comparably to the original SAM and retains the same pipeline except for a change in the image encoder.
Plus prรฉcisรฉment, nous remplaรงons l'encodeur d'image lourd original ViT-H (632M) par un encodeur Tiny-ViT plus petit (5M). Sur un seul GPU, MobileSAM fonctionne ร  environ 12 ms par image : 8 ms sur l'encodeur d'image et 4 ms sur le dรฉcodeur de masque. + +Le tableau suivant prรฉsente une comparaison des encodeurs d'image basรฉs sur ViT : + +| Encodeur d'Image | SAM d'Origine | MobileSAM | +|------------------|---------------|-----------| +| Paramรจtres | 611M | 5M | +| Vitesse | 452 ms | 8 ms | + +SAM d'origine et MobileSAM utilisent tous deux le mรชme dรฉcodeur de masque basรฉ sur une instruction : + +| Dรฉcodeur de Masque | SAM d'Origine | MobileSAM | +|--------------------|---------------|-----------| +| Paramรจtres | 3.876M | 3.876M | +| Vitesse | 4 ms | 4 ms | + +Voici une comparaison du pipeline complet : + +| Pipeline Complet (Enc+Dec) | SAM d'Origine | MobileSAM | +|----------------------------|---------------|-----------| +| Paramรจtres | 615M | 9.66M | +| Vitesse | 456 ms | 12 ms | + +Les performances de MobileSAM et de SAM d'origine sont dรฉmontrรฉes en utilisant ร  la fois un point et une boรฎte comme instructions. + +![Image avec un Point comme Instruction](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![Image avec une Boรฎte comme Instruction](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +Avec ses performances supรฉrieures, MobileSAM est environ 5 fois plus petit et 7 fois plus rapide que FastSAM actuel. Plus de dรฉtails sont disponibles sur la [page du projet MobileSAM](https://github.com/ChaoningZhang/MobileSAM). + +## Test de MobileSAM dans Ultralytics + +Tout comme SAM d'origine, nous proposons une mรฉthode de test simple dans Ultralytics, comprenant des modes pour les instructions Point et Boรฎte. + +### Tรฉlรฉchargement du modรจle + +Vous pouvez tรฉlรฉcharger le modรจle [ici](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt). + +### Instruction Point + +!!! Example "Exemple" + + === "Python" + ```python + from ultralytics import SAM + + # Chargement du modรจle + model = SAM('mobile_sam.pt') + + # Prรฉdiction d'un segment ร  partir d'une instruction Point + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### Instruction Boรฎte + +!!! Example "Exemple" + + === "Python" + ```python + from ultralytics import SAM + + # Chargement du modรจle + model = SAM('mobile_sam.pt') + + # Prรฉdiction d'un segment ร  partir d'une instruction Boรฎte + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +Nous avons mis en ล“uvre `MobileSAM` et `SAM` en utilisant la mรชme API. Pour plus d'informations sur l'utilisation, veuillez consulter la [page SAM](sam.md). + +## Citations et Remerciements + +Si vous trouvez MobileSAM utile dans vos travaux de recherche ou de dรฉveloppement, veuillez envisager de citer notre document : + +!!! 
+
+    === "BibTeX"
+
+      ```bibtex
+      @article{mobile_sam,
+        title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications},
+        author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon},
+        journal={arXiv preprint arXiv:2306.14289},
+        year={2023}
+      }
+      ```
diff --git a/ultralytics/docs/fr/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/fr/models/mobile-sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/mobile-sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/rtdetr.md b/ultralytics/docs/fr/models/rtdetr.md
new file mode 100755
index 0000000..13439be
--- /dev/null
+++ b/ultralytics/docs/fr/models/rtdetr.md
@@ -0,0 +1,93 @@
+---
+comments: true
+description: Discover the features and benefits of Baidu's RT-DETR, an efficient and adaptable real-time object detector powered by Vision Transformers, including pre-trained models.
+keywords: RT-DETR, Baidu, Vision Transformers, object detection, real-time performance, CUDA, TensorRT, IoU-aware query selection, Ultralytics, Python API, PaddlePaddle
+---
+
+# Baidu's RT-DETR: A Vision Transformer-Based Real-Time Object Detector
+
+## Overview
+
+Real-Time Detection Transformer (RT-DETR), developed by Baidu, is a cutting-edge end-to-end object detector that provides real-time performance while maintaining high accuracy. It leverages the power of Vision Transformers (ViT) to efficiently process multiscale features by decoupling intra-scale interaction and cross-scale fusion. RT-DETR is highly adaptable, supporting flexible adjustment of inference speed using different decoder layers without retraining. The model excels on accelerated backends like CUDA with TensorRT, outperforming many other real-time object detectors.
+
+![Model example image](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Overview of Baidu's RT-DETR.** The RT-DETR model architecture diagram shows the last three stages of the backbone {S3, S4, S5} as the input to the encoder. The efficient hybrid encoder transforms multiscale features into a sequence of image features through intra-scale feature interaction (AIFI, *Attention-based Intra-scale Feature Interaction*) and the cross-scale feature-fusion module (CCFM, *CNN-based Cross-scale Feature-fusion Module*). IoU-aware query selection is employed to select a fixed number of image features to serve as initial object queries for the decoder. Finally, the decoder with auxiliary prediction heads iteratively optimizes object queries to generate boxes and confidence scores ([source](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Key Features
+
+- **Efficient Hybrid Encoder:** Baidu's RT-DETR uses an efficient hybrid encoder that processes multiscale features by decoupling intra-scale interaction and cross-scale fusion. This unique Vision-Transformer-based design reduces computational costs and enables real-time object detection.
+- **IoU-aware Query Selection:** Baidu's RT-DETR improves object query initialization by employing IoU-aware query selection. This allows the model to focus on the most relevant objects in the scene, enhancing detection accuracy.
+- **Adaptable Inference Speed:** Baidu's RT-DETR supports flexible adjustment of inference speed by using different decoder layers without the need for retraining. This adaptability facilitates practical application in various real-time object detection scenarios.
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models at different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on a T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on a T4 GPU
+
+## Usage Examples
+
+This example provides simple RT-DETR training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Load a COCO-pretrained RT-DETR-l model
+        model = RTDETR('rtdetr-l.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the RT-DETR-l model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a COCO-pretrained RT-DETR-l model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained RT-DETR-l model and run inference on the 'bus.jpg' image
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+This table presents the model types, the specific pre-trained weights, the tasks supported by each model, and the various modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) that are supported, indicated by ✅ emojis.
+
+| Model Type          | Pre-trained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|---------------------|---------------------|----------------------------------------|-----------|------------|----------|--------|
+| RT-DETR Large       | `rtdetr-l.pt`       | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+| RT-DETR Extra-Large | `rtdetr-x.pt`       | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+
+## Citations and Acknowledgements
+
+If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069):
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @misc{lv2023detrs,
+            title={DETRs Beat YOLOs on Real-time Object Detection},
+            author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
+            year={2023},
+            eprint={2304.08069},
+            archivePrefix={arXiv},
+            primaryClass={cs.CV}
+      }
+      ```
+
+We would like to acknowledge Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community. Their contribution to the field with the development of the Vision-Transformer-based real-time object detector, RT-DETR, is greatly appreciated.
+
+*keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, pre-trained PaddlePaddle RT-DETR models, Baidu RT-DETR usage, Ultralytics Python API, real-time object detection*
diff --git a/ultralytics/docs/fr/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/fr/models/rtdetr.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/rtdetr.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/sam.md b/ultralytics/docs/fr/models/sam.md
new file mode 100755
index 0000000..9dfed8a
--- /dev/null
+++ b/ultralytics/docs/fr/models/sam.md
@@ -0,0 +1,226 @@
+---
+comments: true
+description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time image segmentation. Learn about its promptable segmentation, zero-shot performance, and how to use it.
+keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
+---
+
+# Segment Anything Model (SAM)
+
+Welcome to the frontier of image segmentation with the Segment Anything Model, or SAM. This revolutionary model has changed the game by introducing promptable image segmentation with real-time performance, setting new standards in the field.
+
+## Introduction to SAM: The Segment Anything Model
+
+The Segment Anything Model, or SAM, is a cutting-edge image segmentation model that allows for promptable segmentation, providing unparalleled versatility in image analysis tasks. SAM forms the heart of the Segment Anything initiative, a groundbreaking project that introduces a novel model, task, and dataset for image segmentation.
+
+SAM's advanced design allows it to adapt to new image distributions and tasks without prior knowledge, a feature known as zero-shot transfer. Trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks spread over 11 million carefully curated images, SAM has displayed impressive zero-shot performance, surpassing previous fully supervised results in many cases.
+
+![Dataset sample image](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg)
+Example images with overlaid masks from our newly introduced dataset, SA-1B.
+SA-1B contains 11 million diverse, high-resolution, licensed, and privacy-protecting images and 1.1 billion high-quality segmentation masks. These masks were annotated fully automatically by SAM, and as verified by human ratings and numerous experiments, they are of high quality and diversity. Images are grouped by number of masks per image for visualization (there are ~100 masks per image on average).
+
+## Key Features of the Segment Anything Model (SAM)
+
+- **Promptable Segmentation Task:** SAM was designed with a promptable segmentation task in mind, allowing it to generate valid segmentation masks from any given prompt, such as spatial or text clues identifying an object.
+- **Advanced Architecture:** The Segment Anything Model employs a powerful image encoder, a prompt encoder, and a lightweight mask decoder. This unique architecture enables flexible prompting, real-time mask computation, and ambiguity awareness in segmentation tasks.
+- **The SA-1B Dataset:** Introduced by the Segment Anything project, the SA-1B dataset features more than 1 billion masks on 11 million images. As the largest segmentation dataset to date, it provides SAM with a diverse and large-scale training data source.
+- **Zero-Shot Performance:** SAM displays outstanding zero-shot performance across various segmentation tasks, making it a ready-to-use tool for diverse applications with minimal need for prompt engineering.
+
+For an in-depth look at the Segment Anything Model and the SA-1B dataset, please visit the [Segment Anything website](https://segment-anything.com) and check out the research paper [Segment Anything](https://arxiv.org/abs/2304.02643).
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), indicated by ✅ emojis for supported modes and ❌ emojis for unsupported modes.
+
+| Model Type | Pre-trained Weights | Tasks Supported                              | Inference | Validation | Training | Export |
+|------------|---------------------|----------------------------------------------|-----------|------------|----------|--------|
+| SAM base   | `sam_b.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅         | ❌          | ❌        | ✅      |
+| SAM large  | `sam_l.pt`          | [Instance Segmentation](../tasks/segment.md) | ✅         | ❌          | ❌        | ✅      |
+
+## How to Use SAM: Versatility and Power in Image Segmentation
+
+The Segment Anything Model can be employed for a multitude of downstream tasks that go beyond its training data. This includes edge detection, object proposal generation, instance segmentation, and preliminary text-to-mask prediction.
Grรขce ร  l'ingรฉnierie de prompts, SAM peut s'adapter rapidement ร  de nouvelles tรขches et distributions de donnรฉes de maniรจre sans apprentissage, ce qui en fait un outil polyvalent et puissant pour tous vos besoins en matiรจre de segmentation d'images. + +### Exemple de prรฉdiction SAM + +!!! Example "Segmentation avec des prompts" + + Segmenter l'image avec des prompts donnรฉs. + + === "Python" + + ```python + from ultralytics import SAM + + # Charger un modรจle + model = SAM('sam_b.pt') + + # Afficher les informations sur le modรจle (facultatif) + model.info() + + # Exรฉcuter l'infรฉrence avec un prompt de zones de dรฉlimitation + model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # Exรฉcuter l'infรฉrence avec un prompt de points + model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "Segmenter tout" + + Segmenter toute l'image. + + === "Python" + + ```python + from ultralytics import SAM + + # Charger un modรจle + model = SAM('sam_b.pt') + + # Afficher les informations sur le modรจle (facultatif) + model.info() + + # Exรฉcuter l'infรฉrence + model('path/to/image.jpg') + ``` + + === "CLI" + + ```bash + # Exรฉcuter l'infรฉrence avec un modรจle SAM + yolo predict model=sam_b.pt source=path/to/image.jpg + ``` + +- La logique ici est de segmenter toute l'image si vous ne passez aucun prompt (bboxes/points/masks). + +!!! Example "Exemple SAMPredictor" + + De cette maniรจre, vous pouvez dรฉfinir l'image une fois et exรฉcuter l'infรฉrence des prompts plusieurs fois sans exรฉcuter l'encodeur d'image plusieurs fois. + + === "Infรฉrence avec des prompts" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # Crรฉer un SAMPredictor + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # Dรฉfinir l'image + predictor.set_image("ultralytics/assets/zidane.jpg") # dรฉfinir avec un fichier image + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # dรฉfinir avec np.ndarray + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # Rรฉinitialiser l'image + predictor.reset_image() + ``` + + Segmenter toute l'image avec des arguments supplรฉmentaires. + + === "Segmenter tout" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # Crรฉer un SAMPredictor + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # Segmenter avec des arguments supplรฉmentaires + results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64) + ``` + +- Plus d'arguments supplรฉmentaires pour `Segmenter tout` voir la rรฉfรฉrence [`Predictor/generate`](../../../reference/models/sam/predict.md). 
+
+## SAM Comparison vs. YOLOv8
+
+Here we compare Meta's smallest SAM model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):
+
+| Model                                          | Size                       | Parameters             | Speed (CPU)                |
+|------------------------------------------------|----------------------------|------------------------|----------------------------|
+| Meta's SAM-b                                   | 358 MB                     | 94.7 M                 | 51096 ms/im                |
+| [MobileSAM](mobile-sam.md)                     | 40.7 MB                    | 10.1 M                 | 46122 ms/im                |
+| [FastSAM-s](fast-sam.md) with YOLOv8 backbone  | 23.7 MB                    | 11.8 M                 | 115 ms/im                  |
+| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x less) | **59 ms/im** (866x faster) |
+
+This comparison shows the order-of-magnitude differences in model sizes and speeds. While SAM presents unique capabilities for automatic segmentation, it is not a direct competitor to the YOLOv8 segmentation models, which are smaller, faster, and more efficient.
+
+Tests run on a 2023 Apple M2 MacBook with 16GB of RAM. To reproduce this test:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM, SAM, YOLO
+
+        # Profile SAM-b
+        model = SAM('sam_b.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile MobileSAM
+        model = SAM('mobile_sam.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile FastSAM-s
+        model = FastSAM('FastSAM-s.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile YOLOv8n-seg
+        model = YOLO('yolov8n-seg.pt')
+        model.info()
+        model('ultralytics/assets')
+        ```
+
+## Auto-Annotation: A Quick Path to Segmentation Datasets
+
+Auto-annotation is a key feature of SAM, allowing users to generate a [segmentation dataset](https://docs.ultralytics.com/datasets/segment) using a pre-trained detection model. This feature enables rapid and accurate annotation of a large number of images, bypassing the need for time-consuming manual labeling.
+
+### Generate Your Segmentation Dataset Using a Detection Model
+
+To auto-annotate your dataset with the Ultralytics framework, use the `auto_annotate` function as shown below:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics.data.annotator import auto_annotate
+
+        auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt')
+        ```
+
+| Argument   | Type                | Description                                                                                              | Default      |
+|------------|---------------------|----------------------------------------------------------------------------------------------------------|--------------|
+| data       | str                 | Path to a folder containing images to be annotated.                                                       |              |
+| det_model  | str, optional       | Pre-trained YOLO detection model. Defaults to 'yolov8x.pt'.                                               | 'yolov8x.pt' |
+| sam_model  | str, optional       | Pre-trained SAM segmentation model. Defaults to 'sam_b.pt'.                                               | 'sam_b.pt'   |
+| device     | str, optional       | Device to run the models on. Defaults to an empty string (CPU or GPU, if available).                      |              |
+| output_dir | str, None, optional | Directory to save the annotated results. Defaults to a 'labels' folder in the same directory as 'data'.   | None         |
+
+The `auto_annotate` function takes the path to your images, with optional arguments for specifying the pre-trained detection and SAM segmentation models, the device to run the models on, and the output directory for saving the annotated results.
+
+Auto-annotation with pre-trained models can dramatically cut down the time and effort required to create high-quality segmentation datasets. This feature is especially beneficial for researchers and developers working with large image collections, as it allows them to focus on model development and evaluation rather than manual annotation.
+
+## Citations and Acknowledgements
+
+If you find SAM useful in your research or development work, please consider citing our paper:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @misc{kirillov2023segment,
+            title={Segment Anything},
+            author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick},
+            year={2023},
+            eprint={2304.02643},
+            archivePrefix={arXiv},
+            primaryClass={cs.CV}
+      }
+      ```
+
+We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community.
+
+*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot performance, SA-1B dataset, advanced architecture, auto-annotation, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, AI, artificial intelligence, machine learning, data annotation, segmentation masks, detection model, YOLO detection model, bibtex, Meta AI.*
diff --git a/ultralytics/docs/fr/models/sam.md:Zone.Identifier b/ultralytics/docs/fr/models/sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/yolo-nas.md b/ultralytics/docs/fr/models/yolo-nas.md
new file mode 100755
index 0000000..e1165ec
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolo-nas.md
@@ -0,0 +1,121 @@
+---
+comments: true
+description: Explore detailed documentation of YOLO-NAS, a superior object detection model. Learn about its features, pre-trained models, usage with the Ultralytics Python API, and more.
+keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
+---
+
+# YOLO-NAS
+
+## Overview
+
+Developed by Deci AI, YOLO-NAS is a groundbreaking object detection model. It is the product of advanced Neural Architecture Search technology, meticulously designed to address the limitations of previous YOLO models.
Avec des amรฉliorations significatives en matiรจre de prise en charge de la quantification et de compromis entre prรฉcision et latence, YOLO-NAS reprรฉsente une avancรฉe majeure en matiรจre de dรฉtection d'objets. + +![Exemple de modรจle](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**Aperรงu de YOLO-NAS**. YOLO-NAS utilise des blocs adaptรฉs ร  la quantification et une quantification sรฉlective pour des performances optimales. Le modรจle, une fois converti en version quantifiรฉe INT8, prรฉsente une baisse de prรฉcision minimale, ce qui constitue une amรฉlioration significative par rapport aux autres modรจles. Ces avancรฉes aboutissent ร  une architecture supรฉrieure offrant des capacitรฉs de dรฉtection d'objets inรฉgalรฉes et des performances exceptionnelles. + +### Fonctionnalitรฉs clรฉs + +- **Bloc de base compatible avec la quantification:** YOLO-NAS introduit un nouveau bloc de base adaptรฉ ร  la quantification, ce qui permet de pallier l'une des principales limitations des prรฉcรฉdents modรจles YOLO. +- **Entraรฎnement sophistiquรฉ et quantification:** YOLO-NAS utilise des schรฉmas d'entraรฎnement avancรฉs et une quantification aprรจs l'entraรฎnement pour amรฉliorer les performances. +- **Optimisation AutoNAC et prรฉ-entraรฎnement:** YOLO-NAS utilise l'optimisation AutoNAC et est prรฉ-entraรฎnรฉ sur des ensembles de donnรฉes renommรฉs tels que COCO, Objects365 et Roboflow 100. Ce prรฉ-entraรฎnement le rend extrรชmement adaptรฉ aux tรขches de dรฉtection d'objets ultรฉrieures dans des environnements de production. + +## Modรจles prรฉ-entraรฎnรฉs + +Dรฉcouvrez la puissance de la dรฉtection d'objets de nouvelle gรฉnรฉration avec les modรจles YOLO-NAS prรฉ-entraรฎnรฉs fournis par Ultralytics. Ces modรจles sont conรงus pour offrir des performances exceptionnelles en termes de vitesse et de prรฉcision. Choisissez parmi une variรฉtรฉ d'options adaptรฉes ร  vos besoins spรฉcifiques : + +| Modรจle | mAP | Latence (ms) | +|------------------|-------|--------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +Chaque variante de modรจle est conรงue pour offrir un รฉquilibre entre la prรฉcision moyenne (mAP) et la latence, vous permettant ainsi d'optimiser vos tรขches de dรฉtection d'objets en termes de performance et de vitesse. + +## Exemples d'utilisation + +Ultralytics a rendu les modรจles YOLO-NAS faciles ร  intรฉgrer dans vos applications Python grรขce ร  notre package Python `ultralytics`. Le package fournit une interface conviviale pour simplifier le processus. + +Les exemples suivants montrent comment utiliser les modรจles YOLO-NAS avec le package `ultralytics` pour l'infรฉrence et la validation : + +### Exemples d'infรฉrence et de validation + +Dans cet exemple, nous validons YOLO-NAS-s sur l'ensemble de donnรฉes COCO8. + +!!! Example "Exemple" + + Cet exemple fournit un code simple pour l'infรฉrence et la validation de YOLO-NAS. Pour gรฉrer les rรฉsultats de l'infรฉrence, consultez le mode [Predict](../modes/predict.md). Pour utiliser YOLO-NAS avec des modes supplรฉmentaires, consultez [Val](../modes/val.md) et [Export](../modes/export.md). L'entraรฎnement n'est pas pris en charge pour YOLO-NAS avec le package `ultralytics`. 
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` models can be passed to the `NAS()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import NAS
+
+        # Load a COCO-pretrained YOLO-NAS-s model
+        model = NAS('yolo_nas_s.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Validate the model on the COCO8 example dataset
+        results = model.val(data='coco8.yaml')
+
+        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
+        yolo val model=yolo_nas_s.pt data=coco8.yaml
+
+        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
+        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+We offer three variants of the YOLO-NAS models: Small (s), Medium (m), and Large (l). Each variant is designed to cater to different computational and performance needs:
+
+- **YOLO-NAS-s**: Optimized for environments where computational resources are limited but efficiency is key.
+- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
+- **YOLO-NAS-l**: Tailored for scenarios requiring the highest accuracy, where computational resources are less of a constraint.
+
+Below is a detailed overview of each model, including links to their pre-trained weights, the tasks they support, and their compatibility with different operating modes.
+
+| Model Type | Pre-trained Weights                                                                           | Tasks Supported                        | Inference | Validation | Training | Export |
+|------------|-----------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ❌        | ✅      |
+| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ❌        | ✅      |
+| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ❌        | ✅      |
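+
+Since the table above marks Export as supported, a minimal sketch of exporting a YOLO-NAS model, assuming the standard Ultralytics export API with ONNX as the target format, could look like this:
+
+```python
+from ultralytics import NAS
+
+# Load a COCO-pretrained YOLO-NAS-s model
+model = NAS('yolo_nas_s.pt')
+
+# Export the model to ONNX (illustrative; other formats follow the same pattern)
+model.export(format='onnx')
+```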
+
+## Citations and Acknowledgements
+
+If you employ YOLO-NAS in your research or development work, please cite SuperGradients:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @misc{supergradients,
+            doi = {10.5281/ZENODO.7789328},
+            url = {https://zenodo.org/record/7789328},
+            author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}},
+            title = {Super-Gradients},
+            publisher = {GitHub},
+            journal = {GitHub repository},
+            year = {2021},
+      }
+      ```
+
+We express our gratitude to Deci AI's [Super-Gradients](https://github.com/Deci-AI/super-gradients/) team for their efforts in creating and maintaining this valuable resource for the computer vision community. We believe YOLO-NAS, with its innovative architecture and superior object detection capabilities, will become a critical tool for developers and researchers alike.
+
+*keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, SuperGradients, pre-trained models, quantization-friendly basic block, advanced training schemes, post-training quantization, AutoNAC optimization, COCO, Objects365, Roboflow 100*
diff --git a/ultralytics/docs/fr/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/fr/models/yolo-nas.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolo-nas.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/yolov3.md b/ultralytics/docs/fr/models/yolov3.md
new file mode 100755
index 0000000..ca17b1d
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov3.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Get an overview of YOLOv3, YOLOv3-Ultralytics, and YOLOv3u. Learn about their key features, usage, and supported tasks for object detection.
+keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, Object Detection, Inference, Training, Ultralytics
+---
+
+# YOLOv3, YOLOv3-Ultralytics, and YOLOv3u
+
+## Overview
+
+This document presents an overview of three closely related object detection models, namely [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3), and [YOLOv3u](https://github.com/ultralytics/ultralytics).
+
+1. **YOLOv3:** This is the third version of the You Only Look Once (YOLO) object detection algorithm. Originally developed by Joseph Redmon, YOLOv3 improved on its predecessors by introducing features such as multiscale predictions and three different sizes of detection kernels.
+
+2. **YOLOv3-Ultralytics:** This is Ultralytics' implementation of the YOLOv3 model. It reproduces the original YOLOv3 architecture and offers additional functionalities, such as support for more pre-trained models and easier customization options.
+
+3. **YOLOv3u:** This is an updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split head used in the YOLOv8 models. YOLOv3u maintains the same backbone and neck architecture as YOLOv3 but with the updated detection head from YOLOv8.
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three different scales for detection, leveraging three different sizes of detection kernels: 13x13, 26x26, and 52x52. This significantly improved detection accuracy for objects of different sizes. Additionally, YOLOv3 added features such as multi-label predictions for each bounding box and a better feature extractor network.
+
+- **YOLOv3-Ultralytics:** Ultralytics' implementation of YOLOv3 provides the same performance as the original model but comes with added support for more pre-trained models, additional training methods, and easier customization options. This makes it more versatile and user-friendly for practical applications.
+
+- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head from YOLOv8. By eliminating the need for pre-defined anchor boxes and objectness scores, this detection head design can improve the model's ability to detect objects of varying sizes and shapes. This makes YOLOv3u more robust and accurate for object detection tasks.
+
+## Supported Tasks and Modes
+
+The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics, and YOLOv3u, is designed specifically for object detection tasks. These models are renowned for their effectiveness in various real-world scenarios, balancing accuracy and speed. Each variant offers unique features and optimizations, making them suitable for a range of applications.
+
+All three models support a comprehensive set of modes, ensuring versatility in various stages of model deployment and development. These modes include [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), providing users with a complete toolkit for effective object detection.
+
+| Model Type         | Tasks Supported                        | Inference | Validation | Training | Export |
+|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv3             | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+| YOLOv3u            | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+
+This table provides an at-a-glance view of the capabilities of each YOLOv3 variant, highlighting their versatility and suitability for various tasks and operational modes in object detection workflows.
+
+## Usage Examples
+
+This example provides simple YOLOv3 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pre-trained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv3n model
+        model = YOLO('yolov3n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv3n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv3n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv3n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov3n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use YOLOv3 in your research, please cite the original YOLO papers and the Ultralytics YOLOv3 repository:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+      ```bibtex
+      @article{redmon2018yolov3,
+        title={YOLOv3: An Incremental Improvement},
+        author={Redmon, Joseph and Farhadi, Ali},
+        journal={arXiv preprint arXiv:1804.02767},
+        year={2018}
+      }
+      ```
+
+Thank you to Joseph Redmon and Ali Farhadi for developing the original YOLOv3.
diff --git a/ultralytics/docs/fr/models/yolov3.md:Zone.Identifier b/ultralytics/docs/fr/models/yolov3.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov3.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/models/yolov4.md b/ultralytics/docs/fr/models/yolov4.md
new file mode 100755
index 0000000..0221483
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov4.md
@@ -0,0 +1,71 @@
+---
+comments: true
+description: Explore our detailed guide on YOLOv4, a state-of-the-art real-time object detector. Understand its architectural highlights, innovative features, and application examples.
+keywords: ultralytics, YOLOv4, object detection, neural network, real-time detection, object detector, machine learning
+---
+
+# YOLOv4: High-Speed and Precise Object Detection
+
+Welcome to the Ultralytics documentation page for YOLOv4, a state-of-the-art, real-time object detector launched in 2020 by Alexey Bochkovskiy at [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 is designed to provide the optimal balance between speed and accuracy, making it an excellent choice for many applications.
+
+![YOLOv4 architecture diagram](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png)
+**YOLOv4 architecture diagram.** Showcasing the intricate network design of YOLOv4, including the backbone, neck, and head components and their interconnected layers for optimal real-time object detection.
+
+## Introduction
+
+YOLOv4 stands for You Only Look Once version 4.
Il s'agit d'un modรจle de dรฉtection d'objets en temps rรฉel dรฉveloppรฉ pour remรฉdier aux limitations des versions prรฉcรฉdentes de YOLO comme [YOLOv3](yolov3.md) et d'autres modรจles de dรฉtection d'objets. Contrairement ร  d'autres dรฉtecteurs d'objets basรฉs sur des rรฉseaux neuronaux convolutifs (CNN), YOLOv4 n'est pas seulement applicable aux systรจmes de recommandation, mais aussi ร  la gestion de processus autonomes et ร  la rรฉduction de l'entrรฉe humaine. Son utilisation sur des unitรฉs de traitement graphique (GPU) conventionnelles permet une utilisation massive ร  un prix abordable, et il est conรงu pour fonctionner en temps rรฉel sur un GPU conventionnel tout en ne nรฉcessitant qu'un seul de ces GPU pour l'entraรฎnement. + +## Architecture + +YOLOv4 utilise plusieurs fonctionnalitรฉs innovantes qui travaillent ensemble pour optimiser ses performances. Celles-ci incluent les connexions rรฉsiduelles pondรฉrรฉes (WRC), les connexions partielles ร  travers les รฉtapes (CSP), la normalisation mini-batch traversรฉe (CmBN), l'entraรฎnement auto-antagoniste (SAT), l'activation Mish, l'augmentation des donnรฉes en mosaรฏque, la rรฉgularisation DropBlock et la perte CIoU. Ces fonctionnalitรฉs sont combinรฉes pour obtenir des rรฉsultats de pointe. + +Un dรฉtecteur d'objets typique est composรฉ de plusieurs parties, notamment l'entrรฉe, le backbone, le neck et le head. Le backbone de YOLOv4 est prรฉ-entraรฎnรฉ sur ImageNet et est utilisรฉ pour prรฉdire les classes et les boรฎtes englobantes des objets. Le backbone peut provenir de plusieurs modรจles, notamment VGG, ResNet, ResNeXt ou DenseNet. La partie "neck" du dรฉtecteur est utilisรฉe pour collecter des cartes de caractรฉristiques ร  partir de diffรฉrentes รฉtapes et comprend gรฉnรฉralement plusieurs chemins "bottom-up" et plusieurs chemins "top-down". La partie "head" est ce qui est utilisรฉ pour faire les dรฉtections et classifications finales des objets. + +## Ensemble de Bonus + +YOLOv4 utilise รฉgalement des mรฉthodes appelรฉes "ensemble de bonus", qui sont des techniques permettant d'amรฉliorer la prรฉcision du modรจle lors de l'entraรฎnement sans augmenter le coรปt de l'infรฉrence. L'augmentation de donnรฉes est une technique commune de l'ensemble de bonus utilisรฉe dans la dรฉtection d'objets, qui augmente la variabilitรฉ des images d'entrรฉe pour amรฉliorer la robustesse du modรจle. Quelques exemples d'augmentation de donnรฉes incluent les distorsions photomรฉtriques (ajustement de la luminositรฉ, du contraste, de la teinte, de la saturation et du bruit d'une image) et les distorsions gรฉomรฉtriques (ajout d'รฉchelle alรฉatoire, de recadrage, de retournement et de rotation). Ces techniques aident le modรจle ร  mieux gรฉnรฉraliser ร  diffรฉrents types d'images. + +## Fonctionnalitรฉs et Performances + +YOLOv4 est conรงu pour une vitesse et une prรฉcision optimales dans la dรฉtection d'objets. L'architecture de YOLOv4 comprend CSPDarknet53 en tant que backbone, PANet en tant que neck et YOLOv3 en tant que detection head. Ce design permet ร  YOLOv4 de rรฉaliser une dรฉtection d'objets ร  une vitesse impressionnante, ce qui le rend adaptรฉ aux applications en temps rรฉel. YOLOv4 excelle รฉgalement en prรฉcision, atteignant des rรฉsultats de pointe dans les benchmarks de dรฉtection d'objets. + +## Exemples d'Utilisation + +Au moment de la rรฉdaction de ce document, Ultralytics ne prend pas en charge les modรจles YOLOv4. 
Please note that the specific steps may vary depending on your use case and the current state of the YOLOv4 repository. It is therefore strongly recommended to refer directly to the instructions provided in the YOLOv4 GitHub repository.

We regret any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once YOLOv4 support is implemented.

## Conclusion

YOLOv4 is a powerful and efficient object detection model that balances speed and accuracy. Its use of unique features and bag-of-freebies techniques during training allows it to perform excellently in real-time object detection tasks. YOLOv4 can be trained and used by anyone with a conventional GPU, making it accessible and practical for a wide range of applications.

## Citations and Acknowledgements

We would like to acknowledge the YOLOv4 authors for their significant contributions to the field of real-time object detection:

!!! Quote ""

    === "BibTeX"

        ```bibtex
        @misc{bochkovskiy2020yolov4,
          title={YOLOv4: Optimal Speed and Accuracy of Object Detection},
          author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao},
          year={2020},
          eprint={2004.10934},
          archivePrefix={arXiv},
          primaryClass={cs.CV}
        }
        ```

The original YOLOv4 paper can be found on [arXiv](https://arxiv.org/abs/2004.10934). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/AlexeyAB/darknet). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/fr/models/yolov5.md b/ultralytics/docs/fr/models/yolov5.md
new file mode 100755
index 0000000..885c70e
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov5.md
@@ -0,0 +1,113 @@
---
comments: true
description: Discover YOLOv5u, an improved version of the YOLOv5 model offering a better accuracy-speed tradeoff, along with numerous pretrained models for various object detection tasks.
keywords: YOLOv5u, object detection, pretrained models, Ultralytics, inference, validation, YOLOv5, YOLOv8, anchor-free, objectness-free, real-time applications, machine learning
---

# YOLOv5

## Overview

YOLOv5u represents an advancement in object detection methodologies. Originating from the foundational architecture of the [YOLOv5](https://github.com/ultralytics/yolov5) model developed by Ultralytics, YOLOv5u integrates the anchor-free, objectness-free split head previously introduced in the [YOLOv8](yolov8.md) models. This adaptation refines the model's architecture, leading to a better accuracy-speed tradeoff in object detection tasks. Given the empirical results and its derived features, YOLOv5u offers an efficient alternative for those seeking robust solutions for both research and practical applications.

![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png)

## Key Features

- **Anchor-Free Split Ultralytics Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. YOLOv5u modernizes this approach. By adopting an anchor-free split Ultralytics head, it provides a more flexible and adaptive detection mechanism, improving performance across diverse scenarios.

- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions, but YOLOv5u challenges this tradeoff. It offers a calibrated balance, enabling real-time detections without compromising accuracy. This feature is particularly valuable for applications that demand fast responses, such as autonomous vehicles, robotics, and real-time video analytics.

- **Variety of Pretrained Models:** Understanding that different tasks require different toolsets, YOLOv5u provides a wide range of pretrained models. Whether you are focused on inference, validation, or training, a tailored model awaits. This variety ensures that you are not using a one-size-fits-all solution, but a model specifically tuned to your unique challenge.

## Supported Tasks and Modes

The YOLOv5u models, with various pretrained weights, excel at [object detection](../tasks/detect.md) tasks. They support a full range of modes, making them suitable for diverse applications from development to deployment.
| Model Type | Pretrained Weights                                                                                                          | Task                                    | Inference | Validation | Training | Export |
|------------|-----------------------------------------------------------------------------------------------------------------------------|-----------------------------------------|-----------|------------|----------|--------|
| YOLOv5u    | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object detection](../tasks/detect.md)  | ✅         | ✅          | ✅        | ✅      |

This table provides a detailed overview of the YOLOv5u model variants, highlighting their applicability to object detection tasks and their support for various operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can fully exploit the capabilities of YOLOv5u models in a wide range of object detection scenarios.

## Performance Metrics

!!! Performance

    === "Detection"

        See the [detection docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pretrained classes.

        | Model | YAML | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
        |---|---|---|---|---|---|---|---|
        | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 |
        | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 |
        | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 |
        | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 |
        | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 |
        | | | | | | | | |
        | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 |
        | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 |
        | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 |
        | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 |
        | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
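The mAP values above are measured on the full COCO val2017 set. As a minimal sketch of how a comparable validation run can be launched through the Ultralytics API, the snippet below substitutes the tiny `coco8.yaml` sanity-check dataset for full COCO, so its numbers will not match the table:

```python
from ultralytics import YOLO

# Validate a pretrained YOLOv5u checkpoint; use data='coco.yaml' to
# approximate the table above (COCO8 is only a tiny sanity-check subset)
model = YOLO('yolov5nu.pt')
metrics = model.val(data='coco8.yaml', imgsz=640)
print(metrics.box.map)    # mAP50-95
print(metrics.box.map50)  # mAP50
```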
Example "Exemple" + + === "Python" + + Les modรจles PyTorch prรฉ-entraรฎnรฉs `*.pt` ainsi que les fichiers de configuration `*.yaml` peuvent รชtre passรฉs ร  la classe `YOLO()` pour crรฉer une instance de modรจle en python : + + ```python + from ultralytics import YOLO + + # Charger un modรจle YOLOv5n prรฉ-entraรฎnรฉ sur COCO + model = YOLO('yolov5n.pt') + + # Afficher les informations sur le modรจle (facultatif) + model.info() + + # Former le modรจle sur l'ensemble de donnรฉes d'exemple COCO8 pendant 100 รฉpoques + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Exรฉcuter l'infรฉrence avec le modรจle YOLOv5n sur l'image 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + Des commandes CLI sont disponibles pour exรฉcuter directement les modรจles : + + ```bash + # Charger un modรจle YOLOv5n prรฉ-entraรฎnรฉ sur COCO et l'entraรฎner sur l'ensemble de donnรฉes d'exemple COCO8 pendant 100 รฉpoques + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Charger un modรจle YOLOv5n prรฉ-entraรฎnรฉ sur COCO et exรฉcuter l'infรฉrence sur l'image 'bus.jpg' + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## Citations et remerciements + +Si vous utilisez YOLOv5 ou YOLOv5u dans vos recherches, veuillez citer le rรฉfรฉrentiel Ultralytics YOLOv5 comme suit : + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Veuillez noter que les modรจles YOLOv5 sont fournis sous les licences [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) et [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/fr/models/yolov5.md:Zone.Identifier b/ultralytics/docs/fr/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/models/yolov6.md b/ultralytics/docs/fr/models/yolov6.md new file mode 100755 index 0000000..3d4cc36 --- /dev/null +++ b/ultralytics/docs/fr/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: Explorez Meituan YOLOv6, un modรจle de dรฉtection d'objets ร  la pointe de la technologie offrant un รฉquilibre entre vitesse et prรฉcision. Plongez-vous dans les fonctionnalitรฉs, les modรจles prรฉ-entraรฎnรฉs et l'utilisation de Python. +keywords: Meituan YOLOv6, dรฉtection d'objets, Ultralytics, YOLOv6 docs, Bi-directional Concatenation, Anchor-Aided Training, modรจles prรฉ-entraรฎnรฉs, applications en temps rรฉel +--- + +# Meituan YOLOv6 + +## Vue d'ensemble + +[Meituan](https://about.meituan.com/) YOLOv6 est un dรฉtecteur d'objets de pointe qui offre un รฉquilibre remarquable entre vitesse et prรฉcision, ce qui en fait un choix populaire pour les applications en temps rรฉel. Ce modรจle introduit plusieurs amรฉliorations remarquables sur son architecture et son schรฉma d'entraรฎnement, notamment la mise en ล“uvre d'un module de concatรฉnation bidirectionnelle (BiC), d'une stratรฉgie d'entraรฎnement assistรฉe par ancrage (AAT) et d'une conception amรฉliorรฉe de l'รฉpine dorsale et du cou pour une prรฉcision de pointe sur l'ensemble de donnรฉes COCO. 
![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png)
![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png)
**Overview of YOLOv6.** Model architecture diagram showing the redesigned network components and training strategies that led to significant performance improvements. (a) The YOLOv6 backbone (N and S are shown). Note that for M/L, RepBlocks is replaced with CSPStackRep. (b) The structure of a BiC module. (c) A SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)).

### Key Features

- **Bi-directional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the detector's neck, improving localization signals and delivering performance gains with negligible speed degradation.
- **Anchor-Aided Training (AAT) Strategy:** This model proposes AAT to enjoy the benefits of both anchor-based and anchor-free paradigms without compromising inference efficiency.
- **Improved Backbone and Neck Design:** By deepening YOLOv6 to include another stage in the backbone and neck, this model achieves state-of-the-art performance on the COCO dataset with high-resolution input.
- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of smaller YOLOv6 models, enhancing the auxiliary regression branch during training and removing it at inference time to avoid a noticeable speed drop.

## Performance Metrics

YOLOv6 provides various pretrained models at different scales:

- YOLOv6-N: 37.5% AP on COCO val2017 at 1187 FPS with an NVIDIA Tesla T4 GPU.
- YOLOv6-S: 45.0% AP at 484 FPS.
- YOLOv6-M: 50.0% AP at 226 FPS.
- YOLOv6-L: 52.8% AP at 116 FPS.
- YOLOv6-L6: State-of-the-art accuracy in real time.

YOLOv6 also provides quantized models at various precisions, as well as models optimized for mobile platforms.
Example "Exemple" + + === "Python" + + Les modรจles prรฉ-entraรฎnรฉs PyTorch `*.pt`, ainsi que les fichiers de configuration `*.yaml`, peuvent รชtre utilisรฉs pour crรฉer une instance de modรจle en python en utilisant la classe `YOLO()` : + + ```python + from ultralytics import YOLO + + # Crรฉer un modรจle YOLOv6n ร  partir de zรฉro + model = YOLO('yolov6n.yaml') + + # Afficher les informations sur le modรจle (facultatif) + model.info() + + # Entraรฎner le modรจle sur l'ensemble de donnรฉes d'exemple COCO8 pendant 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Effectuer une infรฉrence avec le modรจle YOLOv6n sur l'image 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + Des commandes CLI sont disponibles pour exรฉcuter directement les modรจles : + + ```bash + # Crรฉer un modรจle YOLOv6n ร  partir de zรฉro et l'entraรฎner sur l'ensemble de donnรฉes d'exemple COCO8 pendant 100 epochs + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Crรฉer un modรจle YOLOv6n ร  partir de zรฉro et effectuer une infรฉrence sur l'image 'bus.jpg' + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## Tรขches et modes pris en charge + +La sรฉrie YOLOv6 propose une gamme de modรจles, chacun optimisรฉ pour la [dรฉtection d'objets](../tasks/detect.md) haute performance. Ces modรจles rรฉpondent ร  des besoins computationnels et des exigences de prรฉcision variables, ce qui les rend polyvalents pour une large gamme d'applications. + +| Type de modรจle | Modรจles prรฉ-entraรฎnรฉs | Tรขches prises en charge | Infรฉrence | Validation | Entraรฎnement | Export | +|----------------|-----------------------|------------------------------------------|-----------|------------|--------------|--------| +| YOLOv6-N | `yolov6-n.pt` | [Dรฉtection d'objets](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Dรฉtection d'objets](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Dรฉtection d'objets](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Dรฉtection d'objets](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Dรฉtection d'objets](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Ce tableau fournit un aperรงu dรฉtaillรฉ des variantes du modรจle YOLOv6, mettant en รฉvidence leurs capacitรฉs dans les tรขches de dรฉtection d'objets et leur compatibilitรฉ avec diffรฉrents modes opรฉrationnels tels que l'[Infรฉrence](../modes/predict.md), la [Validation](../modes/val.md), l'[Entraรฎnement](../modes/train.md) et l'[Export](../modes/export.md). Cette prise en charge complรจte permet aux utilisateurs de tirer pleinement parti des capacitรฉs des modรจles YOLOv6 dans un large รฉventail de scรฉnarios de dรฉtection d'objets. + +## Citations et remerciements + +Nous tenons ร  remercier les auteurs pour leur contribution importante dans le domaine de la dรฉtection d'objets en temps rรฉel : + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + + Le document original de YOLOv6 peut รชtre consultรฉ sur [arXiv](https://arxiv.org/abs/2301.05586). 
The original YOLOv6 paper can be found on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the code can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community.

diff --git a/ultralytics/docs/fr/models/yolov7.md b/ultralytics/docs/fr/models/yolov7.md
new file mode 100755
index 0000000..b2ec749
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov7.md
@@ -0,0 +1,66 @@
---
comments: true
description: Discover YOLOv7, a real-time object detector. Understand its superior speed, impressive accuracy, and unique focus on trainable bag-of-freebies optimization.
keywords: YOLOv7, real-time object detector, state-of-the-art, Ultralytics, MS COCO dataset, model re-parameterization, dynamic label assignment, extended scaling, compound scaling
---

# YOLOv7: Trainable Bag-of-Freebies

YOLOv7 is a state-of-the-art real-time object detector that surpasses all known object detectors in both speed and accuracy in the range of 5 FPS to 160 FPS. It has the highest accuracy (56.8% AP) among all known real-time object detectors running at 30 FPS or more on a V100 GPU. Moreover, YOLOv7 outperforms other object detectors such as YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5, and many others in speed and accuracy. The model is trained from scratch on the MS COCO dataset without using any other datasets or pretrained weights. The YOLOv7 source code is available on GitHub.

![Comparison of YOLOv7 with SOTA object detectors](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92)
**Comparison of state-of-the-art object detectors.** From the results in Table 2 we know that the proposed method has the best speed-accuracy tradeoff overall. If we compare YOLOv7-tiny-SiLU with YOLOv5-N (r6.1), our method is 127 FPS faster and 10.7% more accurate in AP. Furthermore, YOLOv7 achieves 51.4% AP at a frame rate of 161 FPS, while PPYOLOE-L with the same AP reaches only 78 FPS. In terms of parameter usage, YOLOv7 consumes 41% less than PPYOLOE-L. If we compare YOLOv7-X, with a 114 FPS inference speed, to YOLOv5-L (r6.1), with a 99 FPS inference speed, YOLOv7-X improves AP by 3.9%. If YOLOv7-X is compared with YOLOv5-X (r6.1) of similar scale, the inference speed of YOLOv7-X is 31 FPS faster. In addition, in terms of the number of parameters and computation, YOLOv7-X reduces parameters by 22% and computation by 8% relative to YOLOv5-X (r6.1), while improving AP by 2.2% ([Source](https://arxiv.org/pdf/2207.02696.pdf)).
## Overview

Real-time object detection is an important component of many computer vision systems, including multi-object tracking, autonomous driving, robotics, and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving the inference speed of CPUs, GPUs, and neural processing units (NPUs) in various configurations. YOLOv7 supports both mobile GPUs and GPU devices, from the edge to the cloud.

Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on optimizing the training process. This includes modules and optimization methods designed to improve object detection accuracy without increasing the cost of inference, a concept known as the "trainable bag-of-freebies".

## Key Features

YOLOv7 offers several key features:

1. **Model Re-parameterization**: YOLOv7 proposes a planned re-parameterized model, a strategy applicable to layers of different networks using the concept of gradient propagation paths.

2. **Dynamic Label Assignment**: Training a model with multiple output layers raises a new problem: "How to assign dynamic targets to the outputs of the different branches?" To solve this, YOLOv7 introduces a new label assignment method called coarse-to-fine lead-guided label assignment.

3. **Extended and Compound Scaling**: YOLOv7 proposes "extended scaling" and "compound scaling" methods for the real-time object detector, which make efficient use of parameters and computation.

4. **Efficiency**: The method proposed by YOLOv7 can effectively reduce the parameters of the state-of-the-art real-time object detector by about 40% and its computation by about 50%, while delivering faster inference speed and higher detection accuracy.

## Usage Examples

As of this writing, Ultralytics does not support YOLOv7 models. Anyone interested in using YOLOv7 will therefore need to refer directly to the YOLOv7 GitHub repository for installation and usage instructions.

Here is a brief overview of the typical steps you might follow to use YOLOv7:

1. Visit the YOLOv7 GitHub repository: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7).

2. Follow the instructions in the README for installation. This typically involves cloning the repository, installing the required dependencies, and setting any necessary environment variables.

3. Once installation is complete, you can train and use the model according to the usage instructions in the repository, as sketched below. This usually involves preparing your dataset, configuring the model parameters, training the model, and then using the trained model to perform object detection.

Please note that the specific steps may vary depending on your use case and the current state of the YOLOv7 repository. It is therefore strongly recommended to refer directly to the instructions provided in the YOLOv7 GitHub repository.
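For orientation only, here is a minimal sketch of that workflow driven from Python, assuming `git` and `pip` are available, the repository's `requirements.txt` installs cleanly, and the `yolov7.pt` checkpoint has been downloaded separately from the repository's release assets:

```python
import subprocess
from pathlib import Path

REPO = Path("yolov7")  # assumed local clone location

# Clone the repository and install its dependencies
if not REPO.exists():
    subprocess.run(["git", "clone", "https://github.com/WongKinYiu/yolov7"], check=True)
    subprocess.run(["pip", "install", "-r", "requirements.txt"], cwd=REPO, check=True)

# Run the repository's own inference script; 'yolov7.pt' must be downloaded
# separately from the release assets
subprocess.run(
    ["python", "detect.py", "--weights", "yolov7.pt", "--source", "inference/images/horses.jpg"],
    cwd=REPO,
    check=True,
)
```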
We apologize for any inconvenience this may cause and will strive to update this document with usage examples for Ultralytics once YOLOv7 support is implemented.

## Citations and Acknowledgements

We would like to acknowledge the YOLOv7 authors for their significant contributions to the field of real-time object detection:

!!! Quote ""

    === "BibTeX"

        ```bibtex
        @article{wang2022yolov7,
          title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors},
          author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark},
          journal={arXiv preprint arXiv:2207.02696},
          year={2022}
        }
        ```

The original YOLOv7 paper can be found on [arXiv](https://arxiv.org/pdf/2207.02696.pdf). The authors have made their work publicly available, and the code can be accessed on [GitHub](https://github.com/WongKinYiu/yolov7). We appreciate their efforts in advancing the field and making their work accessible to the broader community.

diff --git a/ultralytics/docs/fr/models/yolov8.md b/ultralytics/docs/fr/models/yolov8.md
new file mode 100755
index 0000000..f07c40a
--- /dev/null
+++ b/ultralytics/docs/fr/models/yolov8.md
@@ -0,0 +1,162 @@
---
comments: true
description: Explore the exciting features of YOLOv8, the latest version of our real-time object detector! Learn how advanced architectures, pretrained models, and an optimal balance between accuracy and speed make YOLOv8 the perfect choice for your object detection tasks.
keywords: YOLOv8, Ultralytics, real-time object detector, pretrained models, documentation, object detection, YOLO series, advanced architectures, accuracy, speed
---

# YOLOv8

## Overview

YOLOv8 is the latest iteration in the YOLO series of real-time object detectors, offering state-of-the-art performance in terms of accuracy and speed. Building on the advances of previous YOLO versions, YOLOv8 introduces new features and optimizations that make it an ideal choice for various object detection tasks across a wide range of applications.

![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png)

## Key Features

- **Advanced Backbone and Neck Architectures:** YOLOv8 employs state-of-the-art backbone and neck architectures, enabling better feature extraction and improved object detection performance.
- **Anchor-Free Split Ultralytics Head:** YOLOv8 adopts an anchor-free split Ultralytics head, which contributes to better accuracy and a more efficient detection process compared to anchor-based approaches.
- **Optimized Accuracy-Speed Tradeoff:** With a focus on maintaining an optimal balance between accuracy and speed, YOLOv8 is suited to real-time object detection tasks in diverse application areas.
- **Variety of Pretrained Models:** YOLOv8 offers a range of pretrained models to meet different tasks and performance requirements, making it easy to find the right model for your specific use case.

## Supported Tasks and Modes

The YOLOv8 series offers a diverse range of models, each specialized for specific computer vision tasks. These models are designed to meet various requirements, from object detection to more complex tasks such as instance segmentation, pose/keypoint detection, and classification.

Each variant of the YOLOv8 series is optimized for its respective task, ensuring high performance and accuracy. In addition, these models are compatible with various operational modes, including [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md), facilitating their use at different stages of development and deployment.

| Model       | Filenames                                                                                                      | Task                                          | Inference | Validation | Training | Export |
|-------------|------------------------------------------------------------------------------------------------------------------|-----------------------------------------------|-----------|------------|----------|--------|
| YOLOv8      | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt`                                                   | [Detection](../tasks/detect.md)               | ✅         | ✅          | ✅        | ✅      |
| YOLOv8-seg  | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt`                               | [Instance segmentation](../tasks/segment.md)  | ✅         | ✅          | ✅        | ✅      |
| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt`     | [Pose/keypoints](../tasks/pose.md)            | ✅         | ✅          | ✅        | ✅      |
| YOLOv8-cls  | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt`                               | [Classification](../tasks/classify.md)        | ✅         | ✅          | ✅        | ✅      |

This table gives an overview of the YOLOv8 model variants, highlighting their applicability to specific tasks and their compatibility with various operational modes such as inference, validation, training, and export. It showcases the versatility and robustness of the YOLOv8 series, making them suitable for a variety of computer vision applications.

## Performance Metrics

!!! Performance

    === "Detection (COCO)"

        See the [detection docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pretrained classes.

        | Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
        |---|---|---|---|---|---|---|
        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |

    === "Detection (Open Images V7)"

        See the [detection docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pretrained classes.

        | Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
        |---|---|---|---|---|---|---|
        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |

    === "Segmentation (COCO)"

        See the [segmentation docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pretrained classes.

        | Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95</sup> | mAP<sup>mask<br>50-95</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
        |---|---|---|---|---|---|---|---|
        | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
        | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
        | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
        | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
        | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |

    === "Classification (ImageNet)"

        See the [classification docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.

        | Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 640</sup> |
        |---|---|---|---|---|---|---|---|
        | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
        | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
        | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
        | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
        | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |

    === "Pose (COCO)"

        See the [pose estimation docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include one pretrained class, 'person'.

        | Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95</sup> | mAP<sup>pose<br>50</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>A100 TensorRT<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
        |---|---|---|---|---|---|---|---|
        | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
        | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
        | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
        | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
        | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
        | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |

## Usage Examples

This example provides simple YOLOv8 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.

Note that the example below covers the YOLOv8 detection models. For the other supported tasks, see the [Segmentation](../tasks/segment.md), [Classification](../tasks/classify.md), and [Pose/Keypoints](../tasks/pose.md) documentation.

!!! Example "Example"

    === "Python"

        PyTorch pretrained `*.pt` models, as well as configuration `*.yaml` files, can be passed to the `YOLO()` class to create a model instance in Python:

        ```python
        from ultralytics import YOLO

        # Load a COCO-pretrained YOLOv8n model
        model = YOLO('yolov8n.pt')

        # Display model information (optional)
        model.info()

        # Train the model on the COCO8 example dataset for 100 epochs
        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)

        # Run inference with the YOLOv8n model on the 'bus.jpg' image
        results = model('path/to/bus.jpg')
        ```

    === "CLI"

        CLI commands are available to run the models directly:

        ```bash
        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640

        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
        yolo predict model=yolov8n.pt source=path/to/bus.jpg
        ```
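For the other tasks, the same API applies with task-specific checkpoints, and the returned `Results` objects expose task-specific fields; a minimal sketch for instance segmentation (the image path is a placeholder):

```python
from ultralytics import YOLO

# Load a COCO-pretrained YOLOv8n segmentation model
model = YOLO('yolov8n-seg.pt')

# Run inference; each Results object exposes .boxes, .masks and,
# for pose checkpoints, .keypoints
results = model('path/to/bus.jpg')
for r in results:
    print(r.boxes.xyxy)                                          # detection boxes (xyxy)
    print(r.masks.data.shape if r.masks is not None else None)   # instance mask tensor
```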
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +Veuillez noter que le DOI est en attente et sera ajoutรฉ ร  la citation dรจs qu'il sera disponible. Les modรจles YOLOv8 sont fournis sous licence [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) et [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/fr/models/yolov8.md:Zone.Identifier b/ultralytics/docs/fr/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/modes/benchmark.md b/ultralytics/docs/fr/modes/benchmark.md new file mode 100755 index 0000000..5708aa4 --- /dev/null +++ b/ultralytics/docs/fr/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Apprenez comment profiler la vitesse et l'exactitude de YOLOv8 ร  travers divers formats d'exportation ; obtenez des insights sur les mรฉtriques mAP50-95, accuracy_top5 et plus. +keywords: Ultralytics, YOLOv8, benchmarking, profilage de vitesse, profilage de prรฉcision, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, formats d'exportation YOLO +--- + +# Benchmarking de Modรจles avec Ultralytics YOLO + +ร‰cosystรจme Ultralytics YOLO et intรฉgrations + +## Introduction + +Une fois votre modรจle entraรฎnรฉ et validรฉ, l'รฉtape logique suivante est d'รฉvaluer ses performances dans divers scรฉnarios du monde rรฉel. Le mode benchmark dans Ultralytics YOLOv8 rรฉpond ร  cet objectif en fournissant un cadre robuste pour รฉvaluer la vitesse et l'exactitude de votre modรจle sur une gamme de formats d'exportation. + +## Pourquoi le Benchmarking est-il Crucial ? + +- **Dรฉcisions ร‰clairรฉes :** Obtenez des insights sur les arbitrages entre la vitesse et l'exactitude. +- **Allocation des Ressources :** Comprenez comment les diffรฉrents formats d'exportation se comportent sur diffรฉrents matรฉriels. +- **Optimisation :** Dรฉcouvrez quel format d'exportation offre la meilleure performance pour votre cas d'utilisation spรฉcifique. +- **Efficacitรฉ des Coรปts :** Utilisez les ressources matรฉrielles plus efficacement en vous basant sur les rรฉsultats des benchmarks. + +### Mesures Clรฉs en Mode Benchmark + +- **mAP50-95 :** Pour la dรฉtection d'objets, la segmentation et l'estimation de pose. +- **accuracy_top5 :** Pour la classification d'images. +- **Temps d'Infรฉrence :** Temps pris pour chaque image en millisecondes. + +### Formats d'Exportation Supportรฉs + +- **ONNX :** Pour une performance optimale sur CPU. +- **TensorRT :** Pour une efficacitรฉ maximale sur GPU. +- **OpenVINO :** Pour l'optimisation du matรฉriel Intel. +- **CoreML, TensorFlow SavedModel, et Plus :** Pour des besoins variรฉs de dรฉploiement. + +!!! astuce "Conseil" + + * Exportez vers ONNX ou OpenVINO pour un gain de vitesse CPU jusqu'ร  3x. + * Exportez vers TensorRT pour un gain de vitesse GPU jusqu'ร  5x. + +## Exemples d'Utilisation + +Exรฉcutez les benchmarks YOLOv8n sur tous les formats d'exportation supportรฉs, y compris ONNX, TensorRT, etc. Consultez la section Arguments ci-dessous pour une liste complรจte des arguments d'exportation. + +!!! 
Example "Exemple" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark sur GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Arguments + +Des arguments tels que `model`, `data`, `imgsz`, `half`, `device` et `verbose` offrent aux utilisateurs la flexibilitรฉ d'ajuster prรฉcisรฉment les benchmarks ร  leurs besoins spรฉcifiques et de comparer facilement les performances de diffรฉrents formats d'exportation. + +| Clรฉ | Valeur | Description | +|-----------|---------|---------------------------------------------------------------------------------------| +| `model` | `None` | chemin vers le fichier modรจle, par ex. yolov8n.pt, yolov8n.yaml | +| `data` | `None` | chemin vers le YAML rรฉfรฉrenรงant le dataset de benchmarking (sous l'รฉtiquette `val`) | +| `imgsz` | `640` | taille de l'image comme scalaire ou liste (h, w), par ex. (640, 480) | +| `half` | `False` | quantification FP16 | +| `int8` | `False` | quantification INT8 | +| `device` | `None` | appareil sur lequel exรฉcuter, par ex. appareil cuda=0 ou device=0,1,2,3 ou device=cpu | +| `verbose` | `False` | ne pas continuer en cas d'erreur (bool), ou seuil de plancher val (float) | + +## Formats d'Exportation + +Les benchmarks tenteront de s'exรฉcuter automatiquement sur tous les formats d'exportation possibles ci-dessous. + +| Format | Argument `format` | Modรจle | Mรฉtadonnรฉes | Arguments | +|--------------------------------------------------------------------|-------------------|---------------------------|-------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Consultez les dรฉtails complets sur `export` dans la page [Export](https://docs.ultralytics.com/modes/export/). 
## Export Formats

Benchmarks will automatically attempt to run on all the possible export formats below.

| Format | `format` Argument | Model | Metadata | Arguments |
|---|---|---|---|---|
| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | ✅ | - |
| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | ✅ | `imgsz`, `optimize` |
| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | ✅ | `imgsz`, `half` |
| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` |
| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | ✅ | `imgsz`, `keras` |
| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | ❌ | `imgsz` |
| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | ✅ | `imgsz`, `half`, `int8` |
| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | ✅ | `imgsz` |
| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | ✅ | `imgsz` |
| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | ✅ | `imgsz` |
| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | ✅ | `imgsz`, `half` |

See full `export` details on the [Export](https://docs.ultralytics.com/modes/export/) page.

diff --git a/ultralytics/docs/fr/modes/export.md b/ultralytics/docs/fr/modes/export.md
new file mode 100755
index 0000000..e37d6e9
--- /dev/null
+++ b/ultralytics/docs/fr/modes/export.md
@@ -0,0 +1,108 @@
---
comments: true
description: Step-by-step guide to exporting your YOLOv8 models to various formats such as ONNX, TensorRT, CoreML, and more for deployment. Explore now!
keywords: YOLO, YOLOv8, Ultralytics, model export, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, export a model
---

# Model Export with Ultralytics YOLO

Ultralytics YOLO ecosystem and integrations

## Introduction

The ultimate goal of training a model is to deploy it for real-world applications. Export mode in Ultralytics YOLOv8 offers a wide range of options for exporting your trained model to different formats, making it deployable across various platforms and devices. This comprehensive guide aims to walk you through the nuances of model export, showing how to achieve maximum compatibility and performance.

**Watch:** How to export a custom-trained Ultralytics YOLOv8 model and run live inference on webcam.
## Why Choose YOLOv8's Export Mode?

- **Versatility:** Export to multiple formats, including ONNX, TensorRT, CoreML, and more.
- **Performance:** Gain up to a 5x GPU speedup with TensorRT and a 3x CPU speedup with ONNX or OpenVINO.
- **Compatibility:** Make your model universally deployable across numerous hardware and software environments.
- **Ease of Use:** Simple CLI and Python API for quick and straightforward model export.

### Key Features of Export Mode

Here are some of the standout features:

- **One-Click Export:** Simple commands for exporting to different formats.
- **Batch Export:** Export models capable of batch inference.
- **Optimized Inference:** Exported models are optimized for faster inference times.
- **Tutorial Videos:** Detailed guides and tutorials for a smooth export experience.

!!! Tip "Tip"

    * Export to ONNX or OpenVINO for up to a 3x CPU speedup.
    * Export to TensorRT for up to a 5x GPU speedup.

## Usage Examples

Export a YOLOv8n model to a different format such as ONNX or TensorRT. See the Arguments section below for a full list of export arguments.

!!! Example "Example"

    === "Python"

        ```python
        from ultralytics import YOLO

        # Load a model
        model = YOLO('yolov8n.pt')  # load an official model
        model = YOLO('path/to/best.pt')  # load a custom-trained model

        # Export the model
        model.export(format='onnx')
        ```

    === "CLI"

        ```bash
        yolo export model=yolov8n.pt format=onnx  # export official model
        yolo export model=path/to/best.pt format=onnx  # export custom-trained model
        ```
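Building on the example above, `model.export()` returns the path of the exported file, which can be handed directly to a downstream runtime; a minimal sketch, assuming the `onnxruntime` package is installed separately:

```python
from ultralytics import YOLO

# Export with dynamic axes and graph simplification (see the Arguments table below)
model = YOLO('yolov8n.pt')
onnx_path = model.export(format='onnx', dynamic=True, simplify=True)  # returns the exported file path

# Load the exported model with ONNX Runtime and inspect its inputs
import onnxruntime as ort

session = ort.InferenceSession(onnx_path, providers=['CPUExecutionProvider'])
print([inp.name for inp in session.get_inputs()])
```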
## Arguments

Export settings for YOLO models refer to the various configurations and options used to save or export the model for use in other environments or platforms. These settings can affect the model's performance, size, and compatibility with different systems. Common YOLO export settings include the exported model file format (e.g., ONNX, TensorFlow SavedModel), the device the model will run on (e.g., CPU, GPU), and the presence of additional features such as masks or multiple labels per box. Other factors that can affect the export process include the specific task the model is used for and the requirements or constraints of the target environment or platform. It is important to consider and configure these settings carefully to ensure the exported model is optimized for the intended use case and can be used effectively in the target environment.

| Key         | Value           | Description                                             |
|-------------|-----------------|----------------------------------------------------------|
| `format`    | `'torchscript'` | format to export to                                      |
| `imgsz`     | `640`           | image size as a scalar or an (h, w) list, e.g. (640, 480) |
| `keras`     | `False`         | use Keras for TensorFlow SavedModel export               |
| `optimize`  | `False`         | TorchScript: optimize for mobile                         |
| `half`      | `False`         | FP16 quantization                                        |
| `int8`      | `False`         | INT8 quantization                                        |
| `dynamic`   | `False`         | ONNX/TensorRT: dynamic axes                              |
| `simplify`  | `False`         | ONNX/TensorRT: simplify the model                        |
| `opset`     | `None`          | ONNX: opset version (optional, defaults to latest)       |
| `workspace` | `4`             | TensorRT: workspace size (GB)                            |
| `nms`       | `False`         | CoreML: add NMS                                          |

## Export Formats

Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, e.g. `format='onnx'` or `format='engine'`.

| Format | `format` Argument | Model | Metadata | Arguments |
|---|---|---|---|---|
| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | ✅ | - |
| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | ✅ | `imgsz`, `optimize` |
| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | ✅ | `imgsz`, `half` |
| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` |
| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | ✅ | `imgsz`, `keras` |
| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | ❌ | `imgsz` |
| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | ✅ | `imgsz`, `half`, `int8` |
| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | ✅ | `imgsz` |
| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | ✅ | `imgsz` |
| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | ✅ | `imgsz` |
| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | ✅ | `imgsz`, `half` |
+keywords: Ultralytics, YOLOv8, Machine Learning, Détection d'objets, Entraînement, Validation, Prédiction, Exportation, Suivi, Benchmarking
+---
+
+# Modes Ultralytics YOLOv8
+
+Écosystème Ultralytics YOLO et intégrations
+
+## Introduction
+
+Ultralytics YOLOv8 n'est pas simplement un autre modèle de détection d'objets ; c'est un cadre polyvalent conçu pour couvrir l'intégralité du cycle de vie des modèles d'apprentissage automatique : de l'ingestion de données et de l'entraînement des modèles à la validation, au déploiement et au suivi en conditions réelles. Chaque mode remplit un objectif spécifique et est conçu pour vous offrir la flexibilité et l'efficacité nécessaires pour différentes tâches et cas d'utilisation.
+
+**Regardez :** Tutoriel sur les modes Ultralytics : Entraînement, Validation, Prédiction, Exportation & Benchmark.
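+
+À titre d'illustration, ces modes s'enchaînent au sein d'une même API Python. L'esquisse ci-dessous est indicative et non un flux de travail imposé : elle suppose le modèle `yolov8n.pt` et le petit jeu de données d'exemple `coco128.yaml`, tous deux téléchargés automatiquement lors de la première utilisation.
+
+!!! Example "Exemple"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Charger un modèle pré-entraîné
+    model = YOLO('yolov8n.pt')
+
+    model.train(data='coco128.yaml', epochs=3)               # mode Train : affiner le modèle
+    model.val()                                              # mode Val : évaluer sur le jeu de validation
+    model.predict('https://ultralytics.com/images/bus.jpg')  # mode Predict : inférence sur de nouvelles données
+    model.export(format='onnx')                              # mode Export : préparer le déploiement
+    ```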
+ +### Aperรงu des Modes + +Comprendre les diffรฉrents **modes** pris en charge par Ultralytics YOLOv8 est crucial pour tirer le maximum de vos modรจles : + +- **Mode d'entraรฎnement (Train)** : Affinez votre modรจle sur des jeux de donnรฉes personnalisรฉs ou prรฉchargรฉs. +- **Mode de validation (Val)** : Un contrรดle post-entraรฎnement pour รฉvaluer la performance du modรจle. +- **Mode de prรฉdiction (Predict)** : Dรฉployez la puissance prรฉdictive de votre modรจle sur des donnรฉes du monde rรฉel. +- **Mode d'exportation (Export)** : Prรฉparez votre modรจle au dรฉploiement dans diffรฉrents formats. +- **Mode de suivi (Track)** : ร‰tendez votre modรจle de dรฉtection d'objets ร  des applications de suivi en temps rรฉel. +- **Mode benchmark (Benchmark)** : Analysez la vitesse et la prรฉcision de votre modรจle dans divers environnements de dรฉploiement. + +Ce guide complet vise ร  vous donner un aperรงu et des informations pratiques sur chaque mode, en vous aidant ร  exploiter tout le potentiel de YOLOv8. + +## [Entraรฎnement (Train)](train.md) + +Le mode d'entraรฎnement est utilisรฉ pour entraรฎner un modรจle YOLOv8 sur un jeu de donnรฉes personnalisรฉ. Dans ce mode, le modรจle est entraรฎnรฉ en utilisant le jeu de donnรฉes et les hyperparamรจtres spรฉcifiรฉs. Le processus d'entraรฎnement implique l'optimisation des paramรจtres du modรจle afin qu'il puisse prรฉdire avec prรฉcision les classes et les emplacements des objets dans une image. + +[Exemples d'entraรฎnement](train.md){ .md-button } + +## [Validation (Val)](val.md) + +Le mode de validation est utilisรฉ pour valider un modรจle YOLOv8 aprรจs qu'il ait รฉtรฉ entraรฎnรฉ. Dans ce mode, le modรจle est รฉvaluรฉ sur un ensemble de validation pour mesurer sa prรฉcision et sa capacitรฉ de gรฉnรฉralisation. Ce mode peut รชtre utilisรฉ pour ajuster les hyperparamรจtres du modรจle afin d'amรฉliorer ses performances. + +[Exemples de validation](val.md){ .md-button } + +## [Prรฉdiction (Predict)](predict.md) + +Le mode de prรฉdiction est utilisรฉ pour faire des prรฉdictions ร  l'aide d'un modรจle YOLOv8 entraรฎnรฉ sur de nouvelles images ou vidรฉos. Dans ce mode, le modรจle est chargรฉ ร  partir d'un fichier de checkpoint, et l'utilisateur peut fournir des images ou vidรฉos pour effectuer l'infรฉrence. Le modรจle prรฉdit les classes et les emplacements des objets dans les images ou vidรฉos fournies. + +[Exemples de prรฉdiction](predict.md){ .md-button } + +## [Exportation (Export)](export.md) + +Le mode d'exportation est utilisรฉ pour exporter un modรจle YOLOv8 dans un format pouvant รชtre utilisรฉ pour le dรฉploiement. Dans ce mode, le modรจle est converti dans un format pouvant รชtre utilisรฉ par d'autres applications logicielles ou dispositifs matรฉriels. Ce mode est pratique pour dรฉployer le modรจle dans des environnements de production. + +[Exemples d'exportation](export.md){ .md-button } + +## [Suivi (Track)](track.md) + +Le mode de suivi est utilisรฉ pour suivre des objets en temps rรฉel ร  l'aide d'un modรจle YOLOv8. Dans ce mode, le modรจle est chargรฉ ร  partir d'un fichier de checkpoint, et l'utilisateur peut fournir un flux vidรฉo en direct pour effectuer le suivi d'objets en temps rรฉel. Ce mode est utile pour des applications telles que les systรจmes de surveillance ou les voitures autonomes. + +[Exemples de suivi](track.md){ .md-button } + +## [Benchmark (Benchmark)](benchmark.md) + +Le mode benchmark est utilisรฉ pour profiler la vitesse et la prรฉcision de divers formats d'exportation pour YOLOv8. 
Les benchmarks fournissent des informations sur la taille du format exporté, ses métriques `mAP50-95` (pour la détection d'objets, la segmentation et la pose) ou `accuracy_top5` (pour la classification), ainsi que le temps d'inférence en millisecondes par image pour différents formats d'exportation comme ONNX, OpenVINO, TensorRT et autres. Ces informations peuvent aider les utilisateurs à choisir le format d'exportation optimal pour leur cas d'utilisation spécifique, en fonction de leurs exigences de vitesse et de précision.
+
+[Exemples de benchmark](benchmark.md){ .md-button }
diff --git a/ultralytics/docs/fr/modes/index.md:Zone.Identifier b/ultralytics/docs/fr/modes/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/fr/modes/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/fr/modes/predict.md b/ultralytics/docs/fr/modes/predict.md
new file mode 100755
index 0000000..4279dff
--- /dev/null
+++ b/ultralytics/docs/fr/modes/predict.md
@@ -0,0 +1,226 @@
+---
+comments: true
+description: Découvrez comment utiliser le mode de prédiction YOLOv8 pour diverses tâches. Apprenez-en plus sur les différentes sources d'inférence comme les images, les vidéos et les formats de données.
+keywords: Ultralytics, YOLOv8, mode de prédiction, sources d'inférence, tâches de prédiction, mode streaming, traitement d'images, traitement vidéo, apprentissage automatique, IA
+---
+
+# Prédiction de Modèle avec Ultralytics YOLO
+
+Écosystème et intégrations Ultralytics YOLO
+
+## Introduction
+
+Dans l'univers de l'apprentissage automatique et de la vision par ordinateur, le processus consistant à donner du sens aux données visuelles est appelé 'inférence' ou 'prédiction'. Ultralytics YOLOv8 propose une fonctionnalité puissante connue sous le nom de **mode de prédiction**, adaptée à l'inférence en temps réel et haute performance sur une large gamme de sources de données.
+
+**Regardez :** Comment Extraire les Sorties du Modèle Ultralytics YOLOv8 pour des Projets Personnalisés.
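+
+Avant d'entrer dans le détail, voici une esquisse minimale du mode de prédiction (l'URL d'image sert uniquement d'exemple) ; chaque option est détaillée dans la suite de cette page :
+
+!!! Example "Exemple"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Charger un modèle YOLOv8n pré-entraîné
+    model = YOLO('yolov8n.pt')
+
+    # Inférence sur une image ; stream=True renverrait un générateur économe en mémoire
+    results = model('https://ultralytics.com/images/bus.jpg')
+    for result in results:
+        print(result.boxes)  # boîtes englobantes détectées
+    ```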
+ +## Applications Rรฉelles + +| Fabrication | Sports | Sรฉcuritรฉ | +|:---------------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------------------------------:| +| ![Dรฉtection des Piรจces de Vรฉhicules](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Dรฉtection des Joueurs de Football](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![Dรฉtection de Chutes de Personnes](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| Dรฉtection des Piรจces de Vรฉhicules | Dรฉtection des Joueurs de Football | Dรฉtection de Chutes de Personnes | + +## Pourquoi Utiliser Ultralytics YOLO pour l'Infรฉrence ? + +Voici pourquoi vous devriez considรฉrer le mode de prรฉdiction YOLOv8 pour vos besoins variรฉs en infรฉrence : + +- **Polyvalence :** Capable de faire des infรฉrences sur des images, des vidรฉos et mรชme des flux en direct. +- **Performance :** Conรงu pour le traitement en temps rรฉel ร  grande vitesse sans sacrifier la prรฉcision. +- **Facilitรฉ d'Utilisation :** Interfaces Python et CLI intuitives pour un dรฉploiement et des tests rapides. +- **Trรจs Personnalisable :** Divers paramรจtres et rรฉglages pour ajuster le comportement d'infรฉrence du modรจle selon vos besoins spรฉcifiques. + +### Caractรฉristiques Clรฉs du Mode de Prรฉdiction + +Le mode de prรฉdiction YOLOv8 est conรงu pour รชtre robuste et polyvalent, avec des fonctionnalitรฉs telles que : + +- **Compatibilitรฉ avec Plusieurs Sources de Donnรฉes :** Que vos donnรฉes soient sous forme d'images individuelles, d'une collection d'images, de fichiers vidรฉo ou de flux vidรฉo en temps rรฉel, le mode de prรฉdiction rรฉpond ร  vos besoins. +- **Mode Streaming :** Utilisez la fonctionnalitรฉ de streaming pour gรฉnรฉrer un gรฉnรฉrateur efficace en termes de mรฉmoire d'objets `Results`. Activez-le en rรฉglant `stream=True` dans la mรฉthode d'appel du prรฉdicteur. +- **Traitement par Lots :** La capacitรฉ de traiter plusieurs images ou trames vidรฉo dans un seul lot, accรฉlรฉrant ainsi le temps d'infรฉrence. +- **Facile ร  Intรฉgrer :** S'intรจgre facilement dans les pipelines de donnรฉes existants et autres composants logiciels, grรขce ร  son API souple. + +Les modรจles YOLO d'Ultralytics renvoient soit une liste d'objets `Results` Python, soit un gรฉnรฉrateur Python efficace en termes de mรฉmoire d'objets `Results` lorsque `stream=True` est passรฉ au modรจle pendant l'infรฉrence : + +!!! 
Example "Prรฉdire" + + === "Renvoie une liste avec `stream=False`" + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # modรจle YOLOv8n prรฉ-entraรฎnรฉ + + # Exรฉcuter une infรฉrence par lots sur une liste d'images + results = model(['im1.jpg', 'im2.jpg']) # renvoie une liste d'objets Results + + # Traiter la liste des rรฉsultats + for result in results: + boxes = result.boxes # Objet Boxes pour les sorties bbox + masks = result.masks # Objet Masks pour les masques de segmentation + keypoints = result.keypoints # Objet Keypoints pour les sorties de pose + probs = result.probs # Objet Probs pour les sorties de classification + ``` + + === "Renvoie un gรฉnรฉrateur avec `stream=True`" + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # modรจle YOLOv8n prรฉ-entraรฎnรฉ + + # Exรฉcuter une infรฉrence par lots sur une liste d'images + results = model(['im1.jpg', 'im2.jpg'], stream=True) # renvoie un gรฉnรฉrateur d'objets Results + + # Traiter le gรฉnรฉrateur de rรฉsultats + for result in results: + boxes = result.boxes # Objet Boxes pour les sorties bbox + masks = result.masks # Objet Masks pour les masques de segmentation + keypoints = result.keypoints # Objet Keypoints pour les sorties de pose + probs = result.probs # Objet Probs pour les sorties de classification + ``` + +## Sources d'Infรฉrence + +YOLOv8 peut traiter diffรฉrents types de sources d'entrรฉe pour l'infรฉrence, comme illustrรฉ dans le tableau ci-dessous. Les sources incluent des images statiques, des flux vidรฉos et divers formats de donnรฉes. Le tableau indique รฉgalement si chaque source peut รชtre utilisรฉe en mode streaming avec l'argument `stream=True` โœ…. Le mode streaming est bรฉnรฉfique pour traiter des vidรฉos ou des flux en direct car il crรฉe un gรฉnรฉrateur de rรฉsultats au lieu de charger tous les cadres en mรฉmoire. + +!!! astuce "Astuce" + + Utilisez `stream=True` pour traiter des vidรฉos longues ou des jeux de donnรฉes volumineux afin de gรฉrer efficacement la mรฉmoire. Quand `stream=False`, les rรฉsultats pour tous les cadres ou points de donnรฉes sont stockรฉs en mรฉmoire, ce qui peut rapidement s'accumuler et provoquer des erreurs de mรฉmoire insuffisante pour de grandes entrรฉes. En revanche, `stream=True` utilise un gรฉnรฉrateur, qui ne garde que les rรฉsultats du cadre ou point de donnรฉes actuel en mรฉmoire, rรฉduisant considรฉrablement la consommation de mรฉmoire et prรฉvenant les problรจmes de mรฉmoire insuffisante. + +| Source | Argument | Type | Notes | +|-----------------|--------------------------------------------|-----------------|------------------------------------------------------------------------------------------------------------------------------| +| image | `'image.jpg'` | `str` ou `Path` | Fichier image unique. | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL vers une image. | +| capture d'รฉcran | `'screen'` | `str` | Prendre une capture d'รฉcran. | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | Format HWC avec canaux RGB. | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | Format HWC avec canaux BGR `uint8 (0-255)`. | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | Format HWC avec canaux BGR `uint8 (0-255)`. | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | Format BCHW avec canaux RGB `float32 (0.0-1.0)`. | +| CSV | `'sources.csv'` | `str` ou `Path` | Fichier CSV contenant des chemins vers des images, vidรฉos ou rรฉpertoires. 
| +| vidรฉo โœ… | `'video.mp4'` | `str` ou `Path` | Fichier vidรฉo dans des formats comme MP4, AVI, etc. | +| rรฉpertoire โœ… | `'chemin/'` | `str` ou `Path` | Chemin vers un rรฉpertoire contenant des images ou des vidรฉos. | +| motif global โœ… | `'chemin/*.jpg'` | `str` | Motif glob pour faire correspondre plusieurs fichiers. Utilisez le caractรจre `*` comme joker. | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | URL vers une vidรฉo YouTube. | +| flux โœ… | `'rtsp://exemple.com/media.mp4'` | `str` | URL pour des protocoles de streaming comme RTSP, RTMP, TCP, ou une adresse IP. | +| multi-flux โœ… | `'liste.streams'` | `str` ou `Path` | Fichier texte `*.streams` avec une URL de flux par ligne, c'est-ร -dire que 8 flux s'exรฉcuteront avec une taille de lot de 8. | + +Ci-dessous des exemples de code pour utiliser chaque type de source : + +!!! Example "Sources de prรฉdiction" + + === "image" + Exรฉcutez une infรฉrence sur un fichier image. + ```python + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Dรฉfinir le chemin vers le fichier image + source = 'chemin/vers/image.jpg' + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "capture d'รฉcran" + Exรฉcutez une infรฉrence sur le contenu actuel de l'รฉcran sous forme de capture d'รฉcran. + ```python + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Dรฉfinir la capture d'รฉcran actuelle comme source + source = 'screen' + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "URL" + Exรฉcutez une infรฉrence sur une image ou vidรฉo hรฉbergรฉe ร  distance via URL. + ```python + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Dรฉfinir l'URL d'une image ou vidรฉo distante + source = 'https://ultralytics.com/images/bus.jpg' + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "PIL" + Exรฉcutez une infรฉrence sur une image ouverte avec la bibliothรจque Python Imaging Library (PIL). + ```python + from PIL import Image + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Ouvrir une image avec PIL + source = Image.open('chemin/vers/image.jpg') + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "OpenCV" + Exรฉcutez une infรฉrence sur une image lue avec OpenCV. + ```python + import cv2 + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Lire une image avec OpenCV + source = cv2.imread('chemin/vers/image.jpg') + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "numpy" + Exรฉcutez une infรฉrence sur une image reprรฉsentรฉe sous forme de tableau numpy. 
+ ```python + import numpy as np + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Crรฉer un tableau numpy alรฉatoire de forme HWC (640, 640, 3) avec des valeurs dans l'intervalle [0, 255] et de type uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results + ``` + + === "torch" + Exรฉcutez une infรฉrence sur une image reprรฉsentรฉe sous forme de tenseur PyTorch. + ```python + import torch + from ultralytics import YOLO + + # Charger un modรจle YOLOv8n prรฉ-entraรฎnรฉ + model = YOLO('yolov8n.pt') + + # Crรฉer un tenseur alรฉatoire torch de forme BCHW (1, 3, 640, 640) avec des valeurs dans l'intervalle [0, 1] et de type float32 + source = torch.rand(1, 3, 640, 640, dtype=torch.float32) + + # Exรฉcuter une infรฉrence sur la source + results = model(source) # liste d'objets Results diff --git a/ultralytics/docs/fr/modes/predict.md:Zone.Identifier b/ultralytics/docs/fr/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/modes/track.md b/ultralytics/docs/fr/modes/track.md new file mode 100755 index 0000000..5da691c --- /dev/null +++ b/ultralytics/docs/fr/modes/track.md @@ -0,0 +1,200 @@ +--- +comments: true +description: Apprenez ร  utiliser Ultralytics YOLO pour le suivi d'objets dans les flux vidรฉo. Guides pour utiliser diffรฉrents traceurs et personnaliser les configurations de traceurs. +keywords: Ultralytics, YOLO, suivi d'objets, flux vidรฉo, BoT-SORT, ByteTrack, guide Python, guide CLI +--- + +# Suivi Multi-Objets avec Ultralytics YOLO + +Exemples de suivi multi-objets + +Le suivi d'objets dans le domaine de l'analyse vidรฉo est une tรขche essentielle qui non seulement identifie l'emplacement et la classe des objets ร  l'intรฉrieur de l'image, mais maintient รฉgalement un identifiant unique pour chaque objet dรฉtectรฉ au fur et ร  mesure que la vidรฉo progresse. Les applications sont illimitรฉes, allant de la surveillance et de la sรฉcuritรฉ ร  l'analytique sportive en temps rรฉel. + +## Pourquoi Choisir Ultralytics YOLO pour le Suivi d'Objet ? + +La sortie des traceurs Ultralytics est cohรฉrente avec la dรฉtection standard d'objets mais apporte la valeur ajoutรฉe des identifiants d'objets. Cela facilite le suivi des objets dans les flux vidรฉo et effectue des analyses subsรฉquentes. Voici pourquoi vous devriez envisager d'utiliser Ultralytics YOLO pour vos besoins de suivi d'objet : + +- **Efficacitรฉ :** Traitez les flux vidรฉo en temps rรฉel sans compromettre la prรฉcision. +- **Flexibilitรฉ :** Prend en charge de multiples algorithmes de suivi et configurations. +- **Facilitรฉ d'Utilisation :** API Python simple et options CLI pour une intรฉgration et un dรฉploiement rapides. +- **Personnalisabilitรฉ :** Facile ร  utiliser avec des modรจles YOLO entraรฎnรฉs sur mesure, permettant une intรฉgration dans des applications spรฉcifiques au domaine. + +
+**Regardez :** Détection et suivi d'objets avec Ultralytics YOLOv8.
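+
+La valeur ajoutée du suivi réside dans ces identifiants persistants. L'esquisse ci-dessous est indicative (le fichier `video.mp4` est un nom d'exemple) et montre comment lire les identifiants depuis les objets `Results` :
+
+!!! Example "Exemple"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Charger un modèle Detect officiel
+    model = YOLO('yolov8n.pt')
+
+    # Le mode suivi attache un identifiant persistant à chaque boîte détectée
+    for result in model.track(source='video.mp4', stream=True):
+        ids = result.boxes.id  # None tant qu'aucune piste n'est confirmée
+        if ids is not None:
+            print(ids.tolist())  # identifiants des objets suivis dans la trame courante
+    ```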
+ +## Applications dans le Monde Rรฉel + +| Transport | Distribution | Aquaculture | +|:------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------:| +| ![Suivi de vรฉhicules](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![Suivi de personnes](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![Suivi de poissons](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) | +| Suivi de Vรฉhicules | Suivi de Personnes | Suivi de Poissons | + +## Caractรฉristiques en Bref + +Ultralytics YOLO รฉtend ses fonctionnalitรฉs de dรฉtection d'objets pour fournir un suivi d'objets robuste et polyvalent : + +- **Suivi en Temps Rรฉel :** Suivi fluide d'objets dans des vidรฉos ร  frรฉquence d'images รฉlevรฉe. +- **Prise en Charge de Multiples Traceurs :** Choisissez parmi une variรฉtรฉ d'algorithmes de suivi รฉprouvรฉs. +- **Configurations de Traceurs Personnalisables :** Adaptez l'algorithme de suivi pour rรฉpondre ร  des exigences spรฉcifiques en rรฉglant divers paramรจtres. + +## Traceurs Disponibles + +Ultralytics YOLO prend en charge les algorithmes de suivi suivants. Ils peuvent รชtre activรฉs en passant le fichier de configuration YAML correspondant tel que `tracker=tracker_type.yaml` : + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Utilisez `botsort.yaml` pour activer ce traceur. +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Utilisez `bytetrack.yaml` pour activer ce traceur. + +Le traceur par dรฉfaut est BoT-SORT. + +## Suivi + +Pour exรฉcuter le traceur sur des flux vidรฉo, utilisez un modรจle Detect, Segment ou Pose formรฉ tel que YOLOv8n, YOLOv8n-seg et YOLOv8n-pose. + +!!! 
Example "Exemple"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Charger un modèle officiel ou personnalisé
+        model = YOLO('yolov8n.pt')  # Charger un modèle Detect officiel
+        model = YOLO('yolov8n-seg.pt')  # Charger un modèle Segment officiel
+        model = YOLO('yolov8n-pose.pt')  # Charger un modèle Pose officiel
+        model = YOLO('chemin/vers/best.pt')  # Charger un modèle entraîné personnalisé
+
+        # Effectuer le suivi avec le modèle
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # Suivi avec le traceur par défaut
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # Suivi avec le traceur ByteTrack
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Effectuer le suivi avec divers modèles en utilisant l'interface en ligne de commande
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # Modèle Detect officiel
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # Modèle Segment officiel
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # Modèle Pose officiel
+        yolo track model=chemin/vers/best.pt source="https://youtu.be/LNwODJXcvt4"  # Modèle entraîné personnalisé
+
+        # Suivi en utilisant le traceur ByteTrack
+        yolo track model=chemin/vers/best.pt tracker="bytetrack.yaml"
+        ```
+
+Comme on peut le voir dans l'utilisation ci-dessus, le suivi est disponible pour tous les modèles Detect, Segment et Pose exécutés sur des vidéos ou des sources de diffusion.
+
+## Configuration
+
+### Arguments de Suivi
+
+La configuration du suivi partage des propriétés avec le mode Prédiction, telles que `conf`, `iou` et `show`. Pour des configurations supplémentaires, référez-vous à la page [Predict](https://docs.ultralytics.com/modes/predict/) du modèle.
+
+!!! Example "Exemple"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Configurer les paramètres de suivi et exécuter le traceur
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Configurer les paramètres de suivi et exécuter le traceur en utilisant l'interface en ligne de commande
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### Sélection du Traceur
+
+Ultralytics vous permet également d'utiliser un fichier de configuration de traceur modifié. Pour cela, faites simplement une copie d'un fichier de configuration de traceur (par exemple, `custom_tracker.yaml`) à partir de [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) et modifiez toute configuration (à l'exception du `tracker_type`) selon vos besoins.
+
+!!!
Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger le modรจle et exรฉcuter le traceur avec un fichier de configuration personnalisรฉ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # Charger le modรจle et exรฉcuter le traceur avec un fichier de configuration personnalisรฉ en utilisant l'interface en ligne de commande + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +Pour une liste complรจte des arguments de suivi, rรฉfรฉrez-vous ร  la page [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers). + +## Exemples Python + +### Boucle de Persistance des Pistes + +Voici un script Python utilisant OpenCV (`cv2`) et YOLOv8 pour exรฉcuter le suivi d'objet sur des images vidรฉo. Ce script suppose toujours que vous avez dรฉjร  installรฉ les packages nรฉcessaires (`opencv-python` et `ultralytics`). L'argument `persist=True` indique au traceur que l'image ou la trame actuelle est la suivante dans une sรฉquence et s'attend ร  ce que les pistes de l'image prรฉcรฉdente soient prรฉsentes dans l'image actuelle. + +!!! Example "Boucle for streaming avec suivi" + + ```python + import cv2 + from ultralytics import YOLO + + # Charger le modรจle YOLOv8 + model = YOLO('yolov8n.pt') + + # Ouvrir le fichier vidรฉo + video_path = "chemin/vers/video.mp4" + cap = cv2.VideoCapture(video_path) + + # Parcourir les images vidรฉo + while cap.isOpened(): + # Lire une image de la vidรฉo + success, frame = cap.read() + + if success: + # Exรฉcuter le suivi YOLOv8 sur l'image, en persistant les pistes entre les images + results = model.track(frame, persist=True) + + # Visualiser les rรฉsultats sur l'image + annotated_frame = results[0].plot() + + # Afficher l'image annotรฉe + cv2.imshow("Suivi YOLOv8", annotated_frame) + + # Interrompre la boucle si 'q' est pressรฉe + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # Interrompre la boucle si la fin de la vidรฉo est atteinte + break + + # Relรขcher l'objet de capture vidรฉo et fermer la fenรชtre d'affichage + cap.release() + cv2.destroyAllWindows() + ``` + +Veuillez noter le changement de `model(frame)` ร  `model.track(frame)`, qui active le suivi d'objet ร  la place de la simple dรฉtection. Ce script modifiรฉ exรฉcutera le traceur sur chaque image de la vidรฉo, visualisera les rรฉsultats et les affichera dans une fenรชtre. La boucle peut รชtre quittรฉe en appuyant sur 'q'. + +## Contribuer de Nouveaux Traceurs + +รŠtes-vous compรฉtent en suivi multi-objets et avez-vous rรฉussi ร  implรฉmenter ou adapter un algorithme de suivi avec Ultralytics YOLO ? Nous vous invitons ร  contribuer ร  notre section Traceurs sur [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) ! Vos applications et solutions dans le monde rรฉel pourraient รชtre inestimables pour les utilisateurs travaillant sur des tรขches de suivi. + +En contribuant ร  cette section, vous aidez ร  รฉlargir l'รฉventail des solutions de suivi disponibles au sein du cadre Ultralytics YOLO, ajoutant une autre couche de fonctionnalitรฉ et d'utilitรฉ pour la communautรฉ. + +Pour initier votre contribution, veuillez vous rรฉfรฉrer ร  notre [Guide de Contribution](https://docs.ultralytics.com/help/contributing) pour des instructions complรจtes sur la soumission d'une Pull Request (PR) ๐Ÿ› ๏ธ. 
Nous sommes impatients de voir ce que vous apportez ร  la table ! + +Ensemble, amรฉliorons les capacitรฉs de suivi de l'รฉcosystรจme Ultralytics YOLO ๐Ÿ™ ! diff --git a/ultralytics/docs/fr/modes/track.md:Zone.Identifier b/ultralytics/docs/fr/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/modes/train.md b/ultralytics/docs/fr/modes/train.md new file mode 100755 index 0000000..652a918 --- /dev/null +++ b/ultralytics/docs/fr/modes/train.md @@ -0,0 +1,206 @@ +--- +comments: true +description: Guide รฉtape par รฉtape pour entraรฎner des modรจles YOLOv8 avec Ultralytics YOLO incluant des exemples d'entraรฎnement mono-GPU et multi-GPU +keywords: Ultralytics, YOLOv8, YOLO, dรฉtection d'objet, mode entraรฎnement, jeu de donnรฉes personnalisรฉ, entraรฎnement GPU, multi-GPU, hyperparamรจtres, exemples CLI, exemples Python +--- + +# Entraรฎnement de modรจles avec Ultralytics YOLO + +Ultralytics YOLO รฉcosystรจme et intรฉgrations + +## Introduction + +L'entraรฎnement d'un modรจle d'apprentissage profond implique de lui fournir des donnรฉes et d'ajuster ses paramรจtres afin qu'il puisse faire des prรฉdictions prรฉcises. Le mode Entraรฎnement de Ultralytics YOLOv8 est conรงu pour un entraรฎnement efficace et performant de modรจles de dรฉtection d'objets, en utilisant pleinement les capacitรฉs du matรฉriel moderne. Ce guide vise ร  couvrir tous les dรฉtails nรฉcessaires pour commencer ร  entraรฎner vos propres modรจles en utilisant l'ensemble robuste de fonctionnalitรฉs de YOLOv8. + +
+**Regardez :** Comment entraîner un modèle YOLOv8 sur votre jeu de données personnalisé dans Google Colab.
+
+## Pourquoi choisir Ultralytics YOLO pour l'entraînement ?
+
+Voici quelques raisons convaincantes de choisir le mode Entraînement de YOLOv8 :
+
+- **Efficacité :** Optimisez l'utilisation de votre matériel, que vous soyez sur une configuration mono-GPU ou que vous passiez à l'échelle sur plusieurs GPUs.
+- **Polyvalence :** Entraînez sur des jeux de données personnalisés en plus de ceux déjà disponibles comme COCO, VOC et ImageNet.
+- **Convivialité :** Interfaces CLI et Python simples mais puissantes pour une expérience d'entraînement directe.
+- **Flexibilité des hyperparamètres :** Un large éventail d'hyperparamètres personnalisables pour peaufiner les performances du modèle.
+
+### Principales caractéristiques du mode Entraînement
+
+Voici quelques caractéristiques remarquables du mode Entraînement de YOLOv8 :
+
+- **Téléchargement automatique de jeux de données :** Les jeux de données standards comme COCO, VOC et ImageNet sont téléchargés automatiquement lors de la première utilisation.
+- **Support multi-GPU :** Répartissez l'entraînement de manière fluide sur plusieurs GPUs pour accélérer le processus.
+- **Configuration des hyperparamètres :** La possibilité de modifier les hyperparamètres via des fichiers de configuration YAML ou des arguments CLI.
+- **Visualisation et suivi :** Suivi en temps réel des métriques d'entraînement et visualisation du processus d'apprentissage pour de meilleures perspectives.
+
+!!! Tip "Astuce"
+
+    * Les jeux de données YOLOv8 comme COCO, VOC, ImageNet et bien d'autres se téléchargent automatiquement lors de la première utilisation, par exemple `yolo train data=coco.yaml`
+
+## Exemples d'utilisation
+
+Entraînez YOLOv8n sur le jeu de données COCO128 pendant 100 époques avec une taille d'image de 640. Le dispositif d'entraînement peut être spécifié à l'aide de l'argument `device`. Si aucun argument n'est passé, le GPU `device=0` sera utilisé s'il est disponible, sinon `device=cpu` sera utilisé. Consultez la section Arguments ci-dessous pour obtenir une liste complète des arguments d'entraînement.
+
+!!! Example "Exemple d'entraînement mono-GPU et CPU"
+
+    Le dispositif est déterminé automatiquement. Si un GPU est disponible, il sera utilisé, sinon l'entraînement commencera sur CPU.
+ + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.yaml') # construire un nouveau modรจle ร  partir de YAML + model = YOLO('yolov8n.pt') # charger un modรจle prรฉentraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # construire ร  partir de YAML et transfรฉrer les poids + + # Entraรฎner le modรจle + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "CLI" + + ```bash + # Construire un nouveau modรจle ร  partir de YAML et commencer l'entraรฎnement ร  partir de zรฉro + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # Commencer l'entraรฎnement ร  partir d'un modรจle prรฉentraรฎnรฉ *.pt + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # Construire un nouveau modรจle ร  partir de YAML, transfรฉrer les poids prรฉentraรฎnรฉs et commencer l'entraรฎnement + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### Entraรฎnement multi-GPU + +L'entraรฎnement multi-GPU permet une utilisation plus efficace des ressources matรฉrielles disponibles en rรฉpartissant la charge d'entraรฎnement sur plusieurs GPUs. Cette fonctionnalitรฉ est disponible via l'API Python et l'interface de ligne de commande. Pour activer l'entraรฎnement multi-GPU, spรฉcifiez les ID des dispositifs GPU que vous souhaitez utiliser. + +!!! Example "Exemple d'entraรฎnement multi-GPU" + + Pour s'entraรฎner avec 2 GPUs, les dispositifs CUDA 0 et 1, utilisez les commandes suivantes. Dรฉveloppez ร  des GPUs supplรฉmentaires selon le besoin. + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # charger un modรจle prรฉentraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + + # Entraรฎner le modรจle avec 2 GPUs + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "CLI" + + ```bash + # Commencer l'entraรฎnement ร  partir d'un modรจle prรฉentraรฎnรฉ *.pt en utilisant les GPUs 0 et 1 + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### Entraรฎnement MPS avec Apple M1 et M2 + +Avec le support pour les puces Apple M1 et M2 intรฉgrรฉ dans les modรจles Ultralytics YOLO, il est maintenant possible d'entraรฎner vos modรจles sur des dispositifs utilisant le puissant framework Metal Performance Shaders (MPS). Le MPS offre un moyen performant d'exรฉcuter des tรขches de calcul et de traitement d'image sur le silicium personnalisรฉ d'Apple. + +Pour activer l'entraรฎnement sur les puces Apple M1 et M2, vous devez spรฉcifier 'mps' comme votre dispositif lors du lancement du processus d'entraรฎnement. Voici un exemple de la maniรจre dont vous pourriez le faire en Python et via la ligne de commande : + +!!! 
Example "Exemple d'entraรฎnement MPS" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # charger un modรจle prรฉentraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + + # Entraรฎner le modรจle avec MPS + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "CLI" + + ```bash + # Commencer l'entraรฎnement ร  partir d'un modรจle prรฉentraรฎnรฉ *.pt avec MPS + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +Tout en exploitant la puissance de calcul des puces M1/M2, cela permet un traitement plus efficace des tรขches d'entraรฎnement. Pour des conseils plus dรฉtaillรฉs et des options de configuration avancรฉe, veuillez consulter la [documentation MPS de PyTorch](https://pytorch.org/docs/stable/notes/mps.html). + +## Journalisation + +Lors de l'entraรฎnement d'un modรจle YOLOv8, il peut รชtre prรฉcieux de suivre la performance du modรจle au fil du temps. C'est lร  que la journalisation entre en jeu. YOLO d'Ultralytics prend en charge trois types de journaux - Comet, ClearML et TensorBoard. + +Pour utiliser un journal, sรฉlectionnez-le dans le menu dรฉroulant ci-dessus et exรฉcutez-le. Le journal choisi sera installรฉ et initialisรฉ. + +### Comet + +[Comet](https://www.comet.ml/site/) est une plateforme qui permet aux scientifiques de donnรฉes et aux dรฉveloppeurs de suivre, comparer, expliquer et optimiser les expรฉriences et les modรจles. Elle offre des fonctionnalitรฉs telles que le suivi en temps rรฉel des mesures, les diffรฉrences de code et le suivi des hyperparamรจtres. + +Pour utiliser Comet : + +!!! Example "Exemple" + + === "Python" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +N'oubliez pas de vous connecter ร  votre compte Comet sur leur site web et d'obtenir votre clรฉ API. Vous devrez ajouter cela ร  vos variables d'environnement ou ร  votre script pour enregistrer vos expรฉriences. + +### ClearML + +[ClearML](https://www.clear.ml/) est une plateforme open source qui automatise le suivi des expรฉriences et aide ร  partager efficacement les ressources. Elle est conรงue pour aider les รฉquipes ร  gรฉrer, exรฉcuter et reproduire leur travail en ML plus efficacement. + +Pour utiliser ClearML : + +!!! Example "Exemple" + + === "Python" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +Aprรจs avoir exรฉcutรฉ ce script, vous devrez vous connecter ร  votre compte ClearML sur le navigateur et authentifier votre session. + +### TensorBoard + +[TensorBoard](https://www.tensorflow.org/tensorboard) est un ensemble d'outils de visualisation pour TensorFlow. Il vous permet de visualiser votre graphique TensorFlow, de tracer des mesures quantitatives sur l'exรฉcution de votre graphique et de montrer des donnรฉes supplรฉmentaires comme des images qui le traversent. + +Pour utiliser TensorBoard dans [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb) : + +!!! Example "Exemple" + + === "CLI" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # remplacer par le rรฉpertoire 'runs' + ``` + +Pour utiliser TensorBoard localement, exรฉcutez la commande ci-dessous et consultez les rรฉsultats ร  l'adresse http://localhost:6006/. + +!!! 
Example "Exemple" + + === "CLI" + ```bash + tensorboard --logdir ultralytics/runs # remplacer par le rรฉpertoire 'runs' + ``` + +Cela chargera TensorBoard et le dirigera vers le rรฉpertoire oรน vos journaux d'entraรฎnement sont sauvegardรฉs. + +Aprรจs avoir configurรฉ votre journal, vous pouvez ensuite poursuivre l'entraรฎnement de votre modรจle. Toutes les mรฉtriques d'entraรฎnement seront automatiquement enregistrรฉes sur votre plateforme choisie, et vous pourrez accรฉder ร  ces journaux pour surveiller les performances de votre modรจle au fil du temps, comparer diffรฉrents modรจles et identifier les domaines d'amรฉlioration. diff --git a/ultralytics/docs/fr/modes/train.md:Zone.Identifier b/ultralytics/docs/fr/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/modes/val.md b/ultralytics/docs/fr/modes/val.md new file mode 100755 index 0000000..f6f51bf --- /dev/null +++ b/ultralytics/docs/fr/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: Guide de validation des modรจles YOLOv8. Apprenez ร  รฉvaluer la performance de vos modรจles YOLO en utilisant les paramรจtres de validation et les mรฉtriques avec des exemples en Python et en CLI. +keywords: Ultralytics, YOLO Docs, YOLOv8, validation, รฉvaluation de modรจle, hyperparamรจtres, prรฉcision, mรฉtriques, Python, CLI +--- + +# Validation des modรจles avec Ultralytics YOLO + +ร‰cosystรจme Ultralytics YOLO et intรฉgrations + +## Introduction + +La validation est une รฉtape cruciale dans le pipeline d'apprentissage automatique, vous permettant d'รฉvaluer la qualitรฉ de vos modรจles entraรฎnรฉs. Le mode Val dans Ultralytics YOLOv8 offre une gamme robuste d'outils et de mรฉtriques pour รฉvaluer la performance de vos modรจles de dรฉtection d'objets. Ce guide sert de ressource complรจte pour comprendre comment utiliser efficacement le mode Val pour assurer que vos modรจles sont ร  la fois prรฉcis et fiables. + +## Pourquoi valider avec Ultralytics YOLO ? + +Voici pourquoi l'utilisation du mode Val de YOLOv8 est avantageuse : + +- **Prรฉcision :** Obtenez des mรฉtriques prรฉcises telles que mAP50, mAP75 et mAP50-95 pour รฉvaluer de maniรจre exhaustive votre modรจle. +- **Convenance :** Utilisez des fonctionnalitรฉs intรฉgrรฉes qui se souviennent des paramรจtres d'entraรฎnement, simplifiant ainsi le processus de validation. +- **Flexibilitรฉ :** Validez votre modรจle avec les mรชmes jeux de donnรฉes ou des jeux diffรฉrents et des tailles d'image variรฉes. +- **Rรฉglage des hyperparamรจtres :** Utilisez les mรฉtriques de validation pour peaufiner votre modรจle pour de meilleures performances. + +### Caractรฉristiques clรฉs du mode Val + +Voici les fonctionnalitรฉs notables offertes par le mode Val de YOLOv8 : + +- **Paramรจtres Automatisรฉs :** Les modรจles se souviennent de leurs configurations d'entraรฎnement pour une validation simple. +- **Support Multi-mรฉtrique :** ร‰valuez votre modรจle en fonction d'une gamme de mรฉtriques de prรฉcision. +- **CLI et API Python :** Choisissez entre l'interface en ligne de commande ou l'API Python en fonction de vos prรฉfรฉrences pour la validation. +- **Compatibilitรฉ des Donnรฉes :** Fonctionne de maniรจre transparente avec les jeux de donnรฉes utilisรฉs pendant la phase d'entraรฎnement ainsi qu'avec les jeux personnalisรฉs. + +!!! 
Tip "Conseil"
+
+    * Les modèles YOLOv8 se souviennent automatiquement de leurs paramètres d'entraînement, vous pouvez donc facilement valider un modèle à la même taille d'image et sur le jeu de données d'origine avec simplement `yolo val model=yolov8n.pt` ou `model('yolov8n.pt').val()`
+
+## Exemples d'utilisation
+
+Validez la précision du modèle YOLOv8n entraîné sur le jeu de données COCO128. Aucun argument n'a besoin d'être passé car le `modèle` conserve ses `données` d'entraînement et ses arguments comme attributs. Consultez la section des arguments ci-dessous pour une liste complète des arguments de validation.
+
+!!! Example "Exemple"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Charger un modèle
+        model = YOLO('yolov8n.pt')  # charger un modèle officiel
+        model = YOLO('chemin/vers/best.pt')  # charger un modèle personnalisé
+
+        # Valider le modèle
+        metrics = model.val()  # pas besoin d'arguments, jeu de données et paramètres mémorisés
+        metrics.box.map    # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps   # une liste contenant map50-95 de chaque catégorie
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # valider un modèle officiel
+        yolo detect val model=chemin/vers/best.pt  # valider un modèle personnalisé
+        ```
+
+## Arguments
+
+Les paramètres de validation pour les modèles YOLO font référence aux divers hyperparamètres et configurations utilisés pour évaluer la performance du modèle sur un jeu de données de validation. Ces paramètres peuvent affecter la performance, la vitesse et la précision du modèle. Certains paramètres de validation YOLO courants incluent la taille du lot, la fréquence à laquelle la validation est effectuée pendant l'entraînement et les métriques utilisées pour évaluer la performance du modèle. D'autres facteurs pouvant affecter le processus de validation incluent la taille et la composition du jeu de données de validation et la tâche spécifique pour laquelle le modèle est utilisé. Il est important de régler et d'expérimenter soigneusement ces paramètres pour s'assurer que le modèle fonctionne bien sur le jeu de données de validation et pour détecter et prévenir le surajustement.
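+
+Par exemple, pour surcharger explicitement quelques-uns des paramètres du tableau ci-dessous lors d'une validation (esquisse indicative ; les valeurs reprennent simplement les défauts documentés) :
+
+!!! Example "Exemple"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Charger le modèle à valider
+    model = YOLO('yolov8n.pt')
+
+    # Valider en précisant explicitement quelques paramètres
+    metrics = model.val(data='coco128.yaml', imgsz=640, batch=16, conf=0.001, iou=0.6)
+    print(metrics.box.map)  # mAP50-95
+    ```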
+ +| Clรฉ | Valeur | Description | +|---------------|---------|------------------------------------------------------------------------------------------------| +| `data` | `None` | chemin vers le fichier de donnรฉes, par exemple coco128.yaml | +| `imgsz` | `640` | taille des images d'entrรฉe en tant qu'entier | +| `batch` | `16` | nombre d'images par lot (-1 pour AutoBatch) | +| `save_json` | `False` | sauvegarder les rรฉsultats dans un fichier JSON | +| `save_hybrid` | `False` | sauvegarder la version hybride des รฉtiquettes (รฉtiquettes + prรฉdictions supplรฉmentaires) | +| `conf` | `0.001` | seuil de confiance de l'objet pour la dรฉtection | +| `iou` | `0.6` | seuil d'intersection sur union (IoU) pour la NMS | +| `max_det` | `300` | nombre maximum de dรฉtections par image | +| `half` | `True` | utiliser la prรฉcision moitiรฉ (FP16) | +| `device` | `None` | appareil sur lequel exรฉcuter, par exemple cuda device=0/1/2/3 ou device=cpu | +| `dnn` | `False` | utiliser OpenCV DNN pour l'infรฉrence ONNX | +| `plots` | `False` | afficher les graphiques lors de la formation | +| `rect` | `False` | val rectangulaire avec chaque lot regroupรฉ pour un minimum de rembourrage | +| `split` | `val` | fraction du jeu de donnรฉes ร  utiliser pour la validation, par exemple 'val', 'test' ou 'train' | +| diff --git a/ultralytics/docs/fr/modes/val.md:Zone.Identifier b/ultralytics/docs/fr/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/quickstart.md b/ultralytics/docs/fr/quickstart.md new file mode 100755 index 0000000..025ae5d --- /dev/null +++ b/ultralytics/docs/fr/quickstart.md @@ -0,0 +1,198 @@ +--- +comments: true +description: Explorez diverses mรฉthodes pour installer Ultralytics en utilisant pip, conda, git et Docker. Apprenez comment utiliser Ultralytics avec l'interface en ligne de commande ou au sein de vos projets Python. +keywords: installation d'Ultralytics, pip install Ultralytics, Docker install Ultralytics, interface en ligne de commande Ultralytics, interface Python Ultralytics +--- + +## Installer Ultralytics + +Ultralytics propose diverses mรฉthodes d'installation, y compris pip, conda et Docker. Installez YOLOv8 via le package `ultralytics` avec pip pour obtenir la derniรจre version stable ou en clonant le [rรฉpertoire GitHub d'Ultralytics](https://github.com/ultralytics/ultralytics) pour la version la plus rรฉcente. Docker peut รชtre utilisรฉ pour exรฉcuter le package dans un conteneur isolรฉ, รฉvitant l'installation locale. + +!!! Example "Installer" + + === "Installation avec Pip (recommandรฉ)" + Installez le package `ultralytics` en utilisant pip, ou mettez ร  jour une installation existante en exรฉcutant `pip install -U ultralytics`. Visitez l'Index des Packages Python (PyPI) pour plus de dรฉtails sur le package `ultralytics` : [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/). + + [![Version PyPI](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Tรฉlรฉchargements](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # Installer le package ultralytics depuis PyPI + pip install ultralytics + ``` + + Vous pouvez รฉgalement installer le package `ultralytics` directement depuis le [rรฉpertoire GitHub](https://github.com/ultralytics/ultralytics). 
Cela peut รชtre utile si vous voulez la version de dรฉveloppement la plus rรฉcente. Assurez-vous d'avoir l'outil en ligne de commande Git installรฉ sur votre systรจme. La commande `@main` installe la branche `main` et peut รชtre modifiรฉe pour une autre branche, p. ex. `@my-branch`, ou supprimรฉe entiรจrement pour revenir par dรฉfaut ร  la branche `main`. + + ```bash + # Installer le package ultralytics depuis GitHub + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Installation avec Conda" + Conda est un gestionnaire de packages alternatif ร  pip qui peut รฉgalement รชtre utilisรฉ pour l'installation. Visitez Anaconda pour plus de dรฉtails ร  [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). Le rรฉpertoire feedstock d'Ultralytics pour la mise ร  jour du package conda est sur [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/). + + + [![Recette Conda](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Tรฉlรฉchargements Conda](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Version Conda](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Plateformes Conda](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # Installer le package ultralytics en utilisant conda + conda install -c conda-forge ultralytics + ``` + + !!! Note "Note" + + Si vous installez dans un environnement CUDA, la meilleure pratique est d'installer `ultralytics`, `pytorch` et `pytorch-cuda` dans la mรชme commande pour permettre au gestionnaire de package conda de rรฉsoudre les conflits, ou bien d'installer `pytorch-cuda` en dernier pour lui permettre de remplacer le package `pytorch` spรฉcifique aux CPU si nรฉcessaire. + ```bash + # Installer tous les packages ensemble en utilisant conda + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Image Docker Conda + + Les images Docker Conda d'Ultralytics sont รฉgalement disponibles sur [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). Ces images sont basรฉes sur [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) et constituent un moyen simple de commencer ร  utiliser `ultralytics` dans un environnement Conda. + + ```bash + # Dรฉfinir le nom de l'image comme variable + t=ultralytics/ultralytics:latest-conda + + # Tรฉlรฉcharger la derniรจre image ultralytics de Docker Hub + sudo docker pull $t + + # Exรฉcuter l'image ultralytics dans un conteneur avec support GPU + sudo docker run -it --ipc=host --gpus all $t # tous les GPUs + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # spรฉcifier les GPUs + ``` + + === "Clone Git" + Clonez le rรฉpertoire `ultralytics` si vous รชtes intรฉressรฉ par la contribution au dรฉveloppement ou si vous souhaitez expรฉrimenter avec le dernier code source. Aprรจs le clonage, naviguez dans le rรฉpertoire et installez le package en mode รฉditable `-e` en utilisant pip. + ```bash + # Cloner le rรฉpertoire ultralytics + git clone https://github.com/ultralytics/ultralytics + + # Naviguer vers le rรฉpertoire clonรฉ + cd ultralytics + + # Installer le package en mode รฉditable pour le dรฉveloppement + pip install -e . 
+ ``` + +Voir le fichier [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) d'`ultralytics` pour une liste des dรฉpendances. Notez que tous les exemples ci-dessus installent toutes les dรฉpendances requises. + +
+**Watch:** Ultralytics YOLO Quick Start Guide
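+
+Une fois l'installation terminée, vous pouvez vérifier votre environnement avec la commande `checks` (équivalente à `yolo checks` en CLI, présentée plus bas). Esquisse ; la sortie exacte dépend de votre machine :
+
+!!! Example "Exemple"
+
+    ```python
+    import ultralytics
+
+    # Affiche la version d'Ultralytics, le matériel détecté et l'état des dépendances
+    ultralytics.checks()
+    ```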
+ +!!! astuce "Conseil" + + Les prรฉrequis de PyTorch varient selon le systรจme d'exploitation et les exigences CUDA, donc il est recommandรฉ d'installer PyTorch en premier en suivant les instructions sur [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally). + + + Instructions d'installation de PyTorch + + +## Utiliser Ultralytics avec CLI + +L'interface en ligne de commande (CLI) d'Ultralytics permet l'utilisation de commandes simples en une seule ligne sans nรฉcessiter d'environnement Python. La CLI ne requiert pas de personnalisation ou de code Python. Vous pouvez simplement exรฉcuter toutes les tรขches depuis le terminal avec la commande `yolo`. Consultez le [Guide CLI](/../usage/cli.md) pour en savoir plus sur l'utilisation de YOLOv8 depuis la ligne de commande. + +!!! Example "Exemple" + + === "Syntaxe" + + Les commandes `yolo` d'Ultralytics utilisent la syntaxe suivante : + ```bash + yolo Tร‚CHE MODE ARGS + + Oรน Tร‚CHE (facultatif) est l'une de [detect, segment, classify] + MODE (obligatoire) est l'un de [train, val, predict, export, track] + ARGS (facultatif) sont n'importe quel nombre de paires personnalisรฉes 'arg=valeur' comme 'imgsz=320' qui remplacent les valeurs par dรฉfaut. + ``` + Voyez tous les ARGS dans le [Guide de Configuration](/../usage/cfg.md) complet ou avec `yolo cfg` + + === "Entraรฎnement" + + Entraรฎnez un modรจle de dรฉtection pour 10 epochs avec un learning_rate initial de 0.01 + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "Prรฉdiction" + + Prรฉdisez une vidรฉo YouTube en utilisant un modรจle de segmentation prรฉ-entraรฎnรฉ ร  une taille d'image de 320 : + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "Validation" + + Validez un modรจle de dรฉtection prรฉ-entraรฎnรฉ avec un batch-size de 1 et une taille d'image de 640 : + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "Exportation" + + Exportez un modรจle de classification YOLOv8n au format ONNX ร  une taille d'image de 224 par 128 (pas de Tร‚CHE requise) + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "Spรฉcial" + + Exรฉcutez des commandes spรฉciales pour voir la version, afficher les paramรจtres, effectuer des vรฉrifications et plus encore : + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "Avertissement" + + Les arguments doivent รชtre passรฉs sous forme de paires `arg=val`, sรฉparรฉs par un signe รฉgal `=` et dรฉlimitรฉs par des espaces ` ` entre les paires. N'utilisez pas de prรฉfixes d'arguments `--` ou de virgules `,` entre les arguments. + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`   โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`   โŒ + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`   โŒ + +[Guide CLI](/../usage/cli.md){ .md-button } + +## Utiliser Ultralytics avec Python + +L'interface Python de YOLOv8 permet une intรฉgration transparente dans vos projets Python, facilitant le chargement, l'exรฉcution et le traitement de la sortie du modรจle. Conรงue avec simplicitรฉ et facilitรฉ d'utilisation ร  l'esprit, l'interface Python permet aux utilisateurs de mettre en ล“uvre rapidement la dรฉtection d'objets, la segmentation et la classification dans leurs projets. 
Cela fait de l'interface Python de YOLOv8 un outil inestimable pour quiconque cherche ร  intรฉgrer ces fonctionnalitรฉs dans ses projets Python. + +Par exemple, les utilisateurs peuvent charger un modรจle, l'entraรฎner, รฉvaluer ses performances sur un set de validation, et mรชme l'exporter au format ONNX avec seulement quelques lignes de code. Consultez le [Guide Python](/../usage/python.md) pour en savoir plus sur l'utilisation de YOLOv8 au sein de vos projets Python. + +!!! Example "Exemple" + + ```python + from ultralytics import YOLO + + # Crรฉer un nouveau modรจle YOLO ร  partir de zรฉro + model = YOLO('yolov8n.yaml') + + # Charger un modรจle YOLO prรฉ-entraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + model = YOLO('yolov8n.pt') + + # Entraรฎner le modรจle en utilisant le jeu de donnรฉes 'coco128.yaml' pour 3 epochs + rรฉsultats = model.train(data='coco128.yaml', epochs=3) + + # ร‰valuer la performance du modรจle sur le set de validation + rรฉsultats = model.val() + + # Effectuer la dรฉtection d'objets sur une image en utilisant le modรจle + rรฉsultats = model('https://ultralytics.com/images/bus.jpg') + + # Exporter le modรจle au format ONNX + succรจs = model.export(format='onnx') + ``` + +[Guide Python](/../usage/python.md){.md-button .md-button--primary} diff --git a/ultralytics/docs/fr/quickstart.md:Zone.Identifier b/ultralytics/docs/fr/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/tasks/classify.md b/ultralytics/docs/fr/tasks/classify.md new file mode 100755 index 0000000..c9d8155 --- /dev/null +++ b/ultralytics/docs/fr/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: Apprenez-en davantage sur les modรจles de classification d'images YOLOv8 Classify. Obtenez des informations dรฉtaillรฉes sur la liste des modรจles prรฉ-entraรฎnรฉs et comment entraรฎner, valider, prรฉdire et exporter des modรจles. +keywords: Ultralytics, YOLOv8, Classification d'images, Modรจles prรฉ-entraรฎnรฉs, YOLOv8n-cls, Entraรฎnement, Validation, Prรฉdiction, Exportation de modรจles +--- + +# Classification d'images + +Exemples de classification d'images + +La classification d'images est la tรขche la plus simple des trois et consiste ร  classer une image entiรจre dans l'une d'un ensemble de classes prรฉdรฉfinies. + +Le rรฉsultat d'un classificateur d'images est une รฉtiquette de classe unique et un score de confiance. La classification d'images est utile lorsque vous avez besoin de savoir seulement ร  quelle classe appartient une image et que vous n'avez pas besoin de connaรฎtre l'emplacement des objets de cette classe ou leur forme exacte. + +!!! Tip "Astuce" + + Les modรจles YOLOv8 Classify utilisent le suffixe `-cls`, par exemple `yolov8n-cls.pt` et sont prรฉ-entraรฎnรฉs sur [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +## [Modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +Les modรจles Classify prรฉ-entraรฎnรฉs YOLOv8 sont prรฉsentรฉs ici. Les modรจles Detect, Segment et Pose sont prรฉ-entraรฎnรฉs sur le dataset [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), tandis que les modรจles Classify sont prรฉ-entraรฎnรฉs sur le dataset [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). 
+
+Les [modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se tรฉlรฉchargent automatiquement depuis la derniรจre version Ultralytics [release](https://github.com/ultralytics/assets/releases) lors de la premiรจre utilisation.
+
+| Modรจle | taille<br>(pixels) | acc<br>top1 | acc<br>top5 | Vitesse<br>CPU ONNX<br>(ms) | Vitesse<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) ร  640 |
+|--------|--------------------|-------------|-------------|------------------------------|-----------------------------------|----------------|---------------------|
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+- Les valeurs **acc** sont les prรฉcisions des modรจles sur le jeu de donnรฉes de validation d'[ImageNet](https://www.image-net.org/).
+<br>Pour reproduire : `yolo val classify data=path/to/ImageNet device=0`
+- Les **vitesses** sont calculรฉes sur les images de validation d'ImageNet ร  l'aide d'une instance [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
+<br>
Pour reproduire : `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## Entraรฎnement + +Entraรฎnez le modรจle YOLOv8n-cls sur le dataset MNIST160 pendant 100 รฉpoques avec une taille d'image de 64. Pour une liste complรจte des arguments disponibles, consultez la page [Configuration](/../usage/cfg.md). + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-cls.yaml') # construire un nouveau modรจle ร  partir du YAML + model = YOLO('yolov8n-cls.pt') # charger un modรจle prรฉ-entraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # construire ร  partir du YAML et transfรฉrer les poids + + # Entraรฎner le modรจle + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # Construire un nouveau modรจle ร  partir du YAML et commencer l'entraรฎnement ร  partir de zรฉro + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # Commencer l'entraรฎnement ร  partir d'un modรจle *.pt prรฉ-entraรฎnรฉ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # Construire un nouveau modรจle ร  partir du YAML, transfรฉrer les poids prรฉ-entraรฎnรฉs et commencer l'entraรฎnement + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### Format du dataset + +Le format du dataset de classification YOLO peut รชtre trouvรฉ en dรฉtails dans le [Guide des Datasets](../../../datasets/classify/index.md). + +## Validation + +Validez la prรฉcision du modรจle YOLOv8n-cls entraรฎnรฉ sur le dataset MNIST160. Aucun argument n'est nรฉcessaire car le `modรจle` conserve ses donnรฉes d'entraรฎnement et arguments en tant qu'attributs du modรจle. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-cls.pt') # charger un modรจle officiel + model = YOLO('path/to/best.pt') # charger un modรจle personnalisรฉ + + # Valider le modรจle + metrics = model.val() # aucun argument nรฉcessaire, les donnรฉes et les paramรจtres sont mรฉmorisรฉs + metrics.top1 # prรฉcision top 1 + metrics.top5 # prรฉcision top 5 + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # valider le modรจle officiel + yolo classify val model=path/to/best.pt # valider le modรจle personnalisรฉ + ``` + +## Prรฉdiction + +Utilisez un modรจle YOLOv8n-cls entraรฎnรฉ pour exรฉcuter des prรฉdictions sur des images. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-cls.pt') # charger un modรจle officiel + model = YOLO('path/to/best.pt') # charger un modรจle personnalisรฉ + + # Prรฉdire avec le modรจle + results = model('https://ultralytics.com/images/bus.jpg') # prรฉdire sur une image + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdiction avec le modรจle officiel + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdiction avec le modรจle personnalisรฉ + ``` + +Voir les dรฉtails complets du mode `predict` sur la page [Prรฉdire](https://docs.ultralytics.com/modes/predict/). + +## Exportation + +Exportez un modรจle YOLOv8n-cls dans un format diffรฉrent comme ONNX, CoreML, etc. + +!!! 
Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-cls.pt') # charger un modรจle officiel + model = YOLO('path/to/best.pt') # charger un modรจle entraรฎnรฉ personnalisรฉ + + # Exporter le modรจle + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # exporter le modรจle officiel + yolo export model=path/to/best.pt format=onnx # exporter le modรจle entraรฎnรฉ personnalisรฉ + ``` + +Les formats d'exportation disponibles pour YOLOv8-cls sont prรฉsentรฉs dans le tableau ci-dessous. Vous pouvez prรฉdire ou valider directement sur les modรจles exportรฉs, par exemple `yolo predict model=yolov8n-cls.onnx`. Des exemples d'utilisation sont prรฉsentรฉs pour votre modรจle une fois l'exportation terminรฉe. + +| Format | Argument `format` | Modรจle | Mรฉtadonnรฉes | Arguments | +|--------------------------------------------------------------------|-------------------|-------------------------------|-------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +Voir les dรฉtails complets de l'`exportation` sur la page [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/fr/tasks/classify.md:Zone.Identifier b/ultralytics/docs/fr/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/tasks/detect.md b/ultralytics/docs/fr/tasks/detect.md new file mode 100755 index 0000000..21ad108 --- /dev/null +++ b/ultralytics/docs/fr/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Documentation officielle pour YOLOv8 par Ultralytics. Apprenez comment entraรฎner, valider, prรฉdire et exporter des modรจles dans diffรฉrents formats. Incluant des statistiques de performances dรฉtaillรฉes. 
+keywords: YOLOv8, Ultralytics, dรฉtection d'objets, modรจles prรฉ-entraรฎnรฉs, entraรฎnement, validation, prรฉdiction, exportation de modรจles, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# Dรฉtection d'Objets + +Exemples de dรฉtection d'objets + +La dรฉtection d'objets est une tรขche qui implique l'identification de l'emplacement et de la classe des objets dans une image ou un flux vidรฉo. + +La sortie d'un dรฉtecteur d'objets est un ensemble de boรฎtes englobantes qui entourent les objets de l'image, accompagnรฉes de libellรฉs de classe et de scores de confiance pour chaque boรฎte. La dรฉtection d'objets est un bon choix lorsque vous avez besoin d'identifier des objets d'intรฉrรชt dans une scรจne, mais que vous n'avez pas besoin de connaรฎtre exactement oรน se trouve l'objet ou sa forme exacte. + +
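+ร€ titre d'illustration, voici une รฉbauche minimale en Python montrant comment lire ces boรฎtes englobantes, รฉtiquettes de classe et scores de confiance ร  partir des rรฉsultats (en supposant qu'un poids `yolov8n.pt` et une image locale `bus.jpg` sont disponibles) :
+
+```python
+from ultralytics import YOLO
+
+# Charger un modรจle de dรฉtection prรฉ-entraรฎnรฉ
+model = YOLO('yolov8n.pt')
+
+# Exรฉcuter l'infรฉrence sur une image
+results = model('bus.jpg')
+
+# Parcourir les boรฎtes englobantes dรฉtectรฉes
+for box in results[0].boxes:
+    x1, y1, x2, y2 = box.xyxy[0].tolist()  # coordonnรฉes de la boรฎte
+    etiquette = model.names[int(box.cls)]  # libellรฉ de classe
+    conf = float(box.conf)                 # score de confiance
+    print(f'{etiquette} {conf:.2f} : ({x1:.0f}, {y1:.0f}) -> ({x2:.0f}, {y2:.0f})')
+```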

+
+**Regardez :** Dรฉtection d'Objets avec le Modรจle Prรฉ-entraรฎnรฉ Ultralytics YOLOv8.
+
+!!! Tip "Conseil"
+
+    Les modรจles Detect YOLOv8 sont les modรจles YOLOv8 par dรฉfaut, c.-ร -d. `yolov8n.pt` et sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [Modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Les modรจles prรฉ-entraรฎnรฉs Detect YOLOv8 sont prรฉsentรฉs ici. Les modรจles Detect, Segment et Pose sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), tandis que les modรจles Classify sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+[Les modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se tรฉlรฉchargent automatiquement ร  partir de la derniรจre [version](https://github.com/ultralytics/assets/releases) d'Ultralytics lors de la premiรจre utilisation.
+
+| Modรจle | Taille<br>(pixels) | mAPval<br>50-95 | Vitesse<br>CPU ONNX<br>(ms) | Vitesse<br>A100 TensorRT<br>(ms) | Paramรจtres<br>(M) | FLOPs<br>(B) |
+|--------|--------------------|------------------|------------------------------|-----------------------------------|--------------------|---------------|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- Les valeurs de **mAPval** sont pour un seul modรจle ร  une seule รฉchelle sur le jeu de donnรฉes [COCO val2017](http://cocodataset.org).
+<br>Reproductible avec `yolo val detect data=coco.yaml device=0`
+- La **Vitesse** est moyennรฉe sur les images COCO val en utilisant une instance [Amazon EC2 P4d](https://aws.amazon.com/fr/ec2/instance-types/p4/).
+<br>
Reproductible avec `yolo val detect data=coco128.yaml batch=1 device=0|cpu` + +## Entraรฎnement + +Entraรฎnez le modรจle YOLOv8n sur le jeu de donnรฉes COCO128 pendant 100 รฉpoques ร  la taille d'image de 640. Pour une liste complรจte des arguments disponibles, consultez la page [Configuration](/../usage/cfg.md). + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.yaml') # construire un nouveau modรจle ร  partir de YAML + model = YOLO('yolov8n.pt') # charger un modรจle prรฉ-entraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # construire ร  partir de YAML et transfรฉrer les poids + + # Entraรฎner le modรจle + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construire un nouveau modรจle ร  partir de YAML et commencer l'entraรฎnement ร  partir de zรฉro + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # Commencer l'entraรฎnement ร  partir d'un modรจle *.pt prรฉ-entraรฎnรฉ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # Construire un nouveau modรจle ร  partir de YAML, transfรฉrer les poids prรฉ-entraรฎnรฉs et commencer l'entraรฎnement + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### Format des donnรฉes + +Le format des jeux de donnรฉes de dรฉtection YOLO est dรฉtaillรฉ dans le [Guide des Jeux de Donnรฉes](../../../datasets/detect/index.md). Pour convertir votre jeu de donnรฉes existant depuis d'autres formats (comme COCO, etc.) vers le format YOLO, veuillez utiliser l'outil [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) par Ultralytics. + +## Validation + +Validez la prรฉcision du modรจle YOLOv8n entraรฎnรฉ sur le jeu de donnรฉes COCO128. Aucun argument n'est nรฉcessaire puisque le `modรจle` conserve ses `donnรฉes` d'entraรฎnement et arguments en tant qu'attributs du modรจle. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle personnalisรฉ + + # Valider le modรจle + metrics = model.val() # pas d'arguments nรฉcessaires, jeu de donnรฉes et paramรจtres enregistrรฉs + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # une liste contenant map50-95 de chaque catรฉgorie + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # valider le modรจle officiel + yolo detect val model=chemin/vers/best.pt # valider le modรจle personnalisรฉ + ``` + +## Prรฉdiction + +Utilisez un modรจle YOLOv8n entraรฎnรฉ pour exรฉcuter des prรฉdictions sur des images. + +!!! 
Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle personnalisรฉ + + # Prรฉdire avec le modรจle + results = model('https://ultralytics.com/images/bus.jpg') # prรฉdire sur une image + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec le modรจle officiel + yolo detect predict model=chemin/vers/best.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec le modรจle personnalisรฉ + ``` + +Consultez les dรฉtails complets du mode `predict` sur la page [Prรฉdire](https://docs.ultralytics.com/modes/predict/). + +## Exportation + +Exportez un modรจle YOLOv8n dans un format diffรฉrent tel que ONNX, CoreML, etc. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle entraรฎnรฉ personnalisรฉ + + # Exporter le modรจle + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # exporter le modรจle officiel + yolo export model=chemin/vers/best.pt format=onnx # exporter le modรจle entraรฎnรฉ personnalisรฉ + ``` + +Les formats d'exportation YOLOv8 disponibles sont prรฉsentรฉs dans le tableau ci-dessous. Vous pouvez directement prรฉdire ou valider sur des modรจles exportรฉs, c'est-ร -dire `yolo predict model=yolov8n.onnx`. Des exemples d'utilisation sont prรฉsentรฉs pour votre modรจle aprรจs l'exportation complรจte. + +| Format | Argument `format` | Modรจle | Mรฉtadonnรฉes | Arguments | +|----------------------------------------------------------------------|-------------------|---------------------------|-------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [Modรจle TF Enregistrรฉ](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [GraphDef TF](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TPU Edge TF](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Consultez tous les dรฉtails `export` sur la page [Exporter](https://docs.ultralytics.com/modes/export/). 
diff --git a/ultralytics/docs/fr/tasks/detect.md:Zone.Identifier b/ultralytics/docs/fr/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/tasks/index.md b/ultralytics/docs/fr/tasks/index.md new file mode 100755 index 0000000..7c0260a --- /dev/null +++ b/ultralytics/docs/fr/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: Dรฉcouvrez les tรขches fondamentales de vision par ordinateur que YOLOv8 peut effectuer, y compris la dรฉtection, la segmentation, la classification et l'estimation de la pose. Comprenez leur utilitรฉ dans vos projets d'IA. +keywords: Ultralytics, YOLOv8, Dรฉtection, Segmentation, Classification, Estimation de la Pose, Cadre IA, Tรขches de Vision par Ordinateur +--- + +# Tรขches d'Ultralytics YOLOv8 + +
+Tรขches prises en charge par Ultralytics YOLO
+
+YOLOv8 est un cadre d'intelligence artificielle qui prend en charge de multiples tรขches de **vision par ordinateur**. Le cadre peut รชtre utilisรฉ pour effectuer de la [dรฉtection](detect.md), de la [segmentation](segment.md), de la [classification](classify.md) et de l'estimation de la [pose](pose.md). Chacune de ces tรขches a un objectif et un cas d'utilisation diffรฉrents.
+
+!!! Note "Note"
+
+    ๐Ÿšง Notre documentation multilingue est actuellement en construction et nous travaillons dur pour l'amรฉliorer. Merci de votre patience ! ๐Ÿ™
+
+**Regardez :** Explorez les Tรขches YOLO Ultralytics : Dรฉtection d'Objets, Segmentation, Suivi et Estimation de la Pose.
+ +## [Dรฉtection](detect.md) + +La dรฉtection est la tรขche principale prise en charge par YOLOv8. Elle implique de dรฉtecter des objets dans une image ou une trame vidรฉo et de dessiner des boรฎtes englobantes autour d'eux. Les objets dรฉtectรฉs sont classรฉs dans diffรฉrentes catรฉgories en fonction de leurs caractรฉristiques. YOLOv8 peut dรฉtecter plusieurs objets dans une seule image ou trame vidรฉo avec une grande prรฉcision et rapiditรฉ. + +[Exemples de Dรฉtection](detect.md){ .md-button } + +## [Segmentation](segment.md) + +La segmentation est une tรขche qui implique de segmenter une image en diffรฉrentes rรฉgions en fonction du contenu de l'image. Chaque rรฉgion se voit attribuer une รฉtiquette en fonction de son contenu. Cette tรขche est utile dans des applications telles que la segmentation d'image et l'imagerie mรฉdicale. YOLOv8 utilise une variante de l'architecture U-Net pour effectuer la segmentation. + +[Exemples de Segmentation](segment.md){ .md-button } + +## [Classification](classify.md) + +La classification est une tรขche qui implique de classer une image dans diffรฉrentes catรฉgories. YOLOv8 peut รชtre utilisรฉ pour classifier des images en fonction de leur contenu. Il utilise une variante de l'architecture EfficientNet pour effectuer la classification. + +[Exemples de Classification](classify.md){ .md-button } + +## [Pose](pose.md) + +La dรฉtection de pose/points clรฉs est une tรขche qui implique de dรฉtecter des points spรฉcifiques dans une image ou une trame vidรฉo. Ces points sont appelรฉs points clรฉs et sont utilisรฉs pour suivre le mouvement ou pour l'estimation de la pose. YOLOv8 peut dรฉtecter des points clรฉs dans une image ou une trame vidรฉo avec une grande prรฉcision et rapiditรฉ. + +[Exemples de Pose](pose.md){ .md-button } + +## Conclusion + +YOLOv8 prend en charge de multiples tรขches, y compris la dรฉtection, la segmentation, la classification et la dรฉtection de points clรฉs. Chacune de ces tรขches a des objectifs et des cas d'utilisation diffรฉrents. En comprenant les diffรฉrences entre ces tรขches, vous pouvez choisir la tรขche appropriรฉe pour votre application de vision par ordinateur. diff --git a/ultralytics/docs/fr/tasks/index.md:Zone.Identifier b/ultralytics/docs/fr/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/tasks/pose.md b/ultralytics/docs/fr/tasks/pose.md new file mode 100755 index 0000000..7a972de --- /dev/null +++ b/ultralytics/docs/fr/tasks/pose.md @@ -0,0 +1,176 @@ +--- +comments: true +description: Apprenez ร  utiliser Ultralytics YOLOv8 pour des tรขches d'estimation de pose. Trouvez des modรจles prรฉ-entraรฎnรฉs, apprenez ร  entraรฎner, valider, prรฉdire et exporter vos propres modรจles. +keywords: Ultralytics, YOLO, YOLOv8, estimation de pose, dรฉtection de points clรฉs, dรฉtection d'objet, modรจles prรฉ-entraรฎnรฉs, apprentissage automatique, intelligence artificielle +--- + +# Estimation de Pose + +![Estimation de pose exemples](https://user-images.githubusercontent.com/26833433/243418616-9811ac0b-a4a7-452a-8aba-484ba32bb4a8.png) + +L'estimation de pose est une tรขche qui consiste ร  identifier l'emplacement de points spรฉcifiques dans une image, souvent appelรฉs points clรฉs. Ces points clรฉs peuvent reprรฉsenter diffรฉrentes parties de l'objet telles que les articulations, les repรจres ou d'autres caractรฉristiques distinctives. 
L'emplacement des points clรฉs est gรฉnรฉralement reprรฉsentรฉ par un ensemble de coordonnรฉes 2D `[x, y]` ou 3D `[x, y, visible]`.
+
+La sortie d'un modรจle d'estimation de pose est un ensemble de points reprรฉsentant les points clรฉs sur un objet dans l'image, gรฉnรฉralement accompagnรฉs des scores de confiance pour chaque point. L'estimation de pose est un bon choix lorsque vous avez besoin d'identifier des parties spรฉcifiques d'un objet dans une scรจne, et leur emplacement les uns par rapport aux autres.
+
+[Regardez : Estimation de Pose avec Ultralytics YOLOv8](https://www.youtube.com/embed/Y28xXQmju64?si=pCY4ZwejZFu6Z4kZ)
+
+!!! Tip "Conseil"
+
+    Les modรจles YOLOv8 _pose_ utilisent le suffixe `-pose`, c'est-ร -dire `yolov8n-pose.pt`. Ces modรจles sont entraรฎnรฉs sur le jeu de donnรฉes [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) et conviennent ร  une variรฉtรฉ de tรขches d'estimation de pose.
+
+## [Modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Les modรจles Pose prรฉ-entraรฎnรฉs YOLOv8 sont montrรฉs ici. Les modรจles Detect, Segment et Pose sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), tandis que les modรจles Classify sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+Les [Modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se tรฉlรฉchargent automatiquement ร  partir de la derniรจre version d'Ultralytics [release](https://github.com/ultralytics/assets/releases) lors de la premiรจre utilisation.
+
+| Modรจle | taille<br>(pixels) | mAPpose<br>50-95 | mAPpose<br>50 | Vitesse<br>CPU ONNX<br>(ms) | Vitesse<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+|--------|--------------------|-------------------|----------------|------------------------------|-----------------------------------|----------------|---------------|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- Les valeurs de **mAPval** sont pour un seul modรจle ร  une seule รฉchelle sur le jeu de donnรฉes [COCO Keypoints val2017](http://cocodataset.org).
+<br>Reproduire avec `yolo val pose data=coco-pose.yaml device=0`
+- La **vitesse** moyenne sur les images de validation COCO en utilisant une instance [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
+<br>
Reproduire avec `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## Entraรฎnement + +Entraรฎnez un modรจle YOLOv8-pose sur le jeu de donnรฉes COCO128-pose. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-pose.yaml') # construire un nouveau modรจle ร  partir du YAML + model = YOLO('yolov8n-pose.pt') # charger un modรจle prรฉ-entraรฎnรฉ (recommandรฉ pour l'entraรฎnement) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # construire ร  partir du YAML et transfรฉrer les poids + + # Entraรฎner le modรจle + rรฉsultats = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construire un nouveau modรจle ร  partir du YAML et commencer l'entraรฎnement ร  partir de zรฉro + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Commencer l'entraรฎnement ร  partir d'un modรจle *.pt prรฉ-entraรฎnรฉ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Construire un nouveau modรจle ร  partir du YAML, transfรฉrer les poids prรฉ-entraรฎnรฉs et commencer l'entraรฎnement + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Format du jeu de donnรฉes + +Le format du jeu de donnรฉes YOLO pose peut รชtre trouvรฉ en dรฉtail dans le [Guide des jeux de donnรฉes](../../../datasets/pose/index.md). Pour convertir votre jeu de donnรฉes existant ร  partir d'autres formats (comme COCO, etc.) vers le format YOLO, veuillez utiliser l'outil [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) d'Ultralytics. + +## Val + +Validez la prรฉcision du modรจle YOLOv8n-pose entraรฎnรฉ sur le jeu de donnรฉes COCO128-pose. Aucun argument n'est nรฉcessaire car le `modรจle` conserve ses donnรฉes d'entraรฎnement et arguments en tant qu'attributs du modรจle. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-pose.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle personnalisรฉ + + # Valider le modรจle + mรฉtriques = model.val() # aucun argument nรฉcessaire, jeu de donnรฉes et paramรจtres mรฉmorisรฉs + mรฉtriques.box.map # map50-95 + mรฉtriques.box.map50 # map50 + mรฉtriques.box.map75 # map75 + mรฉtriques.box.maps # une liste contenant map50-95 de chaque catรฉgorie + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # val modรจle officiel + yolo pose val model=chemin/vers/best.pt # val modรจle personnalisรฉ + ``` + +## Prรฉdiction + +Utilisez un modรจle YOLOv8n-pose entraรฎnรฉ pour exรฉcuter des prรฉdictions sur des images. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-pose.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle personnalisรฉ + + # Prรฉdire avec le modรจle + rรฉsultats = model('https://ultralytics.com/images/bus.jpg') # prรฉdire sur une image + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec modรจle officiel + yolo pose predict model=chemin/vers/best.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec modรจle personnalisรฉ + ``` + +Consultez les dรฉtails complets du mode `predict` sur la page [Prรฉdire](https://docs.ultralytics.com/modes/predict/). 
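+
+Pour exploiter cette sortie, voici une รฉbauche minimale qui lit les coordonnรฉes des points clรฉs dรฉtectรฉs (en supposant `yolov8n-pose.pt` et une image locale `bus.jpg`) :
+
+```python
+from ultralytics import YOLO
+
+# Charger un modรจle de pose prรฉ-entraรฎnรฉ
+model = YOLO('yolov8n-pose.pt')
+
+# Exรฉcuter l'infรฉrence
+results = model('bus.jpg')
+
+# Coordonnรฉes [x, y] des points clรฉs pour chaque personne dรฉtectรฉe
+for kpts in results[0].keypoints.xy:
+    print(kpts.shape)  # (17, 2) : les 17 points clรฉs du schรฉma COCO
+```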
+ +## Exportation + +Exportez un modรจle YOLOv8n Pose dans un autre format tel que ONNX, CoreML, etc. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-pose.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/best.pt') # charger un modรจle personnalisรฉ entraรฎnรฉ + + # Exporter le modรจle + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # exporter modรจle officiel + yolo export model=chemin/vers/best.pt format=onnx # exporter modรจle personnalisรฉ entraรฎnรฉ + ``` + +Les formats d'exportation YOLOv8-pose disponibles sont dans le tableau ci-dessous. Vous pouvez prรฉdire ou valider directement sur des modรจles exportรฉs, par exemple `yolo predict model=yolov8n-pose.onnx`. Des exemples d'utilisation sont montrรฉs pour votre modรจle aprรจs la fin de l'exportation. + +| Format | Argument `format` | Modรจle | Mรฉtadonnรฉes | Arguments | +|--------------------------------------------------------------------|-------------------|--------------------------------|-------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +Consultez les dรฉtails complets de `export` sur la page [Exporter](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/fr/tasks/pose.md:Zone.Identifier b/ultralytics/docs/fr/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/fr/tasks/segment.md b/ultralytics/docs/fr/tasks/segment.md new file mode 100755 index 0000000..a49d64b --- /dev/null +++ b/ultralytics/docs/fr/tasks/segment.md @@ -0,0 +1,189 @@ +--- +comments: true +description: Apprenez ร  utiliser les modรจles de segmentation d'instance avec Ultralytics YOLO. Instructions pour la formation, la validation, la prรฉdiction d'image et l'exportation de modรจle. 
+keywords: yolov8, segmentation d'instance, Ultralytics, jeu de donnรฉes COCO, segmentation d'image, dรฉtection d'objet, formation de modรจle, validation de modรจle, prรฉdiction d'image, exportation de modรจle +--- + +# Segmentation d'Instance + +Exemples de segmentation d'instance + +La segmentation d'instance va plus loin que la dรฉtection d'objet et implique d'identifier des objets individuels dans une image et de les segmenter du reste de l'image. + +Le rรฉsultat d'un modรจle de segmentation d'instance est un ensemble de masques ou de contours qui dรฉlimitent chaque objet dans l'image, accompagnรฉs d'รฉtiquettes de classe et de scores de confiance pour chaque objet. La segmentation d'instance est utile lorsque vous avez besoin de savoir non seulement oรน se trouvent les objets dans une image, mais aussi quelle est leur forme exacte. + +
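+
+ร€ titre d'illustration, voici une รฉbauche minimale qui rรฉcupรจre ces masques ร  partir des rรฉsultats (en supposant `yolov8n-seg.pt` et une image locale `bus.jpg`) :
+
+```python
+from ultralytics import YOLO
+
+# Charger un modรจle de segmentation prรฉ-entraรฎnรฉ
+model = YOLO('yolov8n-seg.pt')
+
+# Exรฉcuter l'infรฉrence
+results = model('bus.jpg')
+
+# Masques binaires et contours polygonaux de chaque instance
+masks = results[0].masks
+if masks is not None:
+    print(masks.data.shape)  # (n, H, W) : un masque binaire par instance
+    print(len(masks.xy))     # contours polygonaux en coordonnรฉes pixels
+```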

+
+**Regarder :** Exรฉcutez la Segmentation avec le Modรจle Ultralytics YOLOv8 Prรฉ-Entraรฎnรฉ en Python.
+
+!!! Tip "Astuce"
+
+    Les modรจles YOLOv8 Segment utilisent le suffixe `-seg`, par exemple `yolov8n-seg.pt` et sont prรฉ-entraรฎnรฉs sur [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [Modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Les modรจles Segment prรฉ-entraรฎnรฉs YOLOv8 sont indiquรฉs ici. Les modรจles Detect, Segment et Pose sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), tandis que les modรจles Classify sont prรฉ-entraรฎnรฉs sur le jeu de donnรฉes [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+Les [modรจles](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) se tรฉlรฉchargent automatiquement depuis la derniรจre [version](https://github.com/ultralytics/assets/releases) Ultralytics lors de la premiรจre utilisation.
+
+| Modรจle | Taille<br>(pixels) | mAPboรฎte<br>50-95 | mAPmasque<br>50-95 | Vitesse<br>CPU ONNX<br>(ms) | Vitesse<br>A100 TensorRT<br>(ms) | Paramรจtres<br>(M) | FLOPs<br>(B) |
+|--------|--------------------|--------------------|---------------------|------------------------------|-----------------------------------|--------------------|---------------|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- Les valeurs **mAPval** sont pour un seul modรจle ร  une seule รฉchelle sur le jeu de donnรฉes [COCO val2017](http://cocodataset.org).
+<br>Pour reproduire, utilisez `yolo val segment data=coco.yaml device=0`
+- **Vitesse** moyennรฉe sur les images COCO val en utilisant une instance [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
+<br>
Pour reproduire, utilisez `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Formation + +Entraรฎnez YOLOv8n-seg sur le jeu de donnรฉes COCO128-seg pendant 100 รฉpoques ร  la taille d'image 640. Pour une liste complรจte des arguments disponibles, consultez la page [Configuration](/../usage/cfg.md). + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-seg.yaml') # construire un nouveau modรจle ร  partir du YAML + model = YOLO('yolov8n-seg.pt') # charger un modรจle prรฉ-entraรฎnรฉ (recommandรฉ pour la formation) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # construire ร  partir du YAML et transfรฉrer les poids + + # Entraรฎner le modรจle + rรฉsultats = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construire un nouveau modรจle ร  partir du YAML et commencer la formation ร  partir de zรฉro + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Commencer la formation ร  partir d'un modรจle *.pt prรฉ-entraรฎnรฉ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Construire un nouveau modรจle ร  partir du YAML, transfรฉrer les poids prรฉ-entraรฎnรฉs et commencer la formation + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Format des donnรฉes + +Le format des donnรฉes de segmentation YOLO peut รชtre trouvรฉ en dรฉtail dans le [Guide du Jeu de Donnรฉes](../../../datasets/segment/index.md). Pour convertir votre jeu de donnรฉes existant ร  partir d'autres formats (comme COCO, etc.) au format YOLO, veuillez utiliser l'outil [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) par Ultralytics. + +## Validation + +Validez la prรฉcision du modรจle YOLOv8n-seg entraรฎnรฉ sur le jeu de donnรฉes COCO128-seg. Aucun argument n'est nรฉcessaire car le `modรจle` +conserve ses donnรฉes de formation et ses arguments comme attributs du modรจle. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-seg.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/le/meilleur.pt') # charger un modรจle personnalisรฉ + + # Valider le modรจle + mรฉtriques = model.val() # aucun argument nรฉcessaire, jeu de donnรฉes et paramรจtres mรฉmorisรฉs + mรฉtriques.box.map # map50-95(B) + mรฉtriques.box.map50 # map50(B) + mรฉtriques.box.map75 # map75(B) + mรฉtriques.box.maps # une liste contient map50-95(B) de chaque catรฉgorie + mรฉtriques.seg.map # map50-95(M) + mรฉtriques.seg.map50 # map50(M) + mรฉtriques.seg.map75 # map75(M) + mรฉtriques.seg.maps # une liste contient map50-95(M) de chaque catรฉgorie + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # valider le modรจle officiel + yolo segment val model=chemin/vers/le/meilleur.pt # valider le modรจle personnalisรฉ + ``` + +## Prรฉdiction + +Utilisez un modรจle YOLOv8n-seg entraรฎnรฉ pour effectuer des prรฉdictions sur des images. + +!!! 
Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-seg.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/le/meilleur.pt') # charger un modรจle personnalisรฉ + + # Prรฉdire avec le modรจle + rรฉsultats = model('https://ultralytics.com/images/bus.jpg') # prรฉdire sur une image + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec le modรจle officiel + yolo segment predict model=chemin/vers/le/meilleur.pt source='https://ultralytics.com/images/bus.jpg' # prรฉdire avec le modรจle personnalisรฉ + ``` + +Voir les dรฉtails complets du mode `predict` sur la page [Predict](https://docs.ultralytics.com/modes/predict/). + +## Exportation + +Exportez un modรจle YOLOv8n-seg vers un format diffรฉrent comme ONNX, CoreML, etc. + +!!! Example "Exemple" + + === "Python" + + ```python + from ultralytics import YOLO + + # Charger un modรจle + model = YOLO('yolov8n-seg.pt') # charger un modรจle officiel + model = YOLO('chemin/vers/le/meilleur.pt') # charger un modรจle entraรฎnรฉ personnalisรฉ + + # Exporter le modรจle + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # exporter le modรจle officiel + yolo export model=chemin/vers/le/meilleur.pt format=onnx # exporter le modรจle entraรฎnรฉ personnalisรฉ + ``` + +Les formats d'exportation YOLOv8-seg disponibles sont dans le tableau ci-dessous. Vous pouvez prรฉdire ou valider directement sur les modรจles exportรฉs, par exemple `yolo predict model=yolov8n-seg.onnx`. Des exemples d'utilisation sont prรฉsentรฉs pour votre modรจle aprรจs l'exportation. + +| Format | Argument `format` | Modรจle | Mรฉtadonnรฉes | Arguments | +|--------------------------------------------------------------------|-------------------|-------------------------------|-------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +Voir les dรฉtails complets 
d'`export` sur la page [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/fr/tasks/segment.md:Zone.Identifier b/ultralytics/docs/fr/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/fr/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/index.md b/ultralytics/docs/hi/index.md new file mode 100755 index 0000000..89f296d --- /dev/null +++ b/ultralytics/docs/hi/index.md @@ -0,0 +1,84 @@ +--- +comments: true +description: Ultralytics YOLOv8 เค•เฅ‡ เคชเฅ‚เคฐเฅเคฃ เค—เคพเค‡เคก เค•เฅ‹ เคœเคพเคจเฅ‡เค‚, เคเค• เค‰เคšเฅเคš เค—เคคเคฟ, เค‰เคšเฅเคš เคฏเฅ‹เค—เฅเคฏเคคเคพ เคตเคพเคฒเฅ‡ เคตเคธเฅเคคเฅ เค•เคพ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เค”เคฐ เค›เคตเคฟ เคตเคฟเคญเคพเคœเคจ เคฎเฅ‰เคกเคฒเฅค เคธเฅเคฅเคพเคชเคจเคพ, เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€, เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคŸเฅเคฏเฅ‚เคŸเฅ‹เคฐเคฟเคฏเคฒ เค”เคฐ เคฌเคนเฅเคค เค•เฅเค›เฅค +keywords: Ultralytics, YOLOv8, เคตเคธเฅเคคเฅ เคชเคคเคพ เคฒเค—เคพเคจเคพ, เค›เคตเคฟ เคตเคฟเคญเคพเคœเคจ, เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค—, เค—เคนเคฐเฅ€ เคฒเคฐเฅเคจเคฟเค‚เค—, เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฐ เคตเคฟเคœเคผเคจ, YOLOv8 เคธเฅเคฅเคพเคชเคจเคพ, YOLOv8 เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€, YOLOv8 เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, YOLO เค‡เคคเคฟเคนเคพเคธ, YOLO เคฒเคพเค‡เคธเฅ‡เค‚เคธเฅ‡เคธ +--- + +
+Ultralytics YOLO banner
+ + +เคชเฅ‡เคถ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ [เคฏเฅเคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเคฟเค•เฅเคธ](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), เคชเฅเคฐเคธเคฟเคฆเฅเคง เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคตเคธเฅเคคเฅ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เค”เคฐ เค›เคตเคฟ เคตเคฟเคญเคพเคœเคจ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคจเคตเฅ€เคจเคคเคฎ เคธเค‚เคธเฅเค•เคฐเคฃเฅค YOLOv8 เค—เคนเคฐเฅ€ เคฒเคฐเฅเคจเคฟเค‚เค— เค”เคฐ เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฐ เคตเคฟเคœเคผเคจ เคฎเฅ‡เค‚ เค•เคŸเคฟเค‚เค—-เคเคœ เค‰เคจเฅเคจเคคเคฟ เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคนเฅˆ, เค‡เคธเคฒเคฟเค เค—เคคเคฟ เค”เคฐ เคฏเฅ‹เค—เฅเคฏเคคเคพ เค•เฅ‡ เคฎเคพเคฎเคฒเฅ‡ เคฎเฅ‡เค‚ เค‡เคธเค•เคพ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค…เคฆเฅเคตเคฟเคคเฅ€เคฏ เคนเฅˆเฅค เค‡เคธเค•เคพ เคธเค‚เค•เฅเคทเฅ‡เคชเคฟเคค เคกเคฟเคœเคผเคพเค‡เคจ เค‡เคธเฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค เคฌเคจเคพเคคเคพ เคนเฅˆ เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎเฅเคธ เคชเคฐ เค†เคธเคพเคจเฅ€ เคธเฅ‡ เค…เคจเฅเค•เฅ‚เคฒ เคฌเคจเคพเคคเคพ เคนเฅˆ, เคถเฅเคฐเฅ‚ เค”เคฐ เคงเคพเคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคฟเคœเฅ‹เค—เฅเคฏ เค•เคฐเคคเคพ เคนเฅˆเฅค + +YOLOv8 เคกเฅ‰เค•เฅเคธ เค•เคพ เค…เคจเฅเคตเฅ‡เคทเคฃ เค•เคฐเฅ‡เค‚, เคฏเคน เคเค• เคตเฅเคฏเคพเคชเค• เคธเฅเคฐเฅ‹เคค เคนเฅˆ เคœเฅ‹ เค†เคชเค•เฅ‹ เค‡เคธเค•เฅ‡ เคธเฅเคตเคฟเคงเคพเค“เค‚ เค”เคฐ เค•เฅเคทเคฎเคคเคพเค“เค‚ เค•เฅ‹ เคธเคฎเคเคจเฅ‡ เค”เคฐ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฎเคฆเคฆ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคตเคฟเค•เคธเคฟเคค เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค เคšเคพเคนเฅ‡ เค†เคช เคเค• เค…เคจเฅเคญเคตเฅ€ เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— เคชเฅเคฐเฅˆเค•เฅเคŸเฅ€เคถเคจเคฐ เคนเฅ‹ เคฏเคพ เค•เฅเคทเฅ‡เคคเฅเคฐ เคฎเฅ‡เค‚ เคจเคฏเฅ‡ เคนเฅ‹เค‚, เค‡เคธ เคนเคฌ เค•เคพ เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เค†เคชเค•เฅ‡ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ YOLOv8 เค•เฅ€ เค•เฅเคทเคฎเคคเคพเค“เค‚ เค•เฅ‹ เค…เคงเคฟเค•เคคเคฎ เค•เคฐเคจเคพ เคนเฅˆเฅค + +!!! Note "เคจเฅ‹เคŸ" + + ๐Ÿšง เคนเคฎเคพเคฐเฅ€ เคฌเคนเฅเคญเคพเคทเฅ€ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเฅ€เค•เคฐเคฃ เคตเคฐเฅเคคเคฎเคพเคจ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฎเคพเคฃเคพเคงเฅ€เคจ เคนเฅˆ, เค”เคฐ เคนเคฎ เค‡เคธเฅ‡ เคธเฅเคงเคพเคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เค เคฟเคจเคคเคพเค“เค‚ เคชเคฐ เค•เคพเคฎ เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚เฅค เค†เคชเค•เฅ€ เคธเคนเคพเคฏเคคเคพ เค•เฅ‡ เคฒเคฟเค เคงเคจเฅเคฏเคตเคพเคฆ! ๐Ÿ™ + +## เคถเฅเคฐเฅเค†เคค เค•เคนเคพเค เคธเฅ‡ เค•เคฐเฅ‡เค‚ + +- **Install** `pip` เค•เฅ‡ เคธเคพเคฅ `ultralytics` เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ เค”เคฐ เค•เฅเค› เคฎเคฟเคจเคŸ เคฎเฅ‡เค‚ เคšเคฒเคคเคพ เคนเฅเค† เคชเคพเคเค‚   [:material-clock-fast: เคถเฅเคฐเฅ‚ เคนเฅ‹ เคœเคพเค“](quickstart.md){ .md-button } +- **Predict** เคฏเฅ‚เคจเคฟเค• images เค”เคฐ videos เค•เฅ‹ YOLOv8 เค•เฅ‡ เคธเคพเคฅ   [:octicons-image-16: เค›เคตเคฟเคฏเฅ‹เค‚ เคชเคฐ เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€ เค•เคฐเฅ‡เค‚](modes/predict.md){ .md-button } +- **Train** เค…เคชเคจเฅ‡ เค–เฅเคฆ เค•เฅ‡ custom เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคเค• เคจเคฏเคพ YOLOv8 เคฎเฅ‰เคกเคฒ   [:fontawesome-solid-brain: เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚](modes/train.md){ .md-button } +- **เค…เคจเฅเคตเฅ‡เคทเคฃ** เค•เคฐเฅ‡เค‚ YOLOv8 tasks เคœเฅˆเคธเฅ‡ เค•เคฟ เคตเคฟเคญเคพเคœเคฟเคค, เคตเคฐเฅเค—เฅ€เค•เฅƒเคค, เคธเฅเคฅเคฟเคคเคฟ เค”เคฐ เคŸเฅเคฐเฅˆเค• เค•เคฐเฅ‡เค‚   [:material-magnify-expand: เคŸเคพเคธเฅเค•เฅเคธ เค…เคจเฅเคตเฅ‡เคทเคฃ เค•เคฐเฅ‡เค‚](tasks/index.md){ .md-button } + +
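+
+เค‰เคฆเคพเคนเคฐเคฃ เค•เฅ‡ เคคเฅŒเคฐ เคชเคฐ, เคจเฅ€เคšเฅ‡ เคเค• เคจเฅเคฏเฅ‚เคจเคคเคฎ Python เคธเฅเค•เฅ‡เคš เคนเฅˆ เคœเฅ‹ เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคฌเคพเคฆ เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€ เค•เคพ เคฌเฅเคจเคฟเคฏเคพเคฆเฅ€ เคชเฅเคฐเคตเคพเคน เคฆเคฟเค–เคพเคคเคพ เคนเฅˆ (เคฏเคน เคฎเคพเคจเคคเฅ‡ เคนเฅเค เค•เคฟ `pip install ultralytics` เคชเคนเคฒเฅ‡ เคนเฅ€ เคšเคฒเคพเคฏเคพ เคœเคพ เคšเฅเค•เคพ เคนเฅˆ):
+
+```python
+from ultralytics import YOLO
+
+# เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8 เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚
+model = YOLO('yolov8n.pt')
+
+# เคเค• เค›เคตเคฟ เคชเคฐ เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€ เคšเคฒเคพเคเค
+results = model('https://ultralytics.com/images/bus.jpg')
+print(results[0].boxes)
+```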

+
+**เคฆเฅ‡เค–เฅ‡เค‚:** เค…เคชเคจเฅ‡ เค•เคธเฅเคŸเคฎ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค•เฅˆเคธเฅ‡ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเฅ‡เค‚ Google Colab เคฎเฅ‡เค‚เฅค
+
+ +## YOLO: เคเค• เคธเค‚เค•เฅเคทเคฟเคชเฅเคค เค‡เคคเคฟเคนเคพเคธ + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), เคเค• เคฒเฅ‹เค•เคชเฅเคฐเคฟเคฏ เคตเคธเฅเคคเฅ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เค”เคฐ เค›เคตเคฟ เคตเคฟเคญเคพเคœเคจ เคฎเฅ‰เคกเคฒ, เคฏเฅ‚เคจเคฟเคตเคฐเฅเคธเคฟเคŸเฅ€ เค‘เคซ เคตเคพเคถเคฟเค‚เค—เคŸเคจ เคฎเฅ‡เค‚ Joseph Redmon เค”เคฐ Ali Farhadi เคฆเฅเคตเคพเคฐเคพ เคตเคฟเค•เคธเคฟเคค เค•เคฟเคฏเคพ เค—เคฏเคพ เคฅเคพเฅค YOLO เค•เฅ€ เค‰เคšเฅเคš เค—เคคเคฟ เค”เคฐ เคฏเฅ‹เค—เฅเคฏเคคเคพ เค•เฅ‡ เค•เคพเคฐเคฃ, เคฏเคน 2015 เคฎเฅ‡เค‚ เคคเฅ‡เคœเฅ€ เคธเฅ‡ เคชเฅเคฐเคธเคฟเคฆเฅเคง เคนเฅเค†เฅค + +- [YOLOv2](https://arxiv.org/abs/1612.08242), 2016 เคฎเฅ‡เค‚ เคœเคพเคฐเฅ€ เค•เคฟเคฏเคพ เค—เคฏเคพ, เคฎเฅ‚เคฒ เคฎเฅ‰เคกเคฒ เคฎเฅ‡เค‚ batch normalization, anchor boxes เค”เคฐ dimension clusters เคถเคพเคฎเคฟเคฒ เค•เคฐเค•เฅ‡ เคฎเฅ‰เคกเคฒ เคฎเฅ‡เค‚ เคธเฅเคงเคพเคฐ เค•เคฟเคฏเคพเฅค +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), 2018 เคฎเฅ‡เค‚ เคฒเฅ‰เคจเฅเคš เค•เคฟเคฏเคพ เค—เคฏเคพ, เคเค• เค…เคงเคฟเค• เคชเฅเคฐเคญเคพเคตเฅ€ เคฌเฅˆเค•เคฌเฅ‹เคจ เคจเฅ‡เคŸเคตเคฐเฅเค•, เคเค‚เค•เคฐ เคฌเฅ‰เค•เฅเคธเฅ‡เคธ เค”เคฐ เคธเฅเคฅเคพเคจเคฟเค• เคชเคฟเคฐเคพเคฎเคฟเคก เคชเฅ‚เคฒเคฟเค‚เค— เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เคธเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เค”เคฐ เคฌเคขเคผเคพเคฏเคพเฅค +- [YOLOv4](https://arxiv.org/abs/2004.10934) 2020 เคฎเฅ‡เค‚ เคœเคพเคฐเฅ€ เค•เคฟเคฏเคพ เค—เคฏเคพ, Mosaic เคกเฅ‡เคŸเคพ เคตเฅƒเคฆเฅเคงเคฟ, เคเค• เคจเคฏเคพ anchor-free เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคนเฅ‡เคก เค”เคฐ เคเค• เคจเคฏเคพ เคฒเฅ‰เคธ เคซเคผเค‚เค•เฅเคถเคจ เค•เฅ‡ เคœเฅˆเคธเฅ‡ เคจเคตเคพเคšเคพเคฐ เคฆเฅเคตเคพเคฐเคพ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฌเฅ‡เคนเคคเคฐ เคฌเคจเคพเคฏเคพ เค—เคฏเคพเฅค +- [YOLOv5](https://github.com/ultralytics/yolov5) เคฎเฅ‰เคกเคฒ เค•เฅ€ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เค”เคฐ เคฌเฅ‡เคนเคคเคฐ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคธเคพเคฅ, เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค‘เคชเฅเคŸเคฟเคฎเคพเค‡เคœเคผเฅ‡เคถเคจ, เคเค•เฅ€เค•เฅƒเคค เคชเฅเคฐเคฏเฅ‹เค— เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค”เคฐ เคฒเฅ‹เค•เคชเฅเคฐเคฟเคฏ export formats เคฎเฅ‡เค‚ เคธเฅเคตเคšเคพเคฒเคฟเคค เคจเคฟเคฐเฅเคฏเคพเคค เคœเฅˆเคธเฅ‡ เคจเค เคธเฅเคตเคฟเคงเคพเคเค‚ เคœเฅ‹เคกเคผเฅ€ เค—เคˆเค‚เฅค +- [YOLOv6](https://github.com/meituan/YOLOv6) 2022 เคฎเฅ‡เค‚ [เคฎเฅ‡เคŸเฅเค†เคจ](https://about.meituan.com/) เคฆเฅเคตเคพเคฐเคพ เค“เคชเคจ-เคธเฅ‹เคธเฅเคก เค•เคฟเคฏเคพ เค—เคฏเคพ เคฅเคพ เค”เคฐ เค•เคˆ เค•เคฎเฅเคชเคจเฅ€ เค•เฅ‡ เคธเฅเคตเคคเค‚เคคเฅเคฐ เคตเคฟเคคเคฐเคฃ เคฐเฅ‹เคฌเฅ‹เคŸ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅ‹เค— เคฎเฅ‡เค‚ เคนเฅˆเฅค +- [YOLOv7](https://github.com/WongKinYiu/yolov7) เคจเฅ‡ COCO keypoints เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅ‹เคœ เค…เคจเฅเคฎเคพเคจ เคœเฅˆเคธเฅ‡ เค…เคคเคฟเคฐเคฟเค•เฅเคค เคŸเคพเคธเฅเค• เคœเฅ‹เคกเคผ เคฆเคฟเคฏเคพเฅค +- [YOLOv8](https://github.com/ultralytics/ultralytics) Ultralytics เคฆเฅเคตเคพเคฐเคพ YOLO เค•เคพ เคจเคตเฅ€เคจเคคเคฎ เคธเค‚เคธเฅเค•เคฐเคฃ เคนเฅˆเฅค เคเค• เคคเคฒเคตเคพเคฐ เค•เฅ€ เค•เคพเคŸ, เค†เคชเคพเคคเคคเคพ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคซเคฒเคคเคพ เคชเคฐ เคจเคฟเคฐเฅเคฎเคฟเคคเค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ€ เคฎเฅ‡เคนเคจเคค เค•เฅ€ เคšเคŸเคพเคจเฅ‹เค‚ เคชเคฐ เคจเคฟเคฐเฅเคฎเคพเคฃ เค•เคฐเค•เฅ‡ YOLOv8 เคจเฅ‡ เคชเคฟเค›เคฒเฅ‡ เคธเค‚เคธเฅเค•เคฐเคฃเฅ‹เค‚ เค•เฅ€ เคธเคซเคฒเคคเคพ เคชเคฐ เค†เคงเคพเคฐเคฟเคค, เคจเคˆ เคธเฅเคตเคฟเคงเคพเคเค‚ เค”เคฐ เคธเฅเคงเคพเคฐ เค…เคฆเฅเคฏเคคเคฟเคค เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เคฒเคšเฅ€เคฒเคพเคชเคจ เค”เคฐ เคชเฅเคฐเคฆเคพเคฐเฅเคฅเคคเคพ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคธเฅเคคเฅเคค เค•เคฟเค เคนเฅˆเค‚เฅค YOLOv8 เคตเคฟเคœเคจ AI tasks, เคœเฅˆเคธเฅ‡ [เคชเคคเคพ เคฒเค—เคพเคจเคพ](tasks/detect.md), 
[เคตเคฟเคญเคพเคœเคจ](tasks/segment.md), [เคชเฅ‹เคœ เค…เคจเฅเคฎเคพเคจ](tasks/pose.md), [เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—](modes/track.md), เค”เคฐ [เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ](tasks/classify.md) เค•เคพ เคชเฅ‚เคฐเคพ เคธเคฎเคฐเฅเคฅเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคตเคฟเคตเคฟเคง เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เค”เคฐ เค•เฅเคทเฅ‡เคคเฅเคฐเฅ‹เค‚ เคฎเฅ‡เค‚ เคฏเฅ‹เคฒเฅ‹เคตเฅ€8 เค•เฅ€ เค•เฅเคทเคฎเคคเคพเค“เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค + +YOLO เคฒเคพเค‡เคธเฅ‡เค‚เคธเฅ‡เคธ: Ultralytics YOLO เค•เคพ เคชเฅเคฐเคฏเฅ‹เค— เค•เฅˆเคธเฅ‡ เคนเฅ‹เคคเคพ เคนเฅˆ? + +Ultralytics เคตเคฟเคญเคฟเคจเฅเคจ เค‰เคชเคฏเฅ‹เค— เคฎเคพเคฎเคฒเฅ‹เค‚ เค•เฅ‹ เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฆเฅ‹ เคฒเคพเค‡เคธเฅ‡เค‚เคธเคฟเค‚เค— เคตเคฟเค•เคฒเฅเคช เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ: + +- **AGPL-3.0 เคฒเคพเค‡เคธเฅ‡เค‚เคธ**: เคฏเคน [OSI เคธเฅเคตเฅ€เค•เฅƒเคค](https://opensource.org/licenses/) เค“เคชเคจ-เคธเฅ‹เคฐเฅเคธ เคฒเคพเค‡เคธเฅ‡เค‚เคธ เค›เคพเคคเฅเคฐเฅ‹เค‚ เค”เคฐ เค‰เคคเฅเคธเคพเคนเฅ€เคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค เคนเฅˆ, เค—เคนเคจ เคธเคนเคฏเฅ‹เค— เค”เคฐ เคœเฅเคžเคพเคจ เคธเคพเคเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเฅ‹เคคเฅเคธเคพเคนเคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค เค…เคงเคฟเค• เคœเคพเคจเค•เคพเคฐเฅ€ เค•เฅ‡ เคฒเคฟเค [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) เคซเคผเคพเค‡เคฒ เคฆเฅ‡เค–เฅ‡เค‚เฅค +- **เคตเฅเคฏเคตเคธเคพเคฏเคฟเค• เคฒเคพเค‡เคธเฅ‡เค‚เคธ**: เคตเฅเคฏเคพเคตเคธเคพเคฏเคฟเค• เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเคฏเคพ เค—เคฏเคพ, เคฏเคน เคฒเคพเค‡เคธเฅ‡เค‚เคธ Ultralytics เคธเฅ‰เคซเคผเฅเคŸเคตเฅ‡เคฏเคฐ เค”เคฐ AI เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคตเคพเคฃเคฟเคœเฅเคฏเคฟเค• เคฎเคพเคฒ เค”เคฐ เคธเฅ‡เคตเคพเค“เค‚ เคฎเฅ‡เค‚ เคธเคฐเคฒเคคเคพเคชเฅ‚เคฐเฅเคตเค• เคธเคฎเฅเคฎเคฟเคฒเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆ, AGPL-3.0 เค•เฅ€ เค“เคชเคจ-เคธเฅ‹เคฐเฅเคธ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‹ เค›เฅ‹เคกเคผเคคเคพ เคนเฅˆเฅค เคฏเคฆเคฟ เค†เคชเค•เฅ‡ เคชเคฐเคฟเคฆเฅƒเคถเฅเคฏ เคฎเฅ‡เค‚ เคนเคฎเคพเคฐเฅ‡ เคธเคฎเคพเคงเคพเคจเฅ‹เค‚ เค•เฅ‹ เคเค• เคตเคพเคฃเคฟเคœเฅเคฏเคฟเค• เคชเฅเคฐเคธเฅเคคเคพเคต เคฎเฅ‡เค‚ เคเคฎเฅเคฌเฅ‡เคก เค•เคฐเคจเคพ เคถเคพเคฎเคฟเคฒ เคนเฅˆ, [Ultralytics Licensing](https://ultralytics.com/license) เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคธเค‚เคชเคฐเฅเค• เค•เคฐเฅ‡เค‚เฅค + +เคนเคฎเคพเคฐเฅ€ เคฒเคพเค‡เคธเฅ‡เค‚เคธเคฟเค‚เค— เคฐเคฃเคจเฅ€เคคเคฟ เค‡เคธ เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคนเฅˆ เค•เคฟ เคนเคฎเคพเคฐเฅ‡ เค“เคชเคจ-เคธเฅ‹เคฐเฅเคธ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ เค•เคฟเค เค—เค เค•เฅ‹เคˆ เคญเฅ€ เคธเฅเคงเคพเคฐ เคธเคฎเฅเคฆเคพเคฏ เค•เฅ‹ เคฒเฅŒเคŸเคพเค เคœเคพเคเค‚เฅค เคนเคฎ เค“เคชเคจ เคธเฅ‹เคฐเฅเคธ เค•เฅ‡ เคธเคฟเคฆเฅเคงเคพเค‚เคคเฅ‹เค‚ เค•เฅ‹ เค…เคชเคจเฅ‡ เคฆเคฟเคฒ เค•เฅ‡ เคชเคพเคธ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚ โค๏ธ, เค”เคฐ เคนเคฎเคพเคฐเคพ เคฎเคฟเคถเคจ เคฏเคน เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคจเคพ เคนเฅˆ เค•เคฟ เคนเคฎเคพเคฐเฅ‡ เคฏเฅ‹เค—เคฆเคพเคจเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค”เคฐ เคตเคฟเคธเฅเคคเคพเคฐ เค•เคฟเค เคœเคพเคจเฅ‡ เค•เฅ‡ เคคเคฐเฅ€เค•เฅ‹เค‚ เคฎเฅ‡เค‚ เค•เฅเคฐเคฟเคฏเคพเคจเฅเคตเคฏเคจ เค•เคฟเค เคœเคพเคเค‚ เคœเฅ‹ เคธเคญเฅ€ เค•เฅ‡ เคฒเคฟเค เคฒเคพเคญเคฆเคพเคฏเค• เคนเฅ‹เค‚เฅค diff --git a/ultralytics/docs/hi/index.md:Zone.Identifier b/ultralytics/docs/hi/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/fast-sam.md b/ultralytics/docs/hi/models/fast-sam.md new file mode 
100755 index 0000000..4dc714f --- /dev/null +++ b/ultralytics/docs/hi/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: FastSAM เคเค• เคธเฅ€เคเคจเคเคจ เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคธเคฎเคพเคงเคพเคจ เคนเฅˆ เคœเฅ‹ เค›เคตเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เค•เฅ‡ เคฒเคฟเค เคฌเคจเคพเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค เคฏเคน เคฏเฅ‚เคœเคฐ เค‡เค‚เคŸเคฐเฅ‡เค•เฅเคถเคจ, เคธเค‚เค—เคฃเคจเฅ€เคฏ เคชเฅเคฐเคญเคพเคตเคถเฅ€เคฒเคคเคพ เค”เคฐ เคตเคฟเคœเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‹ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคธเค•เฅเคทเคฎ เคนเฅˆเฅค +keywords: FastSAM, machine learning, CNN-based solution, object segmentation, เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค•เคพ เคธเคฎเคพเคงเคพเคจ, Ultralytics, เคตเคฟเคœเคจ เค•เคพเคฐเฅเคฏ, เค›เคตเคฟ เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค—, เค‰เคฆเฅเคฏเฅ‹เค—เคฟเค• เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—, เคฏเฅ‚เคœเคฐ เค‡เค‚เคŸเคฐเฅˆเค•เฅเคถเคจ +--- + +# Fast Segment Anything Model (FastSAM) + +เคซเคพเคธเฅเคŸ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคเคจเฅ€เคฅเคฟเค‚เค— เคฎเฅ‰เคกเคฒ (เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ) เคเค• เคจเคตเฅ€เคจ, เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เค•เคพเคฐเฅเคฏเคฐเคค เคธเฅ€เคเคจเคเคจ เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคธเคฎเคพเคงเคพเคจ เคนเฅˆ เคœเฅ‹ เคเคจเฅ€เคฅเคฟเค‚เค— เคŸเคพเคธเฅเค• เค•เฅ‹ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฌเคจเคพเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค เค‡เคธ เคŸเคพเคธเฅเค• เค•เคพ เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เคตเคฟเคญเคฟเคจเฅเคจ เคธเค‚เคญเคพเคตเคฟเคค เค‰เคชเคฏเฅ‹เค•เฅเคคเคพ เค‡เค‚เคŸเคฐเฅ‡เค•เฅเคถเคจ เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸเฅเคธ เคชเคฐ เค†เคงเคพเคฐเคฟเคค เค›เคตเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค•เคฟเคธเฅ€ เคญเฅ€ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‹ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐเคจเคพ เคนเฅˆเฅค เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคจเฅ‡ เคธเค‚เค—เคฃเคจเคพเคคเฅเคฎเค• เคฎเคพเค‚เค— เค•เฅ‹ เค•เคฎ เค•เคฐเคคเฅ‡ เคนเฅเค เคฎเฅเค•เคพเคฌเคฒเฅ‡ เค•เฅเคทเคฎเคคเคพ เค•เฅ‹ เคฌเคฐเค•เคฐเคพเคฐ เคฐเค–เคคเฅ‡ เคนเฅเค เคธเค‚เค—เคฃเค•เฅ€เคฏ เคฎเคพเค‚เค—เฅ‹เค‚ เค•เฅ‹ เค•เคพเคซเฅ€ เค•เคฎ เค•เคฟเคฏเคพ เคนเฅˆ, เคœเคฟเคธเค•เฅ‡ เค•เคพเคฐเคฃ เคฏเคน เคตเคฟเคญเคฟเคจเฅเคจ เคตเคฟเคœเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคเค• เคตเฅเคฏเคพเคตเคนเคพเคฐเคฟเค• เคตเคฟเค•เคฒเฅเคช เคฌเคจเคพเคคเคพ เคนเฅˆเฅค + +![เคซเคพเคธเฅเคŸ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคเคจเฅ€เคฅเคฟเค‚เค— เคฎเฅ‰เคกเคฒ (เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ) เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เค‡เค‚เคŸเฅเคฐเฅ‹](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## เค…เคตเคฒเฅ‹เค•เคจ + +FastSAM, [เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคเคจเฅ€เคฅเคฟเค‚เค— เคฎเฅ‰เคกเคฒ (เคเคธเคเคเคฎ)](sam.md) เค•เฅ€ เคธเฅ€เคฎเคพเค“เค‚ เค•เคพ เคธเคพเคฎเคจเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฌเคจเคพเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคœเฅ‹ เคเค• เคญเคพเคฐเฅ€ เคŸเฅเคฐเคพเค‚เคธเคซเฅ‰เคฐเฅเคฎเคฐ เคฎเฅ‰เคกเคฒ เคนเฅˆ เค”เคฐ เค‰เคšเคฟเคค เคธเค‚เคธเคพเคงเคจ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‹ เคนเฅˆเฅค เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคจเฅ‡ เคธเคพเคฐเฅเคตเคญเฅŒเคฎเคฟเค• เคšเคฐเคฃเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคเคจเฅ€เคฅเคฟเค‚เค— เคŸเคพเคธเฅเค• เค•เฅ‹ เค…เคฒเค—-เค…เคฒเค— เคฆเฅ‹ เคธเคพเคเคชเฅเคฐเคฆเคพเคฏเคฟเค• เคšเคฐเคฃเฅ‹เค‚ เคฎเฅ‡เค‚ เค…เคฒเค— เค•เคฟเคฏเคพ เคนเฅˆ: เคธเคญเฅ€ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เค”เคฐ เคชเฅเคฐเฅ‰เคฎเฅเคชเฅเคŸ-เค—เคพเค‡เคกเฅ‡เคก เคšเคฏเคจเฅค เคชเคนเคฒเฅ‡ เคšเคฐเคฃ เคฎเฅ‡เค‚, เคฏเคน [เคตเฅเคฏเฅ‹เคฒเฅ‹เคตเฅ€8-เคธเฅ‡เค—](../tasks/segment.md) เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค›เคตเคฟ เคฎเฅ‡เค‚ เคธเคญเฅ€ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เค•เฅ€ 
เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเคพเคธเฅเค• เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฆเฅ‚เคธเคฐเฅ‡ เคšเคฐเคฃ เคฎเฅ‡เค‚, เคฏเคน เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸ เค•เฅ‡ เคฌเคฐเคพเคฌเคฐ เค•เฅเคทเฅ‡เคคเฅเคฐ-เค‘เคซ-เค‡เค‚เคŸเคฐเฅ‡เคธเฅเคŸ เค•เฅ‹ เค†เค‰เคŸเคชเฅเคŸ เค•เคฐเคคเคพ เคนเฅˆเฅค + +## เคชเฅเคฐเคฎเฅเค– เคธเฅเคตเคฟเคงเคพเคเค‚ + +1. **เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค•เคพ เคธเคฎเคพเคงเคพเคจ:** เคธเฅ€เคเคจเคเคจ เค•เฅ€ เคธเค‚เค—เคฃเคจเคพเคคเฅเคฎเค• เคชเฅเคฐเคญเคพเคตเคถเฅ€เคฒเคคเคพ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡, เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคเคจเฅ€เคฅเคฟเค‚เค— เคŸเคพเคธเฅเค• เค•เฅ‡ เคฒเคฟเค เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคธเคฎเคพเคงเคพเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เคฏเคน เค‰เคฆเฅเคฏเฅ‹เค—เคฟเค• เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค + +2. **เคชเฅเคฐเคฆเคพเคฐเฅเคฅเฅ€ เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅเคทเคฎเคคเคพ:** เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคธเค‚เค—เคฃเคจเคพเคคเฅเคฎเค• เค”เคฐ เคธเค‚เคธเคพเคงเคจ เคฎเคพเค‚เค— เคฎเฅ‡เค‚ เค•เคพเคซเฅ€ เค•เคฎเฅ€ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ เคฌเคฟเคจเคพ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค—เฅเคฃเคตเคคเฅเคคเคพ เคชเคฐ เค•เฅเค› เคฌเคฆเคฒเฅ‡ เค•เฅ‡เฅค เคฏเคน เคธเค‚เคธเคพเคงเคจเฅ‹เค‚ เค•เฅ€ เคฌเคนเฅเคค เค…เคงเคฟเค• เค•เคฎเฅ€ เค•เฅ‡ เคธเคพเคฅ เคเคธเคเคเคฎ เค•เฅ‡ เคฒเค—เคญเค— เคคเฅเคฒเคจเคพเคคเฅเคฎเค• เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เคธเค‚เคญเคต เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +3. **เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸ-เค—เคพเค‡เคกเฅ‡เคก เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ:** เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เค•เคˆ เคธเค‚เคญเคพเคตเคฟเคค เค‰เคชเคฏเฅ‹เค•เฅเคคเคพ เค‡เค‚เคŸเคฐเฅ‡เค•เฅเคถเคจ เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸเฅเคธ เคฆเฅเคตเคพเคฐเคพ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค›เคตเคฟ เคฎเฅ‡เค‚ เค•เคฟเคธเฅ€ เคญเฅ€ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‹ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐ เคธเค•เคคเคพ เคนเฅˆ, เคœเฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เคชเคฐเคฟเคธเฅเคฅเคฟเคคเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เคฒเคšเฅ€เคฒเคพเคชเฅ‚เคฐเฅเคฃเคคเคพ เค”เคฐ เค…เคจเฅเค•เฅ‚เคฒเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค + +4. **เคตเฅเคฏเฅ‹เคฒเฅ‹เคตเฅ€8-เคธเฅ‡เค— เคชเคฐ เค†เคงเคพเคฐเคฟเคค:** เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ [เคตเฅเคฏเฅ‹เคฒเฅ‹เคตเฅ€8-เคธเฅ‡เค—](../tasks/segment.md) เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคนเฅˆ, เคœเฅ‹ เคเค• เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐ เคนเฅˆ เคœเคฟเคธเคฎเฅ‡เค‚ เคเค• เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคŸเคพเคธเฅเค• เคนเฅˆเฅค เค‡เคธเคธเฅ‡ เคฏเคน เคธเค‚เคญเคต เคนเฅ‹เคคเคพ เคนเฅˆ เค•เคฟ เคฏเคน เค›เคตเคฟ เคฎเฅ‡เค‚ เคธเคญเฅ€ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เค•เฅ€ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเคพเคธเฅเค• เคชเฅเคฐเคญเคพเคตเฅ€ เคขเค‚เค— เคธเฅ‡ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเฅ‡เค‚เฅค + +5. **เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค• เคชเคฐ เคชเฅเคฐเคคเคฟเคธเฅเคชเคฐเฅเคงเฅ€ เคชเคฐเคฟเคฃเคพเคฎ:** MS COCO เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคชเฅเคฐเคพเคธเคฒเคจ เคŸเคพเคธเฅเค• เคชเคฐ, เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคเค•เคฒ NVIDIA RTX 3090 เคชเคฐ [เคเคธเคเคเคฎ](sam.md) เค•เฅ€ เคคเฅเคฒเคจเคพ เคฎเฅ‡เค‚ เค•เคพเคซเฅ€ เคคเฅ‡เคœ เค—เคคเคฟ เคฎเฅ‡เค‚ เค‰เคšเฅเคš เค…เค‚เค• เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคคเคพ เคนเฅˆ, เคœเฅ‹ เค‡เคธเค•เฅ€ เคชเฅเคฐเคญเคพเคตเคถเฅ€เคฒเคคเคพ เค”เคฐ เค•เฅเคทเคฎเคคเคพ เค•เฅ‹ เคฆเคฟเค–เคพเคคเคพ เคนเฅˆเฅค + +6. 
**เคตเคพเคธเฅเคคเคตเคฟเค• เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—:** เคชเฅเคฐเคธเฅเคคเคพเคตเคฟเคค เคฆเฅƒเคทเฅเคŸเคพเค‚เคค เคเค• เคจเค, เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เค•เคˆ เคตเคฟเคœเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคเค• เคจเคฏเคพ, เคตเฅเคฏเคพเคตเคนเคพเคฐเคฟเค• เคธเคฎเคพเคงเคพเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเคฎเฅ‡เค‚ เคฎเฅŒเคœเฅ‚เคฆเคพ เคตเคฟเคงเคฟเคฏเฅ‹เค‚ เคธเฅ‡ เค•เคˆ เคฏเคพ เคธเฅŒ เค—เฅเคจเคพ เคคเฅ‡เคœ เคคเคฐเฅ€เค•เฅ‡ เคธเฅ‡ เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค + +7. **เคฎเฅ‰เคกเคฒ เคธเค‚เค•เฅเคทเคฟเคชเฅเคคเคฟ เค•เฅเคทเคฎเคคเคพ:** เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคจเฅ‡ เคชเฅเคฐเคตเฅ‡เคถเฅ€ เค…เคญเคฟเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคเค• เค•เฅƒเคคเฅเคฐเคฟเคฎ เคชเฅเคฐเคพเคฅเคฎเคฟเค•เคคเคพ เค•เฅ‹ เคธเค‚เคฐเคšเคจเคพ เคฎเฅ‡เค‚ เคชเคฐเคฟเคšเคฏ เค•เคฐเคพเคจเฅ‡ เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เค•เค‚เคชเฅเคฏเฅ‚เคŸเฅ‡เคถเคจเคฒ เคชเฅเคฐเคฏเคพเคธ เค•เฅ‹ เค•เคพเคซเฅ€ เค•เคฎ เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเค‚เคญเคพเคตเคจเคพ เคฆเคฟเค–เคพเคˆ เคนเฅˆ, เค‡เคธ เคชเฅเคฐเค•เคพเคฐ เคธเคพเคฎเคพเคจเฅเคฏ เคตเคฟเคœเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคฌเคกเคผเฅ‡ เคฎเฅ‰เคกเคฒ เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เค•เฅ‡ เคฒเคฟเค เคจเค เคธเค‚เคญเคพเคตเคจเคพเค“เค‚ เค•เฅ‹ เค–เฅ‹เคฒเคคเฅ€ เคนเฅˆเฅค + +## เค‰เคชเคฒเคฌเฅเคง เคฎเฅ‰เคกเคฒ, เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ เค”เคฐ เค‘เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคฎเฅ‹เคก + +เค‡เคธ เคธเคพเคฐเคฃเฅ€ เคฎเฅ‡เค‚ เค‰เคชเคฒเคฌเฅเคง เคฎเฅ‰เคกเคฒ, เค‰เคจเค•เฅ‡ เคตเคฟเคถเคฟเคทเฅเคŸ เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคตเฅ‡เคŸ เค”เคฐ เค‰เคจเค•เฅ‡ เคธเคฎเคฐเฅเคฅเคจเคฟเคค เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‹ เคชเฅ‡เคถ เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคธเคพเคฅ เคนเฅ€ เค‰เคจเค•เฅ€ เคตเคฟเคญเคฟเคจเฅเคจ เค‘เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคฎเฅ‹เคก เค•เฅ‡ เคธเคพเคฅ เคธเค‚เค—เคคเคคเคพ เค•เฅ‹ เคฆเคฐเฅเคถเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคธเคฎเคฐเฅเคฅเคฟเคค เคฎเฅ‹เคœเฅ€ เคฎเฅ‡เค‚ โœ… เค‡เคฎเฅ‹เคœเคผเฅ€ เค”เคฐ เค…เคธเคฎเคฐเฅเคฅเคฟเคค เคฎเฅ‹เคœเฅ€ เคฎเฅ‡เค‚ โŒ เค‡เคฎเฅ‹เคœเคผเฅ€ เคฒเค—เคพเค เค—เค เคนเฅˆเค‚เฅค + +| เคฎเฅ‰เคกเคฒ เคชเฅเคฐเค•เคพเคฐ | เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคตเฅ‡เคŸเฅเคธ | เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ | เคญเคพเคตเคจเคพ | เคฎเคพเคจเฅเคฏเฅ€เค•เคฐเคฃ | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ | เคจเคฟเคฐเฅเคฏเคพเคค | +|-----------------|------------------------|--------------------------------------------|-------|-----------|-----------|---------| +| เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ-เคเคธ | `FastSAM-s.pt` | [เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ-เคเค•เฅเคธ | `FastSAM-x.pt` | [เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เค‰เคฆเคพเคนเคฐเคฃ + +เคซเคพเคธเฅเคŸเคเคธเคเคเคฎ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค…เคชเคจเฅ‡ เคชเคพเคฏเคฅเคจ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เคฎเฅ‡เค‚ เค†เคธเคพเคจเฅ€ เคธเฅ‡ เคเค•เฅ€เค•เฅƒเคค เค•เคฐเคจเคพ เค†เคธเคพเคจ เคนเฅˆเฅค เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเฅเคฏเคพเคŸเคฟเค•เฅเคธ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ-เคฎเคฟเคคเฅเคฐเคชเฅ‚เคฐเฅเคฃ เคชเคพเคฏเคฅเคจ API เค”เคฐ CLI เค•เคฎเคพเค‚เคกเฅเคธ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ เคคเคพเค•เคฟ เคตเคฟเค•เคพเคธ เค•เฅ‹ เคธเคฐเคฒ เคฌเคจเคพเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค + +### เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค‰เคชเคฏเฅ‹เค— + +เคเค• เค›เคตเคฟ เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคจเฅ€เคšเฅ‡ เคฆเคฟเค เค—เค เค‰เคฆเคพเคนเคฐเคฃ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚: + +!!! 
+
+    === "Python"
+
+        ```python
+        from ultralytics import FastSAM
+        from ultralytics.models.fastsam import FastSAMPrompt
+
+        # Define an inference source
+        source = 'path/to/bus.jpg'
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Run inference on an image
+        everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
+
+        # Prepare a Prompt Process object
+        prompt_process = FastSAMPrompt(source, everything_results, device='cpu')
+
+        # Everything prompt
+        ann = prompt_process.everything_prompt()
+
+        # Box default shape [0,0,0,0] -> [x1,y1,x2,y2]
+        ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])
+
+        # Text prompt
+        ann = prompt_process.text_prompt(text='a photo of a dog')
+
+        # Point prompt
+        # points default [[0,0]] [[x1,y1],[x2,y2]]
+        # point_label default [0] [1,0] 0:background, 1:foreground
+        ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
+        prompt_process.plot(annotations=ann, output='./')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a FastSAM model and segment everything with it
+        yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640
+        ```
+
+This snippet demonstrates the simplicity of loading a pre-trained model and running a prediction on an image.
+
+### Val Usage
+
+Validation of the model on a dataset can be done as follows:
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import FastSAM
+
+        # Create a FastSAM model
+        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt
+
+        # Validate the model
+        results = model.val(data='coco8-seg.yaml')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a FastSAM model and validate it on the COCO8 example dataset at image size 640
+        yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640
+        ```
+
+Please note that FastSAM only supports detection and segmentation of a single class of object. This means it will recognize and segment all objects as the same class. Therefore, when preparing the dataset, you need to convert all object category IDs to 0, as in the sketch below.
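+The following is a minimal sketch of that conversion, under the assumption that your labels are plain YOLO-format text files in a `labels/` directory with the class ID as the first field on each line; the directory name and file layout are assumptions for illustration, so adapt the paths to your dataset.
+
+```python
+from pathlib import Path
+
+# Assumed layout: one YOLO-format .txt label file per image in labels/
+for label_file in Path('labels').glob('*.txt'):
+    fixed = []
+    for line in label_file.read_text().splitlines():
+        parts = line.split()
+        if parts:                  # skip empty lines
+            parts[0] = '0'         # force every class ID to 0
+            fixed.append(' '.join(parts))
+    label_file.write_text('\n'.join(fixed) + '\n')
+```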
+
+## FastSAM official Usage
+
+FastSAM is also available directly from the [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) repository. Here is a brief overview of the typical steps you might take to use FastSAM:
+
+### Installation
+
+1. Clone the FastSAM repository:
+
+    ```shell
+    git clone https://github.com/CASIA-IVA-Lab/FastSAM.git
+    ```
+
+2. Create and activate a Conda environment with Python 3.9:
+
+    ```shell
+    conda create -n FastSAM python=3.9
+    conda activate FastSAM
+    ```
+
+3. Navigate to the cloned repository and install the required packages:
+
+    ```shell
+    cd FastSAM
+    pip install -r requirements.txt
+    ```
+
+4. Install the CLIP model:
+
+    ```shell
+    pip install git+https://github.com/openai/CLIP.git
+    ```
+
+### Example Usage
+
+1. Download a [model checkpoint](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing).
+
+2. Use FastSAM for inference. Example commands:
+
+    - Segment everything in an image:
+
+        ```shell
+        python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg
+        ```
+
+    - Segment specific objects using a text prompt:
+
+        ```shell
+        python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog"
+        ```
+
+    - Segment objects within a bounding box (provide box coordinates in xywh format):
+
+        ```shell
+        python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]"
+        ```
+
+    - Segment objects near specific points:
+
+        ```shell
+        python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]"
+        ```
+
+Additionally, you can try FastSAM through a [Colab demo](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) or on the [HuggingFace web demo](https://huggingface.co/spaces/An-619/FastSAM) for a visual experience.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the FastSAM authors for their significant contributions in the field of real-time instance segmentation:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{zhao2023fast,
+              title={Fast Segment Anything},
+              author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang},
+              year={2023},
+              eprint={2306.12156},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+The original [FastSAM](https://arxiv.org/abs/2306.12156) paper can be found on arXiv. The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/hi/models/index.md b/ultralytics/docs/hi/models/index.md new file mode 100755 index 0000000..3ac9539 --- /dev/null +++ b/ultralytics/docs/hi/models/index.md @@ -0,0 +1,98 @@
+---
+comments: true
+description: Explore the diverse range of YOLO family, SAM, MobileSAM, FastSAM, YOLO-NAS, and RT-DETR models supported by Ultralytics. Get started with examples for both CLI and Python usage.
+keywords: Ultralytics, documentation, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, models, architectures, Python, CLI
+---
+
+# Models Supported by Ultralytics
+
+Welcome to Ultralytics' model documentation! We offer support for a wide range of models, each tailored to specific tasks like [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you are interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../../help/contributing.md).
+
+!!! Note "Note"
+
+    🚧 Our multi-language documentation is currently under construction, and we are working hard to improve it. Thank you for your patience! 🙏
+
+## Featured Models
+
+Here are some of the key models supported:
+
+1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family, originally by Joseph Redmon, known for its efficient real-time object detection capabilities.
+2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020.
+3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions.
+4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022, and in use in many of the company's autonomous delivery robots.
+5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4.
+6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoints estimation, and classification.
+7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM).
+8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University.
+9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by the Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences.
+10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
+11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models.
+
+**Watch:** Run Ultralytics YOLO models in just a few lines of code.
+
+## Getting Started: Usage Examples
+
+This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+Note the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()` and `RTDETR()` classes to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
+
+## Contributing New Models
+
+Interested in contributing your model to Ultralytics? Great! We are always open to expanding our model portfolio.
+
+1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics).
**เค…เคชเคจเฅ‡ เคซเฅ‹เคฐเฅเค• เค•เฅ‹ เค•เฅเคฒเฅ‹เคจ เค•เคฐเฅ‡เค‚**: เค…เคชเคจเฅ‡ เคซเฅ‹เคฐเฅเค• เค•เฅ‹ เค…เคชเคจเฅ€ เคฒเฅ‹เค•เคฒ เคฎเคถเฅ€เคจ เคชเคฐ เค•เฅเคฒเฅ‹เคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เค•เคพเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เคจเคˆ เคฌเฅเคฐเคพเค‚เคš เคฌเคจเคพเคเค‚เฅค + +3. **เค…เคชเคจเคพ เคฎเฅ‰เคกเคฒ เคฒเคพเค—เฅ‚ เค•เคฐเฅ‡เค‚**: เคนเคฎเคพเคฐเฅ‡ [Contributing Guide](../../help/contributing.md) เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เค•เฅ‹เคกเคฟเค‚เค— เคธเฅเคŸเฅˆเค‚เคกเคฐเฅเคกเฅเคธ เค”เคฐ เคฆเคฟเคถเคพเคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เคพ เค…เคจเฅเคธเคฐเคฃ เค•เคฐเคคเฅ‡ เคนเฅเค เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคœเฅ‹เคกเคผเฅ‡เค‚เฅค + +4. **เค—เคนเคฐเคพเคˆ เคธเฅ‡ เคชเคฐเฅ€เค•เฅเคทเคฃ เค•เคฐเฅ‡เค‚**: เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เคพ เคชเคฐเฅ€เค•เฅเคทเคฃ เค…เคฒเค— เคธเฅ‡ เค”เคฐ เคชเคพเค‡เคชเคฒเคพเค‡เคจ เค•เฅ‡ เคนเคฟเคธเฅเคธเฅ‡ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +5. **เคชเฅเคฒ เคฐเคฟเค•เฅเคตเฅ‡เคธเฅเคŸ เคฌเคจเคพเคเค‚**: เคเค• เคฌเคพเคฐ เคœเคฌ เค†เคช เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคธเค‚เคคเฅเคทเฅเคŸ เคนเฅ‹ เคœเคพเคเค‚, เคคเฅ‹ เคธเคฎเฅ€เค•เฅเคทเคพ เค•เฅ‡ เคฒเคฟเค เคฎเฅเค–เฅเคฏ เคฐเคฟเคชเฅ‰เคœเคฟเคŸเคฐเฅ€ เค•เฅ‹ เคเค• เคชเฅเคฒ เคฐเคฟเค•เฅเคตเฅ‡เคธเฅเคŸ เคฌเคจเคพเคเค‚เฅค + +6. **เค•เฅ‹เคก เคธเคฎเฅ€เค•เฅเคทเคพ เค”เคฐ เคฎเคฟเคฒเคพเคจ**: เคธเคฎเฅ€เค•เฅเคทเคพ เค•เฅ‡ เคฌเคพเคฆ, เคฏเคฆเคฟ เค†เคชเค•เคพ เคฎเฅ‰เคกเคฒ เคนเคฎเคพเคฐเฅ‡ เคฎเคพเคจเคฆเค‚เคกเฅ‹เค‚ เค•เฅ‹ เคชเฅ‚เคฐเคพ เค•เคฐเคคเคพ เคนเฅˆ, เคคเฅ‹ เค‡เคธเฅ‡ เคฎเฅเค–เฅเคฏ เคฐเคฟเคชเฅ‰เคœเคฟเคŸเคฐเฅ€ เคฎเฅ‡เค‚ เคฎเคฟเคฒเคพ เคฆเคฟเคฏเคพ เคœเคพเคเค—เคพเฅค + +เคตเคฟเคธเฅเคคเฅƒเคค เคšเคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคนเคฎเคพเคฐเคพ [Contributing Guide](../../help/contributing.md) เคฆเฅ‡เค–เฅ‡เค‚เฅค diff --git a/ultralytics/docs/hi/models/index.md:Zone.Identifier b/ultralytics/docs/hi/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/mobile-sam.md b/ultralytics/docs/hi/models/mobile-sam.md new file mode 100755 index 0000000..ea9da4a --- /dev/null +++ b/ultralytics/docs/hi/models/mobile-sam.md @@ -0,0 +1,115 @@ +--- +comments: true +description: Ultralytics เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพ เคฎเฅ‡เค‚ MobileSAM เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เค”เคฐ เค‰เคธเค•เฅ‡ เคชเฅเคฐเคพเคฏเฅ‹เค—เคถเคพเคฒเคพ เคคเฅเคฒเคจเคพเคคเฅเคฎเค• เคตเคฟเคตเฅ‡เคšเคจ, เคฎเฅ‚เคฒ SAM เค•เฅ‡ เคธเคพเคฅ เคคเฅเคฒเคจเคพ เค”เคฐ เค‡เคธเฅ‡ Ultralytics เคขเคพเค‚เคšเฅ‡ เคฎเฅ‡เค‚ เคกเคพเค‰เคจเคฒเฅ‹เคก เค”เคฐ เคชเคฐเฅ€เค•เฅเคทเคฃ เค•เฅˆเคธเฅ‡ เค•เคฐเฅ‡เค‚เฅค เค…เคชเคจเฅ‡ เคฎเฅ‹เคฌเคพเค‡เคฒ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เค•เฅ‹ เคฌเฅ‡เคนเคคเคฐ เคฌเคจเคพเคเค‚เฅค +keywords: MobileSAM, Ultralytics, SAM, เคฎเฅ‹เคฌเคพเค‡เคฒ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ, Arxiv, GPU, API, เค›เคตเคฟ เคเคจเค•เฅ‹เคกเคฐ, เคฎเคพเคธเฅเค• เคกเคฟเค•เฅ‹เคกเคฐ, เคฎเฅ‰เคกเคฒ เคกเคพเค‰เคจเคฒเฅ‹เคก, เคชเคฐเฅ€เค•เฅเคทเคฃ เคชเคฆเฅเคงเคคเคฟ +--- + +![MobileSAM เคฒเฅ‹เค—เฅ‹](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# เคฎเฅ‹เคฌเคพเค‡เคฒ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เฅเค› เคญเฅ€ (MobileSAM) + +เคฎเฅ‹เคฌเคพเค‡เคฒSAM เคชเฅ‡เคชเคฐ [arXiv](https://arxiv.org/pdf/2306.14289.pdf) เคชเคฐ เค…เคฌ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเฅค + +MobileSAM เค•เฅ‡ เคธเค‚เคšเคพเคฒเคจ เค•เคพ เคเค• เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฎเฅเคชเฅเคฏเฅ‚เคŸเคฐ เคชเคฐ เคชเคนเฅเค‚เคšเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ เค‰เคธ [เคกเฅ‡เคฎเฅ‹ เคฒเคฟเค‚เค•](https://huggingface.co/spaces/dhkim2810/MobileSAM) 
เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡เฅค Mac i5 CPU เคชเคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฒเค—เคญเค— 3 เคธเฅ‡เค•เค‚เคก เค•เคพ เคธเคฎเคฏ เคฒเค—เคคเคพ เคนเฅˆเฅค เคนเค—เคฟเค‚เค— เคซเฅ‡เคธ เคกเฅ‡เคฎเฅ‹ เคชเคฐเคฟเคตเฅ‡เคถ เค”เคฐ เค•เคฎ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคตเคพเคฒเฅ‡ เคธเฅ€เคชเคฟเคฏเฅ‚ เคจเฅ‡ เคชเฅเคฐเคคเคฟเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคงเฅ€เคฎเฅ€ เค•เคฟเคฏเคพ เคนเฅˆ, เคฒเฅ‡เค•เคฟเคจ เคฏเคน เค…เคญเฅ€ เคญเฅ€ เคชเฅเคฐเคญเคพเคตเฅ€ เคขเค‚เค— เคธเฅ‡ เค•เคพเคฎ เค•เคฐเคคเคพ เคนเฅˆเฅค + +เคฎเฅ‹เคฌเคพเค‡เคฒSAM [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling), เค”เคฐ [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D) เคธเคนเคฟเคค เคตเคฟเคญเคฟเคจเฅเคจ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ เคฒเคพเค—เฅ‚ เคนเฅˆเฅค + +เคฎเฅ‹เคฌเคพเค‡เคฒSAM เคเค• เคเค•เคฒ GPU เคชเคฐ 100k เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ (เคฎเฅ‚เคฒ เค›เคตเคฟ เค•เคพ 1%) เค•เฅ‡ เคธเคพเคฅ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคนเฅ‹เคคเคพ เคนเฅˆ เค”เคฐ เค‡เคธเคฎเฅ‡เค‚ เคเค• เคฆเคฟเคจ เคธเฅ‡ เค•เคฎ เคธเคฎเคฏ เคฒเค—เคคเคพ เคนเฅˆเฅค เค‡เคธ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เค•เฅ‹เคก เคญเคตเคฟเคทเฅเคฏ เคฎเฅ‡เค‚ เค‰เคชเคฒเคฌเฅเคง เค•เคฐเคพเคฏเคพ เคœเคพเคเค—เคพเฅค + +## เค‰เคชเคฒเคฌเฅเคง เคฎเฅ‰เคกเคฒ, เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ เค”เคฐ เค‘เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคฎเฅ‹เคก + +เค‡เคธ เคคเคพเคฒเคฟเค•เคพ เคฎเฅ‡เค‚ เค‰เคชเคฒเคฌเฅเคง เคฎเฅ‰เคกเคฒ, เค‰เคจเค•เฅ‡ เคตเคฟเคถเคฟเคทเฅเคŸ เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคตเคœเคจ, เคตเฅ‡ เค•เคพเคฐเฅเคฏ เคœเคฟเคจเฅเคนเฅ‡เค‚ เคตเฅ‡ เคธเคฎเคฐเฅเคฅเคจ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เค”เคฐ เค‰เคจเค•เคพ เค…เคญเคฟเคจเฅเคจเคคเคฎ เคธเค‚เค—เคคเคคเคพ เค•เฅ‡ เคธเคพเคฅ เคตเคฟเคญเคฟเคจเฅเคจ เค‘เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคฎเฅ‹เคก (เค‡เค‚เคซเคฐเฅ‡เค‚เคธ, เคตเฅˆเคงเคพเคจเคฟเค•เฅ€, เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, เค”เคฐ เคจเคฟเคฐเฅเคฏเคพเคค) เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฟเค เค—เค เคนเฅˆเค‚, เคœเคฟเคจเฅเคนเฅ‡เค‚ เคธเคฎเคฐเฅเคฅเคฟเคค เคฎเฅ‹เคก เค•เฅ‡ เคฒเคฟเค โœ… emoji เค”เคฐ เค…เคธเคฎเคฐเฅเคฅเคฟเคค เคฎเฅ‹เคก เค•เฅ‡ เคฒเคฟเค โŒ emoji เคธเฅ‡ เคฆเคฐเฅเคถเคพเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค + +| เคฎเฅ‰เคกเคฒ เคชเฅเคฐเค•เคพเคฐ | เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคตเคœเคจ | เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ | เค‡เค‚เคซเคฐเฅ‡เค‚เคธ | เคตเฅˆเคงเคพเคจเคฟเค•เฅ€ | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ | เคจเคฟเคฐเฅเคฏเคพเคค | +|-------------|----------------------|--------------------------------------------|---------|----------|-----------|---------| +| MobileSAM | `mobile_sam.pt` | [เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## SAM เคธเฅ‡ MobileSAM เคฎเฅ‡เค‚ เค…เคจเฅเค•เฅ‚เคฒเคจ + +MobileSAM เคฎเฅ‚เคฒ SAM เค•เฅ€ เคคเคฐเคซ เคธเฅ‡ เคธเคฎเคพเคจ เคชเคพเค‡เคชเคฒเคพเค‡เคจ เคฌเคฐเค•เคฐเคพเคฐ เคฐเค–เคคเคพ เคนเฅˆ, เคนเคฎเคจเฅ‡ เคฎเฅ‚เคฒ เค•เฅ€ เคชเฅเคฐเฅ€-เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค—, เคชเฅ‹เคธเฅเคŸ-เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค— เค”เคฐ เคธเคญเฅ€ เค…เคจเฅเคฏ เค‡เค‚เคŸเคฐเคซเฅ‡เคธเฅ‹เค‚ เค•เฅ‹ เคธเคฎเฅเคฎเคฟเคฒเคฟเคค เค•เคฐ เคฆเคฟเคฏเคพ เคนเฅˆเฅค เค‡เคธเคฒเคฟเค, เคตเคฐเฅเคคเคฎเคพเคจ เคฎเฅ‡เค‚ เคฎเฅ‚เคฒ SAM เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ‡ เคฒเฅ‹เค— เคฎเคฟเคจเคฟเคฎเคฒ เคชเฅเคฐเคฏเคพเคธ เค•เฅ‡ เคธเคพเคฅ MobileSAM เคฎเฅ‡เค‚ เคŸเฅเคฐเคพเค‚เคธเคฟเคถเคจ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +MobileSAM เคฎเฅ‚เคฒ SAM เค•เฅ‡ เคธเคฎเคพเคจ เคชเคพเค‡เคชเคฒเคพเค‡เคจ เคฎเฅ‡เค‚ เค‰เคคเฅเคคเคฎ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เค•เฅ‡เคตเคฒ เค›เคตเคฟ เคเคจเฅเค•เฅ‹เคกเคฐ เคฎเฅ‡เค‚ เคชเคฐเคฟเคตเคฐเฅเคคเคจ 
+
+MobileSAM performs comparably to the original SAM and retains exactly the same pipeline except for a change in the image encoder. Specifically, we replace the original heavyweight ViT-H encoder (632M) with a smaller Tiny-ViT (5M). On a single GPU, MobileSAM operates at about 12ms per image: 8ms on the image encoder and 4ms on the mask decoder.
+
+The following table provides a comparison of ViT-based image encoders:
+
+| Image Encoder | Original SAM | MobileSAM |
+|---------------|--------------|-----------|
+| Parameters    | 611M         | 5M        |
+| Speed         | 452ms        | 8ms       |
+
+Both the original SAM and MobileSAM use the same prompt-guided mask decoder:
+
+| Mask Decoder | Original SAM | MobileSAM |
+|--------------|--------------|-----------|
+| Parameters   | 3.876M       | 3.876M    |
+| Speed        | 4ms          | 4ms       |
+
+Here is the comparison of the whole pipeline:
+
+| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM |
+|--------------------------|--------------|-----------|
+| Parameters               | 615M         | 9.66M     |
+| Speed                    | 456ms        | 12ms      |
+
+The performance of MobileSAM and the original SAM is demonstrated below using both a point and a box as prompts.
+
+![Image with Point as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+![Image with Box as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+With its superior performance, MobileSAM is approximately 5 times smaller and 7 times faster than the current FastSAM. More details are available at the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM).
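+Because both models share one interface in Ultralytics, you can sketch a rough latency comparison of your own by swapping only the weights file. This is an informal measurement idea under local-hardware assumptions, not an official benchmark, and the sample image path is a placeholder:
+
+```python
+import time
+
+from ultralytics import SAM
+
+# Same API, different weights: only the image encoder differs between the two
+for weights in ('sam_b.pt', 'mobile_sam.pt'):
+    model = SAM(weights)
+    start = time.perf_counter()
+    model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+    elapsed = time.perf_counter() - start
+    print(f'{weights}: {elapsed:.2f}s (first call includes warm-up overhead)')
+```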
+
+## Testing MobileSAM in Ultralytics
+
+Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both Point and Box prompts.
+
+### Model Download
+
+You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt).
+
+### Point Prompt
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a point prompt
+        model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+### Box Prompt
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a box prompt
+        model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+        ```
+
+We have implemented both `MobileSAM` and `SAM` using the same API. For more usage information, please see the [SAM page](sam.md).
+
+## Citations and Acknowledgements
+
+If you find MobileSAM useful in your research or development work, please consider citing our paper:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{mobile_sam,
+          title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications},
+          author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon},
+          journal={arXiv preprint arXiv:2306.14289},
+          year={2023}
+        }
+        ```
diff --git a/ultralytics/docs/hi/models/rtdetr.md b/ultralytics/docs/hi/models/rtdetr.md new file mode 100755 index 0000000..383d780 --- /dev/null +++ b/ultralytics/docs/hi/models/rtdetr.md @@ -0,0 +1,94 @@
+---
+comments: true
+description: Explore Baidu's RT-DETR, an advanced and adaptable real-time object detector powered by Vision Transformers, including ready-to-use pre-trained models.
+keywords: RT-DETR, Baidu, Vision Transformers, object detection, real-time performance, CUDA, TensorRT, IoU-aware query selection, Ultralytics, Python API, PaddlePaddle
+---
+
+# Baidu's RT-DETR: A Vision Transformer-Based Real-Time Object Detector
+
+## Overview
+
+Real-Time Detection Transformer (RT-DETR), developed by Baidu, is a cutting-edge end-to-end object detector that provides real-time performance while maintaining high accuracy. It leverages the power of Vision Transformers (ViT) to efficiently process multiscale features by decoupling intra-scale interaction and cross-scale fusion. RT-DETR is highly adaptable, supporting flexible adjustment of inference speed using different decoder layers without retraining. The model excels on accelerated backends such as CUDA with TensorRT, outperforming many other real-time object detectors.
+
+![Model example image](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Overview of Baidu's RT-DETR.** The RT-DETR model architecture diagram shows the last three stages of the backbone {S3, S4, S5} as the input to the encoder. The efficient hybrid encoder transforms multiscale features into a sequence of image features through intra-scale feature interaction (AIFI) and the cross-scale feature-fusion module (CCFM). IoU-aware query selection is employed to select a fixed number of image features to serve as initial object queries for the decoder. Finally, the decoder with auxiliary prediction heads iteratively optimizes object queries to generate boxes and confidence scores ([source](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Key Features
เค‡เค‚เคŸเคฐเฅ‡เค•เฅเคถเคจ เค”เคฐ เค•เฅเคฐเฅ‰เคธ-เคธเฅเค•เฅ‡เคฒ เคซเฅเคฏเฅ‚เคœเคจ เค•เฅ‹ เค…เคฒเค— เค•เคฐเค•เฅ‡ เคฌเคนเฅเคธเฅเคคเคฐเฅ€เคฏ เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เฅ‹ เคชเฅเคฐเฅ‹เคธเฅ‡เคธ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เค•เคŸเคฟเคนเคฐเคฃ เคฏเฅ‹เค—เฅเคฏ เคนเคพเค‡เคฌเฅเคฐเคฟเคก เคเคจเฅเค•เฅ‹เคกเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคนเฅˆเฅค เคฏเคน เค…เคฆเฅเคตเคฟเคคเฅ€เคฏ เคตเคฟเคœเคผเคจ เคŸเฅเคฐเคพเค‚เคธเคซเฅ‰เคฐเฅเคฎเคฐ เคธเฅ‡ เค†เคงเคฟเค• เคนเคฟเคฎเคพเค‚เคถเฅ€เคฏ เคฒเคพเค—เคค เค•เฅ‹ เค•เคฎ เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค +- **IoU-เคœเคพเค—เคฐเฅ‚เค• เค•เฅเคตเฅ‡เคฐเฅ€ เคšเคฏเคจ:** Baidu เค•เฅ‡ RT-DETR เคจเฅ‡ IoU-เคœเคพเค—เคฐเฅ‚เค• เค•เฅเคตเฅ‡เคฐเฅ€ เคšเคฏเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค†เคฆเคฐเฅเคถ เค›เคตเคฟ เคšเคฏเคจ เค•เฅ‹ เคธเฅเคงเคพเคฐเคพ เคนเฅˆเฅค เค‡เคธเคธเฅ‡ เคฎเฅ‰เคกเคฒ เคฆเฅเคตเคพเคฐเคพ เคธเฅ€เคจ เคฎเฅ‡เค‚ เคธเคฌเคธเฅ‡ เคชเฅเคฐเคพเคธเค‚เค—เคฟเค• เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคชเคฐ เค•เฅ‡เคจเฅเคฆเฅเคฐเคฟเคค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ€ เคจเคฟเคถเคพเคจเคพ เคธเฅเคชเคทเฅเคŸเคคเคพ เคฎเฅ‡เค‚ เคธเฅเคงเคพเคฐ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค +- **เค…เคจเฅเค•เฅ‚เคฒเคจเคฏเฅ‹เค—เฅเคฏ เค…เคจเฅเคฎเคพเคจ เคชเฅเคฐเคฆเคพเคจ:** Baidu เค•เฅ‡ RT-DETR เคจเฅ‡ เคชเฅเคจเคฐเฅเคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เค†เคตเคถเฅเคฏเค•เคคเคพ เค•เฅ‡ เคฌเคฟเคจเคพ เคญเคฟเคจเฅเคจ เคกเคฟเค•เฅ‹เคกเคฐ เคฒเฅ‡เคฏเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค…เคจเฅเคฎเคพเคจ เค•เฅ€ เค—เคคเคฟ เค•เฅ€ เคฎเคพเคจเฅเคฏเคคเคพ เคฎเฅ‡เค‚ เคธเฅเคตเคฟเคงเคพเคเค‚ เคธเคฎเคฐเฅเคฅเคฟเคค เค•เฅ€ เคนเฅˆเค‚เฅค เคฏเคน เค…เคจเฅเค•เฅ‚เคฒเคจเคฏเฅ‹เค—เฅเคฏเคคเคพ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคธเฅเคฅเคฟเคคเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เคตเฅเคฏเคพเคตเคนเคพเคฐเคฟเค• เคฒเคพเค—เฅ‚ เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเคนเคพเคฏเคคเคพ เค•เคฐเคคเฅ€ เคนเฅˆเฅค + +## เคชเฅเคฐเฅ€-เคคเคพเคฐเค•เคฟเคค เคฎเฅ‰เคกเคฒ + +Ultralytics Python API เคตเคฟเคญเคฟเคจเฅเคจ เคธเฅเค•เฅ‡เคฒเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคชเฅเคฐเฅ€-เคคเคพเคฐเค•เคฟเคค PaddlePaddle RT-DETR เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ: + +- RT-DETR-L: COCO val2017 เคชเคฐ 53.0% AP, T4 GPU เคชเคฐ 114 FPS +- RT-DETR-X: COCO val2017 เคชเคฐ 54.8% AP, T4 GPU เคชเคฐ 74 FPS + +## เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ + +เคฏเคน เค‰เคฆเคพเคนเคฐเคฃ เคธเคฐเคฒ RT-DETRR เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค”เคฐ เคธเค‚เคฆเคฐเฅเคญ เค‰เคฆเคพเคนเคฐเคฃ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เคชเฅ‚เคฐเฅ‡ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เค‡เคจ เค”เคฐ เค…เคจเฅเคฏ [modes](../modes/index.md) เคชเคฐ เคฆเฅ‡เค–เฅ‡เค‚: [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) เค”เคฐ [Export](../modes/export.md) เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผ เคชเฅƒเคทเฅเค เฅ‹เค‚เฅค + +!!! 
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models with different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on T4 GPU
+
+## Usage Examples
+
+This example provides simple RT-DETR training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Load a COCO-pretrained RT-DETR-l model
+        model = RTDETR('rtdetr-l.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the RT-DETR-l model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a COCO-pretrained RT-DETR-l model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained RT-DETR-l model and run inference on the 'bus.jpg' image
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+This table presents the model types, the specific pre-trained weights, the tasks supported by each model, and the various modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) that are supported, indicated by ✅ emojis; a minimal export sketch follows the table.
+
+| Model Type          | Pre-trained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|---------------------|---------------------|----------------------------------------|-----------|------------|----------|--------|
+| RT-DETR Large       | `rtdetr-l.pt`       | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
+| RT-DETR Extra-Large | `rtdetr-x.pt`       | [Object Detection](../tasks/detect.md) | ✅         | ✅          | ✅        | ✅      |
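+Since the table marks Export as supported, here is a minimal sketch using the generic Ultralytics export API; treat the ONNX target as one example format rather than a guaranteed choice for every environment:
+
+```python
+from ultralytics import RTDETR
+
+# Load a COCO-pretrained RT-DETR-l model
+model = RTDETR('rtdetr-l.pt')
+
+# Export to ONNX (example format); returns the path of the exported file
+path = model.export(format='onnx')
+print(f'Exported to {path}')
+```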
+
+## Citations and Acknowledgements
+
+If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069):
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{lv2023detrs,
+              title={DETRs Beat YOLOs on Real-time Object Detection},
+              author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
+              year={2023},
+              eprint={2304.08069},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to acknowledge Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community. Their contribution to the field with the development of the Vision Transformer-based real-time object detector, RT-DETR, is greatly appreciated.
+
+*Keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, PaddlePaddle RT-DETR, real-time object detection, Vision Transformer-based object detection, pre-trained PaddlePaddle RT-DETR models, Baidu's RT-DETR usage, Ultralytics Python API*
diff --git a/ultralytics/docs/hi/models/sam.md b/ultralytics/docs/hi/models/sam.md new file mode 100755 index 0000000..8057178 --- /dev/null +++ b/ultralytics/docs/hi/models/sam.md @@ -0,0 +1,226 @@
+---
+comments: true
+description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time image segmentation. Learn about its promptable segmentation, zero-shot performance, and how to use it.
+keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
+---
+
+# Segment Anything Model (SAM)
+
+Welcome to the frontier of image segmentation with the Segment Anything Model, or SAM. This revolutionary model has changed the game by introducing promptable image segmentation with real-time performance, setting new standards in the field.
+
+## Introduction to SAM: The Segment Anything Model
+
+The Segment Anything Model, or SAM, is a cutting-edge image segmentation model that allows for promptable segmentation, providing unparalleled versatility in image analysis tasks. SAM forms the heart of the Segment Anything initiative, a groundbreaking project that introduces a novel model, task, and dataset for image segmentation.
+
+SAM's advanced design allows it to adapt to new image distributions and tasks without prior knowledge, a feature known as zero-shot transfer. Trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks spread over 11 million carefully curated images, SAM has displayed impressive zero-shot performance, surpassing previous fully supervised results in many cases.
+
+![Sample dataset image](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg)
+Example images with overlaid masks from our newly introduced dataset, SA-1B. SA-1B contains 11M diverse, high-resolution, licensed, and privacy-protecting images and 1.1B high-quality segmentation masks. These masks were annotated fully automatically by SAM, and as verified by human ratings and numerous experiments, they are of high quality and diversity. Images are grouped by number of masks per image for visualization (there are roughly 100 masks per image on average).
+
+## Key Features of the Segment Anything Model (SAM)
+
+- **Promptable Segmentation Task:** SAM was designed with a promptable segmentation task in mind, allowing it to generate valid segmentation masks from any given prompt, such as spatial or text clues identifying an object.
+- **Advanced Architecture:** The Segment Anything Model employs a powerful image encoder, a prompt encoder, and a lightweight mask decoder. This unique architecture enables flexible prompting, real-time mask computation, and ambiguity awareness in segmentation tasks, and allows the model to adapt to new regions and image distributions without prior knowledge.
+- **The SA-1B Dataset:** Introduced by the Segment Anything project, the SA-1B dataset features over 1 billion masks on 11 million images. As the largest segmentation dataset to date, it provides SAM with a diverse and large-scale source of training data.
+- **Zero-Shot Performance:** SAM displays outstanding zero-shot performance across various segmentation tasks, making it a ready-to-use tool for diverse applications with minimal need for prompt engineering.
+
+For an in-depth look at the Segment Anything Model and the SA-1B dataset, please visit the [Segment Anything website](https://segment-anything.com) and check out the research paper [Segment Anything](https://arxiv.org/abs/2304.02643).
+
+## Available Models, Supported Tasks, and Operating Modes
+
+This table presents the available models with their specific pre-trained weights, the tasks they support, and their compatibility with different operating modes such as Inference, Validation, Training, and Export.
+
+| Model Type | Pre-trained Weights | Supported Tasks       | Inference | Validation | Training | Export |
+|------------|---------------------|-----------------------|-----------|------------|----------|--------|
+| SAM base   | `sam_b.pt`          | Instance segmentation | โœ…         | โŒ          | โŒ        | โœ…      |
+| SAM large  | `sam_l.pt`          | Instance segmentation | โœ…         | โŒ          | โŒ        | โœ…      |
+
+## How to Use SAM: Versatility and Power in Image Segmentation
+
+The Segment Anything Model can be used for downstream tasks that go beyond its training data, including edge detection, object proposal generation, instance segmentation and preliminary text-to-mask prediction. With prompt engineering, SAM can quickly adapt to new tasks and data distributions in a zero-shot manner, making it a flexible and effective tool for all your image segmentation needs.
+
+### SAM Prediction Example
+
+!!! Example "Segment with prompts"
+
+    Segment the image using the given prompts.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Load a model
+        model = SAM('sam_b.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Run inference with a bounding-box prompt
+        model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+
+        # Run inference with a point prompt
+        model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
Example "เคธเคฌ เค•เฅเค› เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐเฅ‡เค‚" + + เคชเฅ‚เคฐเฅ€ เค›เคตเคฟ เค•เฅ‹ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐเฅ‡เค‚เฅค + + === "เคŸเค–เฅเคคเฅ€" + + ```python + from ultralytics import SAM + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = SAM('sam_b.pt') + + # เคฎเฅ‰เคกเคฒ เคธเฅ‚เคšเคจเคพ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐเฅ‡เค‚ (เคตเฅˆเค•เคฒเฅเคชเคฟเค•) + model.info() + + # เค‡เคจเคซเคผเคฐเฅ‡เค‚เคธ เคšเคฒเคพเคเค‚ + model('เคชเคฅ/เคซเคผเคพเค‡เคฒ/เคธเคฆเฅƒเคถ เค›เคตเคฟ.เคœเฅ‡เคชเฅ€เคœเฅ€') + ``` + + === "CLI" + + ```เคฌเฅˆเคถ + # เคธเคฌ เค•เฅเค› SEKเฅˆเค— เค•เฅ‡ เคธเคพเคฅ SAM เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เค‡เคจเคซเคผเคฐเคจเฅเคธ เคšเคฒเคพเคเค‚ + yolo predict model=sam_b.pt source=เคชเคฅ/เคซเคผเคพเค‡เคฒ/เคธเคฆเฅƒเคถ เค›เคตเคฟ.เคœเฅ‡เคชเฅ€เคœเฅ€ + ``` + +- เคฏเคนเคพเค‚ เคฏเคน เคคเคฐเฅเค• เคนเฅˆ เค•เคฟ เค†เคช เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸ (เคฌเฅ‰เค•เฅเคธ / เคชเฅ‰เค‡เค‚เคŸ / เคฎเคพเคธเฅเค•) เคชเคพเคธ เคจเคนเฅ€เค‚ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เคคเฅ‹ เคชเฅ‚เคฐเฅ€ เค›เคตเคฟ เค•เฅ‹ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค•เคฐเฅ‡เค‚เฅค + +!!! Example "SAMPredictor เค‰เคฆเคพเคนเคฐเคฃ" + + เค‡เคธ เคคเคฐเคน เคธเฅ‡ เค†เคช เคเค• เคฌเคพเคฐ เค›เคตเคฟ เคธเฅ‡เคŸ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ เคฌเคพเคฐ-เคฌเคพเคฐ เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸ เค‡เคจเฅเคซเคผเคฐเฅ‡เค‚เคธ เคšเคฒเคพ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค›เคตเคฟ เค•เฅ‹ เคฌเคพเคฐ เคฌเคพเคฐ เค‡เคจเฅเค•เฅ‹เคกเคฐ เค•เฅ‡ เคฎเคงเฅเคฏ เคฎเฅ‡เค‚ เคจเคนเฅ€เค‚ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเคเฅค + + === "เคชเฅเคฐเฅ‹เคฎเฅเคชเฅเคŸ เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€ เค•เคฐเคจเคพ" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # SAMPredictor เคฌเคจเคพเคเค‚ + overrides = dict(conf=0.25, task='เค•เคŸเคพ', mode='เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # เคšเคฟเคคเฅเคฐ เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + predictor.set_image("ultralytics/assets/zidane.jpg") # เคšเคฟเคคเฅเคฐ เคซเคผเคพเค‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # เคเคจเคชเฅ€. เคเคธ. เคเคจ. เคฆเฅเคตเคพเคฐเคพ เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # เคšเคฟเคคเฅเคฐ เคฐเฅ€เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + predictor.reset_image() + ``` + + เค…เคคเคฟเคฐเคฟเค•เฅเคค เคคเคคเฅเคตเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคธเคฌ เค•เฅเค› เค•เฅ‹ เคŸเฅเค•เคกเคผเฅ‹เค‚ เคฎเฅ‡เค‚ เคตเคฟเคญเคพเคœเคฟเคค เค•เคฐเฅ‡เค‚เฅค + + === "เคธเคฌ เค•เฅเค› เค•เคพ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # SAMPredictor เคฌเคจเคพเคเค‚ + overrides = dict(conf=0.25, task='เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ', mode='เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # เค…เคคเคฟเคฐเคฟเค•เฅเคค เคคเคคเฅเคตเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ + results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64) + ``` + +- `เคธเคฌ เค•เฅเค› เค•เคพ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ` เค•เฅ‡ เคฒเคฟเค เค…เคคเคฟเคฐเคฟเค•เฅเคค เคคเคคเฅเคตเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค…เคงเคฟเค• เคฆเฅ‡เค–เฅ‡เค‚ [`Predictor/generate` Reference](../../../reference/models/sam/predict.md). 
+
+## SAM Comparison vs YOLOv8
+
+Here we compare SAM's smallest model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):
+
+| Model                                         | Size                       | Parameters              | Speed (CPU)                |
+|-----------------------------------------------|----------------------------|-------------------------|----------------------------|
+| SAM's smallest, SAM-b                         | 358 MB                     | 94.7 M                  | 51096 ms/im                |
+| [MobileSAM](mobile-sam.md)                    | 40.7 MB                    | 10.1 M                  | 46122 ms/im                |
+| [FastSAM-s with YOLOv8 backbone](fast-sam.md) | 23.7 MB                    | 11.8 M                  | 115 ms/im                  |
+| Ultralytics [YOLOv8n-seg](yolov8.md)          | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x fewer) | **59 ms/im** (866x faster) |
+
+This comparison shows the order-of-magnitude differences in model size and speed. While SAM offers unique capabilities for automatic segmentation, Ultralytics' existing segmentation models are far smaller, faster and more computationally efficient by comparison.
+
+Tests were run on a 2023 Apple M2 MacBook with 16GB of RAM. To reproduce this test:
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # SAM-b เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒ เค•เคฐเฅ‡เค‚ + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # เคฎเฅ‹เคฌเคพเค‡เคฒSAM เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒ เค•เคฐเฅ‡เค‚ + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # FastSAM-s เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒ เค•เคฐเฅ‡เค‚ + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # YOLOv8n-seg เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒ เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## เคธเฅเคตเคค: เคŸเคฟเคชเคฃเฅเคฃเฅ€เค•เคฐเคฃ: เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‡ เคฒเคฟเค เคเค• เคคเฅเคตเคฐเคฟเคค เคฎเคพเคฐเฅเค— + +เคธเฅเคตเคค: เคŸเคฟเคชเคฃเฅเคฃเฅ€เค•เคฐเคฃ SAM เค•เฅ€ เคเค• เคฎเฅเค–เฅเคฏ เคธเฅเคตเคฟเคงเคพ เคนเฅˆ เคœเฅ‹ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‹ เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ [เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ](https://docs.ultralytics.com/datasets/segment) เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเฅ€ เคนเฅˆเฅค เคฏเคน เคธเฅเคตเคฟเคงเคพ เคฌเคกเคผเฅ€ เคธเค‚เค–เฅเคฏเคพ เคฎเฅ‡เค‚ เค›เคตเคฟเคฏเฅ‹เค‚ เค•เฅ‹ เค•เฅเคตเคพเคฒเคฟเคŸเฅ€ เค•เค‚เคŸเฅเคฐเฅ‹เคฒ เคธเฅ‡ เค†เคจเฅ‹เคค เค•เคฐเคจเฅ‡ เค•เฅ€ เคœเคฐเฅ‚เคฐเคค เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคธเคฎเคฏเคฐเฅ‹เค•เฅ€ เคฎเฅ‚เคฒเฅเคฏ เค…เค‚เค•เคจ เค•เฅ€ เคœเคฐเฅ‚เคฐเคค เค›เฅ‹เคกเคผเคคเฅ€ เคนเฅˆเฅค + +### เค…เคชเคจเฅ‡ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค เค—เคคเคฟเคถเฅ€เคฒเคคเคพ เคจเคฟเคฐเฅเคฎเคฟเคค เค•เคฐเฅ‡เค‚ + +Ultralytics เคซเคผเฅเคฐเฅ‡เคฎเคตเคฐเฅเค• เค•เฅ‡ เคธเคพเคฅ เคธเฅโ€เคตเคธเฅโ€เคฅ เคฆเคฟเค–เคพเคˆ เคฆเฅ‡เคจเฅ‡ เคตเคพเคฒเฅ‡ เคตเคฐเฅเค—เคพเคจเฅเคธเคพเคฐ [เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ](https://docs.ultralytics.com/datasets/segment) เคฌเคจเคพเคเค‚เฅค เคเคชเคจเฅ€ เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เค†เคฆเฅ‡เคถ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="เคชเคฅ/เคธเคพเคฎเค—เฅเคฐเฅ€", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| เคคเคคเฅเคต | เคชเฅเคฐเค•เคพเคฐ | เคตเคฟเคตเคฐเคฃ | เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ | +|------------|-------------------|--------------------------------------------------------------------------------------------------------------------------------------------|--------------| +| เคกเฅ‡เคŸเคพ | str | เค…เคจเฅเคฎเคพเคจเคฟเคค เคฎเฅ‚เคฒ เค›เคตเคฟเคฏเคพเค เคœเฅ‹ เค†เค‚เคถเคฟเค• เคชเฅเคฐเคคเคฟเคธเคพเคฆเคพเคจ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเคฎเคพเคจเคฟเคค เคนเฅˆเค‚เฅค | | +| det_model | str, เคตเฅˆเค•เคฒเฅเคชเคฟเค• | เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLO เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒเฅค 'yolov8x.pt' เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฐเฅ‚เคช เคธเฅ‡ เคนเฅ‹เคคเคพ เคนเฅˆเฅค | 'yolov8x.pt' | +| sam_model | str, เคตเฅˆเค•เคฒเฅเคชเคฟเค• | เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค SAM เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเฅ‰เคกเคฒเฅค เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฐเฅ‚เคช เคธเฅ‡ 'sam_b.pt' เคนเฅˆเฅค | 'sam_b.pt' | +| device | str, เคตเฅˆเค•เคฒเฅเคชเคฟเค• | เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคตเคพเค‡เคธเฅค เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฐเฅ‚เคช เคธเฅ‡ เคเค• เค–เคพเคฒเฅ€ เคธเฅเคŸเฅเคฐเคฟเค‚เค— เคนเฅ‹เคคเคพ เคนเฅˆ (เคธเฅ€เคชเฅ€เคฏเฅ‚ เคฏเคพ เคœเฅ€เคชเฅ€เคฏเฅ‚, เคฏเคฆเคฟ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆ)เฅค | | +| output_dir | str, เค•เฅ‹เคˆ เคตเฅˆเค•เคฒเฅเคชเคฟเค• | เคŸเคฟเคชเคฃเฅเคฃเฅ€เคค เคชเคฐเคฟเคฃเคพเคฎ เคธเคนเฅ‡เคœเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพเฅค เค‡เคธเฅ‡ 'เคกเฅ‡เคŸเคพ' เค•เฅ‡ เคธเคฎเคพเคจ เคกเคฟเคฐเฅ‡เค•เฅเคŸเคฐเฅ€ เคฎเฅ‡เค‚ "เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพเคเค‚" เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฎเคพเคจ เคจเคนเฅ€เค‚ เคนเฅ‹เคคเคพ เคนเฅˆเฅค | เค•เฅ‹เคˆ | + +`เค‘เคŸเฅ‹_เคคเคฟเคชเคฃเฅเคฃเฅ€` เคซเคผเค‚เค•เฅเคถเคจ เค†เคชเค•เฅ€ เค›เคตเคฟเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคฏเคพเคคเคพเคฏเคพเคค เค•เคพ เค•เคพเคฐเฅเคฏเค•เฅเคฐเคฎ เคฆเฅ‡เค–เคคเฅ€ เคนเฅˆ เค”เคฐ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคธเฅ‡เคŸ เค•เฅ‹ เค†เคชเค•เฅ‡ เค›เคตเคฟ เค•เฅ‡ เคชเคฅ เคธเฅ‡เคŸ เค•เคฐเคคเฅ€ เคนเฅˆ, เคตเฅˆเค•เคฒเฅเคชเคฟเค• เคคเคคเฅเคตเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค”เคฐ SAM เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเฅ‰เคกเคฒ, เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคตเคพเค‡เคธ เค”เคฐ เคŸเคฟเคชเคฃเฅเคฃเฅ€เคค เคชเคฐเคฟเคฃเคพเคฎ เคธเคนเฅ‡เคœเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เคœเฅˆเคธเฅ‡, เค‡เคจ เค•เคพเคฐเฅเคฏเค•เฅเคฐเคฎ เคคเคคเฅเคตเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅเคเฅค + +เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เคธเฅเคตเคค: เคŸเคฟเคชเคฃเฅเคฃเฅ€เค•เคฐเคฃ เค›เคตเคฟ เคเคจเฅ‹เคŸเฅ‡เคถเคจ เคเค• เคฌเคกเคผเฅ‡ เคชเฅˆเคฎเคพเคจเฅ‡ เคชเคฐ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคพเคฏเฅ‹เค—เคฟเค• เค”เคฐ เคŠเคฐเฅเคœเคพเคตเคพเคจ เค‰เคชเค•เคพเคฐเคฃ เคนเฅˆเฅค เคฏเคน เคธเฅเคตเคฟเคงเคพ เคตเคฟเคถเฅ‡เคท เคฐเฅ‚เคช เคธเฅ‡ เค‰เคธ เคธเคฎเคฏ เคธเฅเคฐเฅ‡เค–เคฟเคคเคน เคฐเฅ‡เค–เคพเค‚เค•เฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคธเคฎเคฏ-เคถเฅ€เค˜เฅเคฐเคคเคพ เคฆเฅเคตเคพเคฐเคพ เคคเคฏ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ, เค‰เคšเฅเคš เค—เฅเคฃเคตเคคเฅเคคเคพ เคตเคพเคฒเฅ‡ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค…เคงเคฟเค• เคœเคฐเฅ‚เคฐเฅ€ เคนเฅ‹เคคเคพ เคนเฅˆเฅค เค‡เคธ เคธเฅเคตเคฟเคงเคพ เค•เคพ 
เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏเคธเคฟเคฆเฅเคง เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค”เคฐ เคกเฅ‡เคตเคฒเคชเคฐเฅเคธ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคฆเฅเคฏเคฎเคพเคจ เค›เคตเคฟ เคธเค‚เค—เฅเคฐเคนเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคœเคŸเคฟเคฒ เคธเฅ‚เค•เฅเคทเฅเคฎ เคฎเฅ‰เคกเคฒ เคตเคฟเค•เคพเคธ เค”เคฐ เคฎเคพเคจเฅเคฏเคพเคจเฅเคฏเฅ‹เค— เค•เคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคตเคฟเค•เคฒเฅเคช เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ€ เคนเฅˆเฅค + +## เคชเฅเคฐเคถเค‚เคธเคพ เค”เคฐ เค†เคญเคพเคฐ + +เคฏเคฆเคฟ เค†เคช เค…เคชเคจเฅ‡ เคถเฅ‹เคง เคฏเคพ เคตเคฟเค•เคพเคธ เค•เฅ‡ เคฒเคฟเค SAM เค•เคพ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เค•เฅƒเคชเคฏเคพ เคนเคฎเคพเคฐเฅ‡ เคชเฅ‡เคชเคฐ เค•เฅ‹ เค‰เคฆเคพเคนเคฐเคฃเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‹ เคตเคฟเคšเคพเคฐ เค•เคฐเฅ‡เค‚: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +เคนเคฎ เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฐ เคตเคฟเคœเฅเคžเคพเคจ เคธเคฎเฅเคฆเคพเคฏ เค•เฅ‡ เคฒเคฟเค เค‡เคธ เคฎเฅ‚เคฒเฅเคฏเคตเคพเคจ เคธเค‚เคธเคพเคงเคจ เค•เฅ‹ เคจเคฟเคฐเฅเคฎเคฟเคค เค”เคฐ เคฌเคจเคพเค เคฐเค–เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคคเคพเคฐเฅ€เคซ เค•เคฐเคจเคพ เคšเคพเคนเฅ‡เค‚เค—เฅ‡เฅค + +*เคธเค‚เค•เฅ‡เคค: Segment Anything, Segment Anything Model, เคธเคฎเฅเคฆเฅเคฐเฅ€, Meta SAM, เค›เคตเคฟ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ, promptable เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ, zero-shot เคชเฅเคฐเคฆเคฐเฅเคถเคจ, SA-1B เคกเฅ‡เคŸเคพ เคธเฅ‡เคŸ, เค‰เคจเฅเคจเคค เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ, เคธเฅเคตเคค: เคŸเคฟเคชเคฃเฅเคฃเฅ€เค•เคฐเคฃ, Ultralytics, เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒเฅ‡เค‚, SAM เคฌเฅ‡เคธ, SAM-เคฒเคพเคฐเฅเคœ, เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ, เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฐ เคตเคฟเคœเฅเคžเคพเคจ, AI, artificial intelligence, machine learning, data annotation, segmentation masks, เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ, YOLO เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ, bibtex, Meta AI. 
diff --git a/ultralytics/docs/hi/models/sam.md:Zone.Identifier b/ultralytics/docs/hi/models/sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/hi/models/sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/hi/models/yolo-nas.md b/ultralytics/docs/hi/models/yolo-nas.md
new file mode 100755
index 0000000..54c44a5
--- /dev/null
+++ b/ultralytics/docs/hi/models/yolo-nas.md
@@ -0,0 +1,119 @@
+---
+comments: true
+description: YOLO-NAS is a superior object detection model. Explore detailed documentation on its features, pre-trained models, usage with the Ultralytics Python API, and more.
+keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
+---
+
+# YOLO-NAS
+
+## Overview
+
+Developed by Deci AI, YOLO-NAS is a groundbreaking object detection foundational model. It is the product of advanced Neural Architecture Search technology, designed to address the limitations of previous YOLO models. With substantial improvements in quantization support and the accuracy-latency trade-off, YOLO-NAS represents a major leap forward in object detection.
+
+![Model example image](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png)
+**Overview of YOLO-NAS.** YOLO-NAS employs quantization-aware blocks and selective quantization for optimized performance. When converted to its INT8 quantized version, the model shows only a minimal accuracy drop compared to other models. These advances culminate in a superior architecture with exceptional object detection capabilities and outstanding performance.
+
+### Key Features
+
+- **Quantization-Friendly Basic Block:** YOLO-NAS introduces a new basic block that is friendly to quantization, addressing one of the significant limitations of previous YOLO models.
เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฏเฅ‹เคœเคจเคพเคเค‚ เค”เคฐ เคชเฅ‹เคธเฅเคŸ-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅเคตเคพเค‚เคŸเคพเค‡เคœเคผเฅ‡เคถเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ เคคเคพเค•เคฟ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคฎเฅ‡เค‚ เคธเฅเคงเคพเคฐ เคนเฅ‹ เคธเค•เฅ‡เฅค +- **เค‘เคŸเฅ‹เคเคจเคเคธเฅ€ เค…เคจเฅเค•เฅ‚เคฒเคจ เค”เคฐ เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค:** YOLO-NAS เค‘เคŸเฅ‹เคเคจเคเคธเฅ€ เค…เคจเฅเค•เฅ‚เคฒเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ COCO, Objects365 เค”เคฐ Roboflow 100 เคœเฅˆเคธเฅ‡ เคชเฅเคฐเคฎเฅเค– เคกเฅ‡เคŸเคพเคธเฅ‡เคŸเฅ‹เค‚ เคชเคฐ เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค เคนเฅ‹เคคเคพ เคนเฅˆเฅค เคฏเคน เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค เค‡เคธเฅ‡ เค‰เคคเฅเคชเคพเคฆเคจ เคตเคพเคคเคพเคตเคฐเคฃ เคฎเฅ‡เค‚ เคจเฅ€เคšเฅ‡ เคชเคกเคผเคจเฅ‡ เคตเคพเคฒเฅ‡ เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค…เคคเฅเคฏเค‚เคค เค‰เคชเคฏเฅเค•เฅเคค เคฌเคจเคพเคคเคพ เคนเฅˆเฅค + +## เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค เคฎเฅ‰เคกเคฒ + +Ultralytics เคฆเฅเคตเคพเคฐเคพ เคชเฅเคฐเคฆเคพเคจ เค•เฅ€ เค—เคˆ เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค YOLO-NAS เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เค†เค—เคพเคฎเฅ€ เคชเฅ€เคขเคผเฅ€ เค•เฅ€ เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ เค•เฅ€ เคถเค•เฅเคคเคฟ เค•เคพ เค…เคจเฅเคญเคต เค•เคฐเฅ‡เค‚เฅค เคฏเฅ‡ เคฎเฅ‰เคกเคฒ เคธเฅเคชเฅ€เคก เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เคฆเฅ‹เคจเฅ‹เค‚ เคฎเฅ‡เค‚ เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเค เค—เค เคนเฅˆเค‚เฅค เค…เคชเคจเฅ€ เคตเคฟเคถเคฟเคทเฅเคŸ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคญเคฟเคจเฅเคจ เคตเคฟเค•เคฒเฅเคชเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅ‡ เคšเฅเคจเฅ‡เค‚: + +| เคฎเฅ‰เคกเคฒ | mAP | เคฒเฅ‡เคŸเฅ‡เค‚เคธเฅ€ (เคฎเคฟเคฒเฅ€เคธเฅ‡เค•เค‚เคก) | +|------------------|-------|---------------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคฎเฅ‰เคกเคฒ เคตเฅ‡เคฐเคฟเคเค‚เคŸ mAP เค”เคฐ เคฒเฅ‡เคŸเฅ‡เค‚เคธเฅ€ เค•เฅ‡ เคฌเฅ€เคš เคธเค‚เคคเฅเคฒเคจ เค•เคพ เคชเฅเคฐเคธเฅเคคเคพเคต เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเค เค—เค เคนเฅˆเค‚, เคœเฅ‹ เค†เคชเค•เฅ‡ เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค”เคฐ เค—เคคเคฟ เคฆเฅ‹เคจเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค†เคชเค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฎเคฆเคฆ เค•เคฐเฅ‡เค—เคพเฅค + +## เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ + +Ultralytics เคจเฅ‡ YOLO-NAS เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‹ เค†เคชเค•เฅ‡ Python เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เคฎเฅ‡เค‚ เค†เคธเคพเคจเฅ€ เคธเฅ‡ เคเค•เฅ€เค•เฅƒเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคนเคฎเคพเคฐเฅ‡ `ultralytics` เคชเคพเค‡เคฅเคจ เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคธเฅเคตเคฟเคงเคพเคœเคจเค• เคชเคพเคฏเคฅเคจ API เคชเฅเคฐเคฆเคพเคจ เค•เคฟเคฏเคพ เคนเฅˆเฅค เคชเฅˆเค•เฅ‡เคœ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคธเฅเค—เค เคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ-เคฎเคฟเคคเฅเคฐเคชเฅ‚เคฐเฅเคฃ เคชเคพเค‡เคฅเคจ API เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค + +เคจเคฟเคฎเฅเคจ เค‰เคฆเคพเคนเคฐเคฃ เคฆเคฟเค–เคพเคคเฅ‡ เคนเฅˆเค‚ เค•เคฟ `เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเคฟเค•เฅเคธ` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เคธเคพเคฅ YOLO-NAS เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— `ultralytics` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เคธเคพเคฅ เค•เฅˆเคธเฅ‡ เค•เคฐเฅ‡เค‚: + +### เคชเคนเคšเคพเคจ เค”เคฐ เคฎเคพเคจเฅเคฏเคคเคพ เค‰เคฆเคพเคนเคฐเคฃ + +เค‡เคธ เค‰เคฆเคพเคนเคฐเคฃ เคฎเฅ‡เค‚ เคนเคฎ COCO8 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ 
+
+In this example we validate YOLO-NAS-s on the COCO8 dataset.
+
+!!! Example "Example"
+
+    This example provides simple inference and validation code for YOLO-NAS. For handling inference results see the [Predict](../modes/predict.md) mode. For using YOLO-NAS with additional modes see [Val](../modes/val.md) and [Export](../modes/export.md). Training is not supported for YOLO-NAS on the `ultralytics` package.
+
+    === "Python"
+
+        PyTorch pre-trained `*.pt` model files can be passed to the Ultralytics `NAS()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import NAS
+
+        # Load a COCO-pretrained YOLO-NAS-s model
+        model = NAS('yolo_nas_s.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Validate the model on the COCO8 example dataset
+        results = model.val(data='coco8.yaml')
+
+        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to directly run the models:
+
+        ```bash
+        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
+        yolo val model=yolo_nas_s.pt data=coco8.yaml
+
+        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
+        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+We offer three YOLO-NAS model variants: Small (s), Medium (m) and Large (l). Each variant is designed for different computational and performance needs:
+
+- **YOLO-NAS-s**: Optimized for environments where computational resources are limited but efficiency is key.
+- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
เค†เคตเคถเฅเคฏเค•เคคเคพ เคนเฅ‹เคจเฅ‡ เคตเคพเคฒเฅ‡ เคธเฅเคฅเคฟเคคเคฟเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคถเฅ‡เคท เคฐเฅ‚เคช เคธเฅ‡ เคคเฅˆเคฏเคพเคฐ เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคœเคนเคพเค‚ เค—เคฃเคจเคพ เคธเค‚เคธเคพเคงเคจเฅ‹เค‚ เค•เฅ€ เค•เคฎ เคฌเคพเคงเคพ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค + +เคจเฅ€เคšเฅ‡ เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคเค• เคตเคฟเคธเฅเคคเฅƒเคค เค…เคตเคฒเฅ‹เค•เคจ เคฆเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคœเคฟเคธเคฎเฅ‡เค‚ เค‰เคจเค•เฅ€ เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค เคตเฅ‡เคŸ, เคฏเฅ‡ เค•เคพเคฐเฅเคฏ เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เค”เคฐ เค‡เคจเค•เฅ€ เค†เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคฎเฅ‹เคก เค•เฅ‡ เคธเคพเคฅ เค‰เคจเค•เคพ เคธเค‚เค—เคคเคคเคพ เคถเคพเคฎเคฟเคฒ เคนเฅˆเฅค + +| เคฎเฅ‰เคกเคฒ เคชเฅเคฐเค•เคพเคฐ | เคชเฅเคฐเฅ€-เคชเฅเคฐเคถเคฟเคค เคตเฅ‡เคŸ | เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ | เคชเคนเคšเคพเคจ | เคฎเคพเคจเฅเคฏเคคเคพ | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ | เคจเคฟเคฐเฅเคฏเคพเคค | +|-------------|-----------------------------------------------------------------------------------------------|-----------------------------------|-------|---------|-----------|---------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## เค‰เคฆเฅเคงเคฐเคฃ เค”เคฐ เคชเฅเคฐเคถเค‚เคธเคพเคชเคคเฅเคฐ + +เคฏเคฆเคฟ เค†เคช เค…เคชเคจเฅ‡ เคถเฅ‹เคง เคฏเคพ เคตเคฟเค•เคพเคธ เค•เคพเคฐเฅเคฏ เคฎเฅ‡เค‚ YOLO-NAS เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เค•เฅƒเคชเคฏเคพ SuperGradients เค•เฅ‹ เค‰เคฆเฅเคงเคฐเคฃ เคฆเฅ‡เค‚: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +เคนเคฎ Deci AI เค•เฅ‡ [SuperGradients](https://github.com/Deci-AI/super-gradients/) เคŸเฅ€เคฎ เค•เฅ‡ เคชเฅเคฐเคฏเคพเคธเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค†เคญเคพเคฐ เคชเฅเคฐเค•เคŸ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เคœเคฟเคจเฅเคนเฅ‹เค‚เคจเฅ‡ เค‡เคธ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคธเค‚เคธเคพเคงเคจ เค•เฅ‹ เคฌเคจเคพเคจเฅ‡ เค”เคฐ เคฌเคจเคพเค เคฐเค–เคจเฅ‡ เค•เฅ‡ เคฒเคฟเคเฅค เคนเคฎ เคฎเคพเคจเคคเฅ‡ เคนเฅˆเค‚ เค•เคฟ YOLO-NAS, เค…เคชเคจเฅ‡ เคจเคตเคพเคšเคพเคฐเฅ€ เคถเฅเคฐเฅ‡เคฃเฅ€เคฌเคฆเฅเคง เคธเค‚เคฐเคšเคจเคพ เค”เคฐ เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เคตเคธเฅเคคเฅ เคชเคนเคšเคพเคจ เค•เฅเคทเคฎเคคเคพเค“เค‚ เค•เฅ‡ เคธเคพเคฅ เคเค• เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เค‰เคชเค•เคฐเคฃ เคฌเคจเฅ‡เค—เคพ เคœเคฟเคธเฅ‡ เค‰เคฆเฅเคฏเคฎเคฟเคฏเฅ‹เค‚ เค”เคฐ เคถเฅ‹เคงเค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เค†เคตเคถเฅเคฏเค• เคธเคพเคงเคจ เคฌเคจเคพ เคฐเค–เคพ เคœเคพเคเค—เคพเฅค diff --git a/ultralytics/docs/hi/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/hi/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov3.md b/ultralytics/docs/hi/models/yolov3.md new file mode 100755 index 0000000..4fc14a8 --- /dev/null +++ b/ultralytics/docs/hi/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: YOLOv3, YOLOv3-Ultralytics เค”เคฐ YOLOv3u เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเฅ‡เค‚เฅค เค‡เคจเค•เฅ€ เคชเฅเคฐเคฎเฅเค– เคตเคฟเคถเฅ‡เคทเคคเคพเคเค, เค‰เคชเคฏเฅ‹เค— เค”เคฐ เคตเคธเฅเคคเฅ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ‡ เคฒเคฟเค เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ€ เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, เคตเคธเฅเคคเฅ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ, เค‡เคจเคซเฅ‡เคฐเฅ‡เคจเฅเคธ, เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, Ultralytics +--- + +# YOLOv3, YOLOv3-Ultralytics, เค”เคฐ YOLOv3u + +## เคธเคตเคพเคฒ + +เคฏเฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจเฅ€ เคฎเฅ‡เค‚ เคคเฅ€เคจ เคชเฅเคฐเคฎเฅเค– เคตเคธเฅเคคเฅ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒเฅ‹เค‚, เคฏเคพเคจเฅ€ [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) เค”เคฐ [YOLOv3u](https://github.com/ultralytics/ultralytics), เค•เฅ€ เคเค• เค…เคตเคฒเฅ‹เค•เคจ เคชเฅเคฐเคธเฅเคคเฅเคค เค•เคฐเคคเฅ€ เคนเฅˆเฅค + +1. **YOLOv3:** เคฏเคน You Only Look Once (YOLO) เคตเคธเฅเคคเฅ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎ เค•เคพ เคคเฅ€เคธเคฐเคพ เคธเค‚เคธเฅเค•เคฐเคฃ เคนเฅˆเฅค เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เคฐเฅ‚เคช เคฎเฅ‡เค‚ Joseph Redmon เคจเฅ‡ เคตเคฟเค•เคธเคฟเคค เค•เคฟเคฏเคพ, YOLOv3 เคจเฅ‡ เคฏเฅ‹เค—เฅเคฏเคคเคพเค“เค‚ เค•เคพ เคเค• เคถเฅเคฐเฅ‡เคฃเฅ€, เคœเฅˆเคธเฅ‡ เค•เคฟ เคฎเคฒเฅเคŸเฅ€เคธเฅเค•เฅ‡เคฒ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค”เคฐ เคคเฅ€เคจ เคญเคฟเคจเฅเคจ เค†เค•เคพเคฐ เค•เฅ‡ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเฅเคจเคฒเฅเคธ, เค•เฅ‹ เคถเคพเคฎเคฟเคฒ เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ‡ เคชเฅ‚เคฐเฅเคตเคœเฅ‹เค‚ เคชเคฐ เคธเฅเคงเคพเคฐ เค•เคฟเคฏเคพเฅค + +2. 
+
+2. **YOLOv3-Ultralytics:** The Ultralytics implementation of the YOLOv3 model. It reproduces the original YOLOv3 architecture and offers additional functionality, such as support for more pre-trained models and easier customization options.
+
+3. **YOLOv3u:** An updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split detection head used in the YOLOv8 models. YOLOv3u keeps the same backbone and neck architecture as YOLOv3, but updates the detection head to the one from YOLOv8.
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three different scales for detection, leveraging detection kernels of three different sizes: 13x13, 26x26 and 52x52, which improved detection accuracy for objects of varying sizes. YOLOv3 also added features such as multi-label predictions for each bounding box and a better feature extractor network.
+
+- **YOLOv3-Ultralytics:** The Ultralytics implementation of YOLOv3 delivers the same performance as the original model but adds support for more pre-trained models, additional training methods and easier customization options, making it practical for a wider range of applications.
+
+- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head from YOLOv8. By removing the need for pre-defined anchor boxes and objectness scores, this detection-head design can improve the model's ability to detect objects of varying sizes and shapes, making YOLOv3u more robust and accurate for object detection tasks.
+
+## Supported Tasks and Modes
+
+The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics and YOLOv3u, is designed specifically for object detection tasks. These models are renowned for balancing accuracy and speed, and each variant offers unique features and optimizations, making them suitable for a range of applications.
+
+All three models support a comprehensive set of modes, covering [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md), which gives users a complete toolkit for effective object detection workflows.
+
+| Model Type         | Supported Tasks                        | Inference | Validation | Training | Export |
+|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv3             | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โœ…        | โœ…      |
+| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โœ…        | โœ…      |
+| YOLOv3u            | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โœ…        | โœ…      |
+
+This table provides an at-a-glance view of each YOLOv3 variant's capabilities, highlighting their versatility and suitability for various tasks and operating modes.
+
+## Usage Examples
+
+This example provides simple YOLOv3 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the accompanying [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
เคตเคฟเคธเฅเคคเคพเคฐเคชเฅ‚เคฐเฅเคตเค• เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เค‡เคธเค•เฅ‡ เคธเคพเคฅเฅ€ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) เค”เคฐ [Export](../modes/export.md) เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผ เคชเฅ‡เคœเฅ‡เคœเคผ เค•เฅ€ เคœเคพเค‚เคš เค•เคฐเฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + `*.pt` เคชเฅเคฐเฅ€เคŸเฅเคฐเฅ‡เคจ เค•เคฟเค เค—เค PyTorch เคฎเฅ‰เคกเคฒ เค”เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ *.yaml เคซเคผเคพเค‡เคฒ Python เคฎเฅ‡เค‚ YOLO() เค•เฅเคฒเคพเคธ เค•เฅ‹เค‚ เคฏเฅ‚เคœเคผ เค•เคฐเค•เฅ‡ เคเค• เคฎเฅ‰เคกเคฒ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคคเฅˆเคฏเคพเคฐ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเคพเคธ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + + ```python + from ultralytics import YOLO + + # COCO-pretrained YOLOv3n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov3n.pt') + + # เคฎเฅ‰เคกเคฒ เค•เฅ€ เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐเฅ‡เค‚ (เคตเฅˆเค•เคฒเฅเคชเคฟเค•) + model.info() + + # COCO8 เค‰เคฆเคพเคนเคฐเคฃ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคฎเฅ‰เคกเคฒ 100 epochs เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฆเฅ‡เค‚ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv3n เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ 'bus.jpg' เค›เคตเคฟ เคชเคฐ เค‡เคจเคซเฅ‡เคฐเฅ‡เค‚เคธ เคšเคฒเคพเคเค‚ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + เคฎเฅ‰เคกเคฒ เคชเคฐ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค CLI เค†เคฆเฅ‡เคถ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเค‚: + + ```bash + # COCO-pretrained YOLOv3n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ COCO8 เค‰เคฆเคพเคนเคฐเคฃ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เค‡เคธเฅ‡ 100 epochs เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO-pretrained YOLOv3n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ 'bus.jpg' เค›เคตเคฟ เคชเคฐ เค‡เคจเคซเฅ‡เคฐเฅ‡เค‚เคธ เคšเคฒเคพเคเค‚ + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## เค‰เคฆเฅเคงเคฐเคฃ เค”เคฐ เคชเฅเคฐเคถเค‚เคธเคพเคชเคคเฅเคฐ + +เค…เค—เคฐ เค†เคช เค…เคชเคจเฅ‡ เคถเฅ‹เคง เคฎเฅ‡เค‚ YOLOv3 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เค•เฅƒเคชเคฏเคพ เคฎเฅ‚เคฒ YOLO เคชเฅ‡เคชเคฐเฅเคธ เค”เคฐ Ultralytics YOLOv3 เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เค•เฅ‹ เค‰เคฆเฅเคงเฅƒเคค เค•เคฐเฅ‡เค‚เฅค + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Joseph Redmon เค”เคฐ Ali Farhadi เค•เฅ‹ เคฎเฅ‚เคฒ YOLOv3 เคตเคฟเค•เคธเค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคงเคจเฅเคฏเคตเคพเคฆเฅค diff --git a/ultralytics/docs/hi/models/yolov3.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov4.md b/ultralytics/docs/hi/models/yolov4.md new file mode 100755 index 0000000..a2cfb4f --- /dev/null +++ b/ultralytics/docs/hi/models/yolov4.md @@ -0,0 +1,69 @@ +--- +comments: true +description: YOLOv4 เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคนเคฎเคพเคฐเฅ‡ เคตเคฟเคธเฅเคคเฅƒเคค เค—เคพเค‡เคก เคชเคฐ เค†เคชเค•เคพ เคธเฅเคตเคพเค—เคค เคนเฅˆ, เคœเฅ‹ เคเค• เคจเคตเฅ€เคจเคคเคฎ เคธเคฎเคฏ เคฎเฅ‡เค‚เค•เคฟเค เค—เค เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคตเคธเฅเคคเฅเค“เค‚ เค•เคพ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เคตเคพเคฒเคพ เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐ เคนเฅˆเฅค เค‡เคธเฅ‡ เคฏเฅ‹เค—เฅเคฏเคคเคพ เคธเฅ‡ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡, เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เค•เฅ‡ เคชเฅเคฐเคฎเฅเค– เคฌเคฟเค‚เคฆเฅเค“เค‚ เค•เฅ‹ เคธเคฎเคเคจเฅ‡ เค”เคฐ เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เคฆเฅƒเคทเฅเคŸเคฟ เคธเฅ‡ เค•เฅเค› เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เค•เฅ‹ เคฆเฅ‡เค–เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเคขเคผเฅ‡เค‚เฅค +keywords: ultralytics, YOLOv4, เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ, เคจเฅเคฏเฅ‚เคฐเคฒ เคจเฅ‡เคŸเคตเคฐเฅเค•, เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เคตเคพเคฒเคพ, เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐ, เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— +--- + +# YOLOv4: เค‰เคšเฅเคš เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค• เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ + +YOLOv4, เคœเฅ‹ 2020 เคฎเฅ‡เค‚ เค…เคฒเฅ‡เค•เฅเคธเฅ€ เคฌเฅ‹เคšเค•เฅ‹เคตเคธเฅเค•เฅ€ (Alexey Bochkovskiy) เคฆเฅเคตเคพเคฐเคพ [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet) เคชเคฐ เคฒเฅ‰เคจเฅเคš เคนเฅเค† เคเค• เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เคฌเคพเคœเคผเคพเคฐ เคฎเฅ‡เค‚ เคธเคฎเคฏเคตเคฟเคถเฅ‡เคทเฅ€-เคฎเฅ‡เค‚เคŸ เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐ เคนเฅˆ, เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ Ultralytics เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ เคชเฅ‡เคœ เคฎเฅ‡เค‚ เค†เคชเค•เคพ เคธเฅเคตเคพเค—เคค เคนเฅˆเฅค YOLOv4 เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฌเฅ€เคš เค†เคฆเคฐเฅเคถ เคธเค‚เคคเฅเคฒเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคœเฅ‹ เค‡เคธเฅ‡ เคฌเคนเฅเคค เคธเคพเคฐเฅ‡ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เค•เฅ‡ เคฒเคฟเค เคเค• เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เคตเคฟเค•เคฒเฅเคช เคฌเคจเคพเคคเคพ เคนเฅˆเฅค + +![YOLOv4 เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เค†เคฐเฅ‡เค–](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**YOLOv4 เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เค†เคฐเฅ‡เค–**เฅค YOLOv4 เค•เฅ€ เคœเคŸเคฟเคฒ เคจเฅ‡เคŸเคตเคฐเฅเค• เคกเคฟเคœเคผเคพเค‡เคจ เค•เคพ เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เคœเคฟเคธเคฎเฅ‡เค‚ เคฌเฅˆเค•เคฌเฅ‹เคจ, เคจเฅ‡เค• เค”เคฐ เคนเฅ‡เคก เค˜เคŸเค• เค”เคฐ เค‰เคจเค•เฅ‡ เคœเฅ‹เคกเคผเฅ‡ เค—เค เคธเฅเคคเคฐเฅ‹เค‚ เค•เฅ€ เคฆเคฟเค–เคพเคตเคŸเฅ€ เคคเคธเฅเคตเฅ€เคฐ เคนเฅˆ, เคคเคพเค•เคฟ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ 
เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ‡ เคฒเคฟเค เค‰เคจเค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค + +## เคชเคฐเคฟเคšเคฏ + +YOLOv4 เค•เคพ เคฎเคคเคฒเคฌ เคนเฅ‹เคคเคพ เคนเฅˆ 'You Only Look Once' เคธเค‚เคธเฅเค•เคฐเคฃ 4เฅค เคฏเคน เคเค• เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เคนเฅˆ เคœเฅ‹ เคชเคฟเค›เคฒเฅ‡ YOLO เคธเค‚เคธเฅเค•เคฐเคฃเฅ‹เค‚ เคœเฅˆเคธเฅ‡ YOLOv3 เค”เคฐ เค…เคจเฅเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ€ เคธเฅ€เคฎเคพเค“เค‚ เค•เคพ เคธเคพเคฎเคจเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคตเคฟเค•เคธเคฟเคค เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค เค…เคจเฅเคฏ เค•เคจเฅเคตเฅ‹เคฒเฅเคฏเฅ‚เคถเคจเคฒ เคจเฅเคฏเฅ‚เคฐเคฒ เคจเฅ‡เคŸเคตเคฐเฅเค• (CNN) เค†เคงเคพเคฐเคฟเคค เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐเฅ‹เค‚ เค•เฅ‡ เคตเคฟเคชเคฐเฅ€เคค, YOLOv4 เค•เคพ เค‰เคชเคฏเฅ‹เค— เคธเคฟเคฐเฅเคซ เค…เคจเฅเคถเค‚เคธเคพ เคชเฅเคฐเคฃเคพเคฒเคฟเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคนเฅ€ เคจเคนเฅ€เค‚, เคฌเคฒเฅเค•เคฟ เคธเฅเคตเคคเค‚เคคเฅเคฐ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคชเฅเคฐเคฌเค‚เคงเคจ เค”เคฐ เคฎเคพเคจเคต เค‡เคจเคชเฅเคŸ เค•เฅ‹ เค•เคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคญเฅ€ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เคฏเคน เคเค• เคธเคพเคงเคพเคฐเคฃ เค—เฅเคฐเคพเคซเคผเคฟเค•เฅเคธ เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค— เค‡เค•เคพเค‡เคฏเคพเค‚ (เคœเฅ€เคชเฅ€เคฏเฅ‚) เคชเคฐ เค…เคชเคจเคพ เค•เคพเคฐเฅเคฏ เคธเค‚เคชเคพเคฆเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆ เค”เคฐ เค•เฅ‡เคตเคฒ เคเค• เคเคธเฅ‡ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคเค• เคนเฅ€ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค + +## เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ + +YOLOv4 เค…เคชเคจเฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคˆ เคจเคตเคพเคšเคพเคฐเฅ€ เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆเฅค เค‡เคจเคฎเฅ‡เค‚ เคตเฅ‡เคŸเฅ‡เคก-เคฐเฅ‡เคœเคฟเคกเฅเคฏเฅ‚เคฒ-เค•เคจเฅ‡เค•เฅเคถเค‚เคธ (WRC), เค•เฅเคฐเฅ‰เคธ-เคธเฅเคŸเฅ‡เคœ-เคชเคพเคฐเฅเคถเคฒ-เค•เคจเฅ‡เค•เฅเคถเค‚เคธ (CSP), เค•เฅเคฐเฅ‰เคธ เคฎเคฟเคจเฅ€-เคฌเฅˆเคš เคจเคฟเคฏเคฎเคจ (CmBN), เคธเฅ‡เคฒเฅเคซ-เคชเฅเคฐเคคเคฟเคนเคฟเค‚เคธเคพ-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ (SAT), เคฎเคฟเคถ เคเค•เฅเคŸเคฟเคตเฅ‡เคถเคจ, เคฎเฅ‹เคœเคผเฅ‡เค• เคกเฅ‡เคŸเคพ เคตเฅƒเคฆเฅเคงเคฟ, เคกเฅเคฐเฅ‰เคชเคฌเฅเคฒเฅ‰เค• เคจเคฟเคฏเคฎเคจ เค”เคฐ เคธเฅ€เค†เคˆเค“เคฏเฅ‚ เคนเคพเคจเคฟ เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค เคฏเฅ‡ เคตเคฟเคถเฅ‡เคทเคคเคพเคเค เคธเค‚เคฏเฅเค•เฅเคค เคฐเฅ‚เคช เคธเฅ‡ เคฌเฅ‡เคนเคคเคฐเฅ€เคจ เคชเคฐเคฟเคฃเคพเคฎ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคธเค‚เคฏเฅ‹เคœเคฟเคค เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆเค‚เฅค + +เค†เคฎเคคเฅŒเคฐ เคชเคฐ, เคเค• เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคŸเคฐ เคฎเฅ‡เค‚ เค•เคˆ เคนเคฟเคธเฅเคธเฅ‡ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚ เคœเคฟเคจเคฎเฅ‡เค‚ เค‡เคจเคชเฅเคŸ, เคฌเฅˆเค•เคฌเฅ‹เคจ, เคจเฅ‡เค• เค”เคฐ เคนเฅ‡เคก เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚เฅค YOLOv4 เค•เคพ เคฌเฅˆเค•เคฌเฅ‹เคจ ImageNet เคชเคฐ เคชเฅ‚เคฐเฅ€ เคคเคฐเคน เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคนเฅ‹เคคเคพ เคนเฅˆ เค”เคฐ เคฏเคน เคตเคธเฅเคคเฅเค“เค‚ เค•เฅ‡ เคตเคฐเฅเค—เฅ‹เค‚ เค”เคฐ เคฌเคพเค‰เค‚เคกเคฟเค‚เค— เคฌเฅ‰เค•เฅเคธ เค•เคพ เค…เคจเฅเคฎเคพเคจ เคฒเค—เคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เคฌเฅˆเค•เคฌเฅ‹เคจ เคตเคฟเคญเคฟเคจเฅเคจ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅ‡ เคนเฅ‹ เคธเค•เคคเคพ เคนเฅˆ, เคœเฅˆเคธเฅ‡ VGG, ResNet, ResNeXt เคฏเคพ DenseNetเฅค เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคนเฅ‡เคก เคญเคพเค— เคคเคพเค•เคฟ เค…เค‚เคคเคฟเคฎ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ 
เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค”เคฐ เคถเฅเคฐเฅ‡เคฃเฅ€เค•เคฐเคฃ เค•เคฟเค เคœเคพ เคธเค•เฅ‡เค‚เฅค + +## เคซเฅเคฐเฅ€เคฌเฅ€เคœเคผ เคธเฅ‚เคšเคจเคพ + +YOLOv4 เค‰เคจ เคตเคฟเคงเคฟเคฏเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ เคœเคฟเคจเฅเคนเฅ‡เค‚ "เคซเฅเคฐเฅ€เคฌเฅ€เคœเคผ เค•เคพ เคธเค‚เค—เฅเคฐเคน" เค•เคนเคพ เคœเคพเคคเคพ เคนเฅˆ, เคœเฅ‹ เคฎเฅ‰เคกเคฒ เค•เฅ€ เค‰เคจเฅเคจเคคเคคเคพ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคฌเคขเคผเคพเคจเฅ‡ เค”เคฐ เคธเค‚เคตเคฟเคงเคพเคจ เค•เฅ‡ เคฒเคพเค—เฅ‚ เคนเฅ‹เคจเฅ‡ เค•เฅ€ เค•เฅ€เคฎเคค เคฌเคขเคผเคพเคคเฅ‡ เคนเฅˆเค‚เฅค เคกเคพเคŸเคพ เคตเฅƒเคฆเฅเคงเคฟ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅ‹เค— เคนเฅ‹เคจเฅ‡ เคตเคพเคฒเฅ€ เคเค• เคธเคพเคฎเคพเคจเฅเคฏ เคซเฅเคฐเฅ€เคฌเฅ€เคœเคผ เคŸเคฟเค•เคจเคฟเค• เคนเฅˆ, เคœเฅ‹ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคธเค‚เคตเฅ‡เค—เฅ€เคคเคพ เคฌเคขเคผเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‡เคจเคชเฅเคŸ เค›เคตเคฟเคฏเฅ‹เค‚ เค•เฅ€ เคตเคฟเคตเคฟเคงเคคเคพ เคฌเคขเคผเคพเคคเฅ€ เคนเฅˆเฅค เคกเคพเคŸเคพ เคตเฅƒเคฆเฅเคงเคฟ เค•เฅ‡ เค•เฅเค› เค‰เคฆเคพเคนเคฐเคฃ เคนเฅˆเค‚ เคœเคฟเคจเคฎเฅ‡เค‚ เคซเฅ‹เคŸเฅ‹เคฎเฅ‡เคŸเฅเคฐเคฟเค• เคฆเคฟเคธเค‚เค—เคค (เคšเคฟเคคเฅเคฐ เค•เฅ‡ เคเค• เค›เคตเคฟ เค•เฅ‡ เคคเฅ‡เคœเฅ€, เคฌเค‚เคฆเคฟเคถ, เคธเฅ‹เคจเฅ‡, เค‰เคทเฅเคฃเคคเคพ เค”เคฐ เค†เคตเคพเคœเคผ เค•เฅ‹ เคธเคฎเคพเคฏเฅ‹เคœเคฟเคค เค•เคฐเคจเคพ) เค”เคฐ เคœเฅเคฏเคพเคฎเคฟเคคเคฟเค• เคฆเคฟเคธเค‚เค—เคค (เคเค• เค›เคตเคฟ เค•เฅ‡ เคฐเฅˆเค‚เคกเคฎ เคธเฅเค•เฅ‡เคฒเคฟเค‚เค—, เค•เฅเคฐเฅ‰เคชเคฟเค‚เค—, เคซเฅเคฒเคฟเคชเคฟเค‚เค— เค”เคฐ เค˜เฅเคฎเคพเคจเฅ‡ เค•เฅ‡ เคœเฅ‹เคกเคผเคจเฅ‡) เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚เฅค เคฏเฅ‡ เคŸเคฟเค•เคจเคฟเค• เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค…เคฒเค—-เค…เคฒเค— เคชเฅเคฐเค•เคพเคฐ เค•เฅ€ เค›เคตเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เคฌเฅ‡เคนเคคเคฐ เคธเคพเคฎเคพเคจเฅเคฏเฅ€เค•เคฐเคฃ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคธเคนเคพเคฏเคคเคพ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค + +## เคตเคฟเคถเฅ‡เคทเคคเคพเคเค เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ + +YOLOv4 เค•เฅ‹ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‡เค‚ เค‰เคšเฅเคš เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค YOLOv4 เค•เฅ€ เค†เคฐเฅเค•เคฟเคŸเฅ‡เค•เฅเคšเคฐ เคฎเฅ‡เค‚ CSPDarknet53 เคฌเฅˆเค•เคฌเฅ‹เคจ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚, เคจเฅ‡เค• เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ PANet, เค”เคฐ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคนเฅ‡เคก เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ YOLOv3 เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค เคฏเคน เคกเคฟเคœเคผเคพเค‡เคจ YOLOv4 เค•เฅ‹ เคเค• เคชเฅเคฐเคญเคพเคตเคถเคพเคฒเฅ€ เค—เคคเคฟ เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เคฏเคน เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅเค•เฅเคค เคนเฅ‹เคคเคพ เคนเฅˆเฅค YOLOv4 เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเคพเคจเค•เฅ‹เค‚ เคฎเฅ‡เค‚ เค‰เคšเฅเคš เค—เฅเคฃเคตเคคเฅเคคเคพ เคนเคพเคธเคฟเคฒ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคญเฅ€ เค‰เคคเฅเค•เฅƒเคทเฅเคŸเคคเคพ เค•เคฐเคคเคพ เคนเฅˆเฅค + +## เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เค‰เคฆเคพเคนเคฐเคฃ + +เคฎเคพเคฎเคฒเฅ‡ เคธเฅ‡ เค†เคช เคฏเฅ‹เค—เฅเคฏเคคเคพ เคธเค‚เคฌเค‚เคงเคฟเคค เคตเคฟเคธเฅเคคเคพเคฐเคฟเคค เคšเคฐเคฃเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค YOLOv4 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเคพ เคšเคพเคนเฅ‡เค‚เค—เฅ‡: + +1. YOLOv4 GitHub เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เคชเคฐ เคœเคพเคเค‚: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet)เฅค + +2. 
เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคฒเคฟเค readme เคซเคผเคพเค‡เคฒ เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เคพ เคชเคพเคฒเคจ เค•เคฐเฅ‡เค‚เฅค เค‡เคธเคฎเฅ‡เค‚ เคธเคพเคฎเคพเคจเฅเคฏเคคเคฏเคพ เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เค•เฅเคฒเฅ‹เคจ เค•เคฐเคจเคพ, เค†เคตเคถเฅเคฏเค• เคกเคฟเคชเฅ‡เค‚เคกเฅ‡เค‚เคธเคฟเคฏเฅ‹เค‚ เค•เฅ‹ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเคพ เค”เคฐ เค•เคฟเคธเฅ€ เค†เคตเคถเฅเคฏเค• เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เคšเคฐ เคชเคฐ เคธเฅ‡เคŸเค…เคช เค•เคฐเคจเคพ เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +3. เคธเฅเคฅเคพเคชเคจเคพ เคชเฅ‚เคฐเฅ€ เคนเฅ‹เคจเฅ‡ เคชเคฐ, เค†เคช เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ เค‰เคธเฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เคนเคฎเฅ‰เคกเคจ/เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เฅ‡ เค…เคจเฅเคธเคพเคฐเฅค เคฏเคน เค†เคฎเคคเฅŒเคฐ เคชเคฐ เค…เคชเคจเฅ‡ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เคคเฅˆเคฏเคพเคฐ เค•เคฐเคจเคพ, เคฎเฅ‰เคกเคฒ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅเคธ เค•เฅ‹ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐ เค•เคฐเคจเคพ, เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเคจเคพ เค”เคฐ เคซเคฟเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฐเคคเคพ เคนเฅˆเฅค + +เค•เฅƒเคชเคฏเคพ เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ เคตเคฟเคถเคฟเคทเฅเคŸ เคšเคฐเคฃ เค†เคชเค•เฅ‡ เคตเคฟเคถเคฟเคทเฅเคŸ เค‰เคชเคฏเฅ‹เค— เคฎเคพเคฎเคฒเฅ‡ เค”เคฐ YOLOv4 เคฐเฅ‡เคชเฅ‹เคœเคผเคฟเคŸเคฐเฅ€ เค•เฅ€ เคธเฅเคฅเคฟเคคเคฟ เคชเคฐ เคจเคฟเคฐเฅเคญเคฐ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค‡เคธเคฒเคฟเค, YOLOv4 เค—เคฟเคฆเฅเคงเคถเคพเคฒเฅ€ เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เค•เคกเคผเฅ€ เคธเคฒเคพเคน เคฆเฅ€ เคœเคพเคคเฅ€ เคนเฅˆเฅค + +เคนเคฎ เค‡เคธ เคฌเคพเคค เค•เฅ€ เค…เคธเฅเคตเคฟเคงเคพ เค•เฅ‡ เคฒเคฟเค เคฎเคพเคซเฅ€ เคšเคพเคนเคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ YOLOv4 เค•เฅ‡ เคธเคฎเคฐเฅเคฅเคจ เค•เฅ‹ Ultralytics เค•เฅ‡ เคฒเคฟเค เคฒเคพเค—เฅ‚ เค•เคฟเคฏเคพ เคœเคพเคจเฅ‡ เคชเคฐ เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เค‡เคธ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผ เค•เฅ‹ เค…เคฆเฅเคฏเคคเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ€ เค•เฅ‹เคถเคฟเคถ เค•เคฐเฅ‡เค‚เค—เฅ‡เฅค + +## เคจเคฟเคทเฅเค•เคฐเฅเคท + +YOLOv4 เคเค• เคถเค•เฅเคคเคฟเคถเคพเคฒเฅ€ เค”เคฐ เค•เฅเคถเคฒ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เคนเฅˆ เคœเฅ‹ เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฌเฅ€เคš เคธเค‚เคคเฅเคฒเคจ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เค‰เคจเคฟเค• เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค”เคฐ เคซเฅเคฐเฅ€เคฌเฅ€เคœเคผ เคคเค•เคจเฅ€เค•เฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡, เคฏเคน เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค‰เคคเฅเค•เฅƒเคทเฅเคŸ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เค•เคฟเคธเฅ€ เค†เคฎ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคธเคพเคฅ เค•เคฟเคธเฅ€ เคญเฅ€ เคตเฅเคฏเค•เฅเคคเคฟ เคฆเฅเคตเคพเคฐเคพ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค”เคฐ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ, เคœเฅ‹ เค‡เคธเฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เค•เฅ‡ เคฒเคฟเค เคชเคนเฅเค‚เคš เคฏเฅ‹เค—เฅเคฏ เค”เคฐ เคตเฅเคฏเคพเคตเคนเคพเคฐเคฟเค• เคฌเคจเคพเคคเคพ เคนเฅˆเฅค + +## เคธเค‚เคฆเคฐเฅเคญ เค”เคฐ เคชเฅเคฐเคถเค‚เคธเคพ + +เคนเคฎ YOLOv4 เคฒเฅ‡เค–เค•เฅ‹เค‚ เค•เฅ‹ เค‡เคธ เคซเคผเฅ€เคฒเฅเคก เคฎเฅ‡เค‚ เค‰เคจเค•เฅ‡ 
เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคฏเฅ‹เค—เคฆเคพเคจเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค†เคญเคพเคฐ เคตเฅเคฏเค•เฅเคค เค•เคฐเคจเคพ เคšเคพเคนเฅ‡เค‚เค—เฅ‡: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +เคฎเฅ‚เคฒ YOLOv4 เคชเฅ‡เคชเคฐ [arXiv](https://arxiv.org/abs/2004.10934) เคชเคฐ เคฎเคฟเคฒ เคธเค•เคคเคพ เคนเฅˆเฅค เคฒเฅ‡เค–เค•เฅ‹เค‚ เคจเฅ‡ เค…เคชเคจเฅ‡ เค•เคพเคฐเฅเคฏ เค•เฅ‹ เคชเคฌเฅเคฒเคฟเค•เคฒเฅ€ เค‰เคชเคฒเคฌเฅเคง เค•เคฐเคพเคฏเคพ เคนเฅˆ, เค”เคฐ เค•เฅ‹เคกเคฌเฅ‡เคธ [GitHub](https://github.com/AlexeyAB/darknet) เคชเคฐ เคเค•เฅเคธเฅ‡เคธ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เคนเคฎ เค‰เคจเค•เฅ‡ เค•เฅ‹เคถเคฟเคถเฅ‹เค‚ เค•เฅ€ เคธเคฐเคพเคนเคจเคพ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เคœเฅ‹ เค•เฅเคทเฅ‡เคคเฅเคฐ เค•เฅ‹ เค†เค—เฅ‡ เคฌเคขเคผเคพเคจเฅ‡ เค”เคฐ เค‰เคจเค•เฅ‡ เค•เคพเคฎ เค•เฅ‹ เคตเฅเคฏเคพเคชเค• เคธเคฎเฅเคฆเคพเคฏ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฒเคฌเฅเคง เค•เคฐเคพเคจเฅ‡ เคฎเฅ‡เค‚ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค diff --git a/ultralytics/docs/hi/models/yolov4.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov5.md b/ultralytics/docs/hi/models/yolov5.md new file mode 100755 index 0000000..4a9172e --- /dev/null +++ b/ultralytics/docs/hi/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: เคฏเฅ‹เคฒเฅ‹เคตเฅ€5เคฏเฅ‚ เค•เฅ€ เค–เฅ‹เคœ เค•เคฐเฅ‡เค‚, เคฏเฅ‹เคฒเฅ‹เคตเฅ€5 เคฎเฅ‰เคกเคฒ เค•เคพ เคเค• เคฌเคขเคผเคพเคฏเคพ เคนเฅเค† เคธเค‚เคธเฅเค•เคฐเคฃ เคœเคฟเคธเคฎเฅ‡เค‚ เคเค• เคจเคฟเคถเฅเคšเคฟเคค เคฐเคซเคผเฅเคคเคพเคฐ เค•เฅ‡ เคฌเคฆเคฒเคพเคต เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เคตเคธเฅเคคเฅ เคœเฅเคžเคพเคชเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค•เคˆ เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค +keywords: YOLOv5u, เคตเคธเฅเคคเฅ เคœเฅเคžเคพเคชเคจ, เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ, Ultralytics, Inference, Validation, YOLOv5, YOLOv8, เคเค‚เคšเคฐ-เคฎเฅเค•เฅเคค, เคตเคธเฅเคคเฅเคจเคฟเคชเคพเคคเคฟ เคฐเคนเคฟเคค, เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—, เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— +--- + +# เคฏเฅ‹เคฒเฅ‹เคตเฅ€5 + +## เคธเคฎเฅ€เค•เฅเคทเคพ + +YOLOv5u เคตเคธเฅเคคเฅ เคœเฅเคžเคพเคชเคจ เค•เฅ‡ เคคเคฐเฅ€เค•เฅ‹เค‚ เคฎเฅ‡เค‚ เคเค• เคชเคŸเคฒ เคฌเคขเคผเฅ‹เคคเคฐเฅ€ เค•เฅ‹ เคชเฅเคฐเคคเคฟเคทเฅเค เคพเคจเคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเฅ‹เค—เฅเคฏเคคเคพ เค—เฅเคฐเคนเคฃ เค”เคฐ เคธเคฎเคฏ เค•เฅ€ เคฎเฅ‚เคฒเฅเคฏ-เคฎเคพเคช เคฌเคฆเคฒเคคเฅ€ เคถเฅˆเคฒเฅ€ เค•เฅ‡ เค†เคงเคพเคฐ เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคฏเฅ‹เคฒเฅ‹เคตเฅ€5 เคฎเฅ‰เคกเคฒ เค•เฅ€ เคธเฅเคฅเคพเคชเคจเคพ เคธเฅ‡ เคชเคฐเคฟเคšเคฏ เคฎเฅ‡เค‚ เคธเฅเคงเคพเคฐ เคฒเคพเคคเฅ€ เคนเฅˆเฅค เคคเคพเคคเฅเค•เคพเคฒเคฟเค• เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค”เคฐ เค‡เคธเค•เฅ€ เคชเฅเคฐเคพเคชเฅเคค เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เฅ‡ เคฎเคฆเฅเคฆเฅ‡เคจเคœเคฐ, YOLOv5u เคเค• เคเคธเคพ เค•เฅเคถเคฒ เคธเฅเคฅเคพเคจเคพเค‚เคคเคฐเคฃ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ เคœเฅ‹ เคจเคตเฅ€เคจ เคฐเค‚เค—เฅ‡เค‚เค—เคฐ เคฎเฅ‡เค‚ เคถเฅ‹เคง เค”เคฐ เคตเฅเคฏเคพเคตเคธเคพเคฏเคฟเค• เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เคฎเฅ‡เค‚ เคธเค เคฟเค• เคธเคฎเคพเคงเคพเคจเฅ‹เค‚ เค•เฅ€ เคคเคฒเคพเคถ เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚เฅค + +![Ultralytics 
YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## Key Features + +- **Anchor-free Split Ultralytics Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. YOLOv5u modernizes this approach. By adopting an anchor-free split Ultralytics head, it ensures a more flexible and adaptive detection mechanism, improving performance across diverse scenarios. + +- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions, but YOLOv5u challenges this tradeoff. It offers a calibrated balance that enables real-time detection without compromising accuracy. This is invaluable for applications that demand fast responses, such as autonomous vehicles, robotics, and real-time video analytics. + +- **Variety of Pre-trained Models:** Understanding that different tasks require different tools, YOLOv5u provides a range of pre-trained models. Whether you are focused on Inference, Validation, or Training, there is a tailored model for your unique challenge. This variety ensures you are not using a one-size-fits-all solution, but a model fine-tuned to your specific task. + +## Supported Tasks and Modes + +The YOLOv5u models, with their various pre-trained weights, excel in [Object Detection](../tasks/detect.md) tasks. They support a comprehensive range of operational
modes, making them suitable for applications ranging from development through to deployment. + +| Model Type | Pre-trained Weights | Task | Inference | Validation | Training | Export | +|-------------|-----------------------------------------------------------------------------------------------------------------------------|------------------------------------|--------|---------|-----------|---------| +| YOLOv5u | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +This table presents a detailed overview of the YOLOv5u model variants, highlighting their applicability to object detection tasks and their support for operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can take full advantage of the capabilities of YOLOv5u models in a wide range of object detection scenarios. + +## Performance Metrics + +!!! Performance + + === "Detection" + + See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes. + 
(เคชเคฟเค•เฅเคธเฅ‡เคฒ) | mAPเคตเฅˆเคฒ
50-95 | เค—เคคเคฟ
CPU ONNX
(เคฎเคฟ.เคธเฅ‡.) | เค—เคคเคฟ
A100 TensorRT
(เคฎเคฟ.เคธเฅ‡.) | params
(M) | FLOPs
+ |---------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|------|------|--------|------|-------|-------| + | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 | + | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 | + | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 | + | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 | + | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 | + | | | | | | | | | + | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 | + | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 | + | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 | + | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 | + | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
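+The second half of the table lists the P6 variants (`yolov5n6u` through `yolov5x6u`), which are trained at a 1280 input size. As a minimal sketch of running one of them at its native resolution, assuming the weights download automatically like other Ultralytics checkpoints:
+
+```python
+from ultralytics import YOLO
+
+# P6 models are trained at 1280x1280, so pass the matching imgsz at inference
+model = YOLO('yolov5s6u.pt')
+results = model('path/to/bus.jpg', imgsz=1280)
+
+for r in results:
+    print(r.boxes.xyxy)  # predicted boxes in xyxy format
+```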
## Usage Examples + +This example provides simple YOLOv5 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the documentation pages. + +!!! Example "Example" + + === "Python" + + PyTorch pretrained `*.pt` models can be passed to the `YOLO()` class to create a model instance in Python: + + ```python + from ultralytics import YOLO + + # Load a COCO-pretrained YOLOv5n model + model = YOLO('yolov5n.pt') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv5n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Load a COCO-pretrained YOLOv5n model and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Load a COCO-pretrained YOLOv5n model and run inference on the 'bus.jpg' image + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ```
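+The supported-modes table above also lists Export. As a complement to the training and inference examples, here is a minimal sketch of exporting a YOLOv5u model to ONNX with the standard Ultralytics `export()` API; the exact output filename is version-dependent, so it is captured from the return value:
+
+```python
+from ultralytics import YOLO
+
+# Load a pretrained anchor-free YOLOv5nu model
+model = YOLO('yolov5nu.pt')
+
+# Export to ONNX; export() returns the path of the exported file
+onnx_path = model.export(format='onnx', imgsz=640)
+print(f'Exported model saved to {onnx_path}')
+```
+
+The exported file can be re-loaded with `YOLO(onnx_path)` for inference, or consumed directly by ONNX Runtime.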
## Citations and Acknowledgements + +If you use YOLOv5 or YOLOv5u in your research, please cite the Ultralytics YOLOv5 repository as follows: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Please note that YOLOv5 models are provided under [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) and [Enterprise](https://ultralytics.com/license) licenses. diff --git a/ultralytics/docs/hi/models/yolov5.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov6.md b/ultralytics/docs/hi/models/yolov6.md new file mode 100755 index 0000000..06b946a --- /dev/null +++ b/ultralytics/docs/hi/models/yolov6.md @@ -0,0 +1,90 @@ +--- +comments: true +description: Explore Meituan YOLOv6, a state-of-the-art object detection model that strikes a balance between speed and accuracy, making it a popular choice for real-time applications. Dive into its features, pre-trained models, and Python usage. +keywords: Meituan YOLOv6, object detection, Ultralytics, YOLOv6 docs, Bi-directional Concatenation, Anchor-Aided Training, pre-trained models, real-time applications +--- + +# Meituan YOLOv6 + +## Overview + +[Meituan](https://about.meituan.com/) YOLOv6 is a cutting-edge object detector that offers a remarkable balance between speed and accuracy, making it a popular choice for real-time applications. This model introduces several notable enhancements to its architecture and training scheme, including the implementation of a Bi-directional Concatenation (BiC) module, an Anchor-Aided Training (AAT) strategy, and an improved backbone and neck design for state-of-the-art accuracy on the COCO dataset. + +![Meituan 
YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**Overview of YOLOv6.** The model architecture diagram shows the redesigned network components and training strategies that lead to significant performance improvements. (a) The neck of YOLOv6 (N and S are shown). For M/L, RepBlocks is replaced with CSPStackRep. (b) The structure of a BiC module. (c) A SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)). + +### Key Features + +- **Bi-directional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the neck of the detector, enhancing localization signals and delivering performance gains with negligible speed degradation. +- **Anchor-Aided Training (AAT) Strategy:** This model proposes AAT to enjoy the benefits of both anchor-based and anchor-free paradigms without compromising inference efficiency. +- **Improved Backbone and Neck Design:** By deepening YOLOv6 to include another stage in the backbone and neck, this model achieves state-of-the-art performance on the COCO dataset with high-resolution input. +- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of smaller YOLOv6 models, enhancing the auxiliary regression branch during training and removing it at inference time to avoid a marked speed decline. + +## Performance Metrics + +YOLOv6 provides various pre-trained models with different scales: 
+ +- YOLOv6-N: 37.5% AP on COCO val2017 at 1187 FPS with an NVIDIA Tesla T4 GPU. +- YOLOv6-S: 45.0% AP at 484 FPS. +- YOLOv6-M: 50.0% AP at 226 FPS. +- YOLOv6-L: 52.8% AP at 116 FPS. +- YOLOv6-L6: State-of-the-art accuracy in real time. + +YOLOv6 also provides quantized models for different precisions, as well as models optimized for mobile platforms. + +## Usage Examples + +This example provides simple YOLOv6 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +!!! Example "Example" + + === "Python" + + PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python: + + ```python + from ultralytics import YOLO + + # Build a YOLOv6n model from scratch + model = YOLO('yolov6n.yaml') + + # Display model information (optional) + model.info() + + # Train the model on the COCO8 example dataset for 100 epochs + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Run inference with the YOLOv6n model on the 'bus.jpg' image + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI commands are available to directly run the models: + + ```bash + # Build a YOLOv6n model from scratch and train it on the COCO8 example dataset for 100 epochs + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Build a YOLOv6n model from scratch and run inference on the 'bus.jpg' image + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ```
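+Because the examples above build YOLOv6 from a YAML configuration and train it from scratch, it is worth checking the resulting weights with the Validation mode. The following is a minimal sketch; the checkpoint path assumes the default Ultralytics save location, which may differ for your run:
+
+```python
+from ultralytics import YOLO
+
+# Load the best checkpoint written by the training run above
+# (assumed default save path; adjust to your actual run directory)
+model = YOLO('runs/detect/train/weights/best.pt')
+
+# Validate on the validation split defined in the dataset YAML
+metrics = model.val(data='coco8.yaml')
+
+print(metrics.box.map)    # mAP50-95
+print(metrics.box.map50)  # mAP50
+```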
## Supported Tasks and Modes + +The YOLOv6 series offers a range of models for high-performance [Object Detection](../tasks/detect.md). These models cater to varying computational and accuracy requirements, making them versatile for a wide variety of applications. + +| Model Type | Pre-trained Weights | Supported Tasks | Inference | Validation | Training | Export | +|----------------|----------------------|-----------------------------------|----------|-----------|-----------|---------| +| YOLOv6-N | `yolov6-n.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Object Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +This table provides a detailed overview of the YOLOv6 model variants, highlighting their capabilities in object detection tasks and their compatibility with operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv6 models in a broad range of object detection scenarios. + +## Citations and Acknowledgements + +The original YOLOv6 paper is available on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/hi/models/yolov6.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov7.md b/ultralytics/docs/hi/models/yolov7.md new file mode 100755 
index 0000000..173b177 --- /dev/null +++ b/ultralytics/docs/hi/models/yolov7.md @@ -0,0 +1,65 @@ +--- +comments: true +description: YOLOv7 is a real-time object detector. Understand its superior speed, impressive accuracy, and distinctive focus on trainable bag-of-freebies optimization. +keywords: YOLOv7, real-time object detector, state-of-the-art, Ultralytics, MS COCO dataset, model re-parameterization, dynamic label assignment, extended scaling, compound scaling +--- + +# YOLOv7: Trainable Bag-of-Freebies + +YOLOv7 is a state-of-the-art real-time object detector that surpasses all known object detectors in both speed and accuracy in the range from 5 FPS to 160 FPS. It has the highest accuracy (56.8% AP) among all known real-time object detectors with 30 FPS or higher on GPU V100. Moreover, YOLOv7 outperforms other object detectors such as YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5, and many others in speed and accuracy. The model is trained on the MS COCO dataset from scratch, without using any other datasets or pre-trained weights. The source code for YOLOv7 is available on [GitHub](https://github.com/WongKinYiu/yolov7). + +![Comparison of SOTA object detectors](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**Comparison of state-of-the-art object detectors.** From the results in Table 2 we know that the proposed method has the best speed-accuracy tradeoff comprehensively. If we compare YOLOv7-tiny-SiLU with YOLOv5-N (r6.1), our method is 127 fps faster and 10.7% more accurate on AP. In addition, YOLOv7 achieves 51.4% AP, while PPYOLOE-L with the same AP has only a 78 fps frame rate. In terms of parameter 
usage, YOLOv7 is 41% less than PPYOLOE-L. If we compare YOLOv7-X with a 114 fps inference speed to YOLOv5-L (r6.1), YOLOv7-X can improve AP by 3.9%. If YOLOv7-X is compared with YOLOv5-X (r6.1) of similar scale, the inference speed of YOLOv7-X is 31 fps faster. In addition, in terms of the amount of parameters and computation, YOLOv7-X reduces parameters by 22% and computation by 8% compared to YOLOv5-X (r6.1), but improves AP by 2.2%. ([source](https://arxiv.org/pdf/2207.02696.pdf)). + +## Overview + +Real-time object detection is an important component of many computer vision systems, including multi-object tracking, autonomous driving, robotics, and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving the inference speed of various CPUs, GPUs, and neural processing units (NPUs). YOLOv7 supports both mobile GPU and GPU devices, from the edge to the cloud. + +Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on the optimization of the training process. This includes modules and optimization methods designed to improve the accuracy of object detection without increasing the inference cost, a concept known as the "trainable bag-of-freebies". + +## Key Features + +YOLOv7 introduces several key features: + +1. 
**Model Re-parameterization**: YOLOv7 proposes a planned re-parameterized model, a strategy applicable to layers in different networks based on the concept of gradient propagation paths. + +2. **Dynamic Label Assignment**: Training a model with multiple output layers presents a new issue: "How to assign dynamic targets for the outputs of different branches?" To solve this problem, YOLOv7 introduces a new label assignment method called coarse-to-fine lead guided label assignment. + +3. **Extended and Compound Scaling**: YOLOv7 proposes "extend" and "compound scaling" methods for real-time object detectors that can effectively utilize parameters and computation. + +4. **Efficiency**: The method proposed by YOLOv7 can effectively reduce about 40% of the parameters and 50% of the computation of state-of-the-art real-time object detectors, while achieving faster inference speed and higher detection accuracy. + +## Usage Examples + +As of the time of writing, Ultralytics does not currently support YOLOv7 models. Therefore, any users interested in using YOLOv7 will need to refer directly to the YOLOv7 GitHub repository for installation and usage instructions. + +Here is a brief summary of the typical steps you might take to use YOLOv7: + +1. Visit the YOLOv7 GitHub repository: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7). + +2. 
เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคฒเคฟเค README เคซเคผเคพเค‡เคฒ เคฎเฅ‡เค‚ เคฆเคฟเค เคนเฅเค เคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เคพ เคชเคพเคฒเคจ เค•เคฐเฅ‡เค‚เฅค เค‡เคธเคฎเฅ‡เค‚ เค†เคฎเคคเฅŒเคฐ เคชเคฐ เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เค•เฅเคฒเฅ‹เคจเคฟเค‚เค—, เค†เคตเคถเฅเคฏเค• เคกเคฟเคชเฅ‡เค‚เคกเฅ‡เค‚เคธเคฟเคฏเฅ‹เค‚ เค•เฅ€ เคธเฅเคฅเคพเคชเคจเคพ, เค”เคฐ เค†เคตเคถเฅเคฏเค• เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เคšเคฐเฅ‹เค‚ เค•เคพ เคธเฅ‡เคŸเค…เคช เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +3. เคธเฅเคฅเคพเคชเคจเคพ เคธเคฎเฅเคชเฅ‚เคฐเฅเคฃ เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ, เค†เคช เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคŸเฅเคฐเฅ‡เคจ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เคœเฅˆเคธเคพ เค•เคฟ เคฐเฅ‡เคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคจเคฟเคฐเฅเคฆเฅ‡เคถ เคฆเคฟเค เค—เค เคนเฅˆเค‚เฅค เคฏเคน เค†เคฎเคคเฅŒเคฐ เคชเคฐ เค…เคชเคจเฅ‡ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เคคเฅˆเคฏเคพเคฐ เค•เคฐเคจเคพ, เคฎเฅ‰เคกเคฒ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐ เค•เคฐเคจเคพ, เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเคจเคพ, เค”เคฐ เคซเคฟเคฐ เคŸเฅเคฐเฅ‡เคจ เค•เคฟเค เค—เค เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเคจเคพ เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +เค•เฅƒเคชเคฏเคพ เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฆเคฎ เค†เคชเค•เฅ‡ เคตเคฟเคถเคฟเคทเฅเคŸ เค‰เคชเคฏเฅ‹เค— เคฎเคพเคฎเคฒเฅ‡ เค”เคฐ YOLOv7 เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เค•เฅ€ เคตเคฐเฅเคคเคฎเคพเคจ เคธเฅเคฅเคฟเคคเคฟ เคชเคฐ เคจเคฟเคฐเฅเคญเคฐ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค‡เคธเคฒเคฟเค, เคฏเฅ‹เค—เฅเคฏเคคเคพ เค•เฅ‡ เคฒเคฟเค เคธเฅ€เคงเฅ‡ YOLOv7 GitHub เคฐเคฟเคชเฅ‰เคœเคผเคฟเคŸเคฐเฅ€ เคฎเฅ‡เค‚ เคฆเคฟเค เค—เค เคจเคฟเคฐเฅเคฆเฅ‡เคถเฅ‹เค‚ เค•เคพ เคธเค‚เคฆเคฐเฅเคญ เคฒเฅ‡เคจเคพ เคฎเคพเคœเคผเคฌเฅ‚เคคเฅ€ เคธเฅ‡ เค…เคจเฅเคถเค‚เคธเคฟเคค เคนเฅˆเฅค + +เคนเคฎ เค‡เคธเคธเฅ‡ เคนเฅ‹เคจเฅ‡ เคตเคพเคฒเฅ€ เค•เคฟเคธเฅ€ เคญเฅ€ เค…เคธเฅเคตเคฟเคงเคพ เค•เฅ‡ เคฒเคฟเค เค–เฅ‡เคฆ เคชเฅเคฐเค•เคŸ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ YOLOv7 เค•เฅ‡ เคธเคฎเคฐเฅเคฅเคจ เค•เฅ‹ Ultralytics เคฎเฅ‡เค‚ เคฒเคพเค—เฅ‚ เคนเฅ‹เคจเฅ‡ เคชเคฐ เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เค‡เคธ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผ เค•เฅ‹ เค…เคฆเฅเคฏเคคเคฟเคค เค•เคฐเคจเฅ‡ เค•เคพ เคชเฅเคฐเคฏเคพเคธ เค•เคฐเฅ‡เค‚เค—เฅ‡เฅค + +## เคธเค‚เคฆเคฐเฅเคญ เค”เคฐ เค†เคญเคพเคฐ + +เคนเคฎ YOLOv7 เคฒเฅ‡เค–เค•เฅ‹เค‚ เค•เฅ‹ เคฏเคนเคพเค‚ เค‰เคฒเฅเคฒเฅ‡เค– เค•เคฐเคจเคพ เคšเคพเคนเฅ‡เค‚เค—เฅ‡, เค‰เคจเค•เฅ‡ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅเคทเฅ‡เคคเฅเคฐ เคฎเฅ‡เค‚ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคฏเฅ‹เค—เคฆเคพเคจ เค•เฅ‡ เคฒเคฟเค: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +The original YOLOv7 paper can be found on [arXiv](https://arxiv.org/pdf/2207.02696.pdf). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/WongKinYiu/yolov7). We appreciate their efforts in advancing the field and making their work accessible to the broader community. diff --git a/ultralytics/docs/hi/models/yolov7.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/models/yolov8.md b/ultralytics/docs/hi/models/yolov8.md new file mode 100755 index 0000000..44f4628 --- /dev/null +++ b/ultralytics/docs/hi/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: Explore the exciting features of YOLOv8, the latest version of our real-time object detector. See how advanced architectures, pre-trained models, and an optimal balance of accuracy and speed make YOLOv8 the right choice for your object detection tasks. +keywords: YOLOv8, Ultralytics, real-time object detector, pre-trained models, documentation, object detection, YOLO series, advanced architectures, accuracy, speed +--- + +# YOLOv8 + +## Overview + +YOLOv8 is the latest iteration in the YOLO series, offering cutting-edge performance in terms of accuracy and speed. Building on the advances of previous YOLO versions, YOLOv8 introduces new features and optimizations that make it an ideal choice for a wide variety of object detection 
เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคเค• เค†เคฆเคฐเฅเคถ เคšเฅเคจเคพเคต เคฌเคจเคพเคคเคพ เคนเฅˆ เคตเคฟเคญเคฟเคจเฅเคจ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เคฎเฅ‡เค‚เฅค + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## เคฎเฅเค–เฅเคฏ เคตเคฟเคถเฅ‡เคทเคคเคพเคเค‚ + +- **เค‰เคจเฅเคจเคค เคชเฅ€เค  เค”เคฐ เค—เคฐเฅเคฆเคจ เคถเฅƒเค‚เค–เคฒเคพเคเค‚:** YOLOv8 เค‰เคจเฅเคจเคค เคชเฅ€เค  เค”เคฐ เค—เคฐเฅเคฆเคจ เคถเฅƒเค‚เค–เคฒเคพเคเค‚ เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เคตเคฟเคถเฅ‡เคทเคคเคพ เคจเคฟเคทเฅเค•เคฐเฅเคทเคฃ เค”เคฐ เคตเคธเฅเคคเฅ เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เค•เฅเคทเคฎเคคเคพ เค•เฅ€ เคธเฅเคงเคพเคฐ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค +- **เคเค‚เค•เคฐ-เคฎเฅเค•เฅเคค เคธเฅเคชเฅเคฒเคฟเคŸ Ultralytics เคนเฅˆเคก:** YOLOv8 เคเค‚เค•เคฐ-เค†เคงเคพเคฐเคฟเคค เคฆเฅƒเคทเฅเคŸเคฟเค•เฅ‹เคฃเฅ‹เค‚ เค•เฅ€ เคคเฅเคฒเคจเคพ เคฎเฅ‡เค‚ เค…เคงเคฟเค• เคธเคŸเฅ€เค•เคคเคพ เค”เคฐ เคเค• เค…เคงเคฟเค• เคธเค‚เคšเคพเคฒเคจเคฏเฅ‹เค—เฅเคฏ เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‡ เคฒเคฟเค เคเค• เคเค‚เค•เคฐ-เคฎเฅเค•เฅเคค เคธเฅเคชเฅเคฒเคฟเคŸ Ultralytics เคนเฅ‡เคก เค…เคชเคจเคพเคคเคพ เคนเฅˆเฅค +- **เคธเฅเคงเคพเคฐเคฟเคค เคธเคŸเฅ€เค•เคคเคพ-เค—เคคเคฟ เค•เคพ เคธเค‚เคคเฅเคฒเคจ:** เคธเคŸเฅ€เค•เคคเคพ เค”เคฐ เค—เคคเคฟ เค•เฅ‡ เคฎเคงเฅเคฏ เคฎเฅ‡เค‚ เค‰เคšเคฟเคค เคธเค‚เคคเฅเคฒเคจ เคฌเคจเคพเค เคฐเค–เคจเฅ‡ เค•เฅ‡ เคงเฅเคฏเคพเคจ เค•เฅ‡ เคธเคพเคฅ, YOLOv8 เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคตเคธเฅเคคเฅเคจเคฟเคฐเฅเคงเคพเคฐเคฃ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค เคนเฅˆ เคœเฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เค•เฅเคทเฅ‡เคคเฅเคฐเฅ‹เค‚ เคฎเฅ‡เค‚ เคนเฅ‹ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค +- **เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ:** YOLOv8 เคตเคฟเคญเคฟเคจเฅเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคเค• เคตเคฟเคธเฅเคคเฅƒเคค เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฐเฅ‡เค‚เคœ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เค‡เคธเคธเฅ‡ เค…เคชเคจเฅ‡ เคตเคฟเคถเฅ‡เคทเคคเคพ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค เคธเคนเฅ€ เคฎเฅ‰เคกเคฒ เค–เฅ‹เคœเคจเคพ เค†เคธเคพเคจ เคนเฅ‹ เคœเคพเคคเคพ เคนเฅˆเฅค + +## เคธเคฎเคฐเฅเคฅเคฟเคค เค•เคพเคฐเฅเคฏ เค”เคฐ เคฎเฅ‹เคก + +YOLOv8 เคถเฅเคฐเฅƒเค‚เค–เคฒเคพ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคตเคธเฅเคคเฅเคจเคฟเคฐเฅเคงเคพเคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคถเฅ‡เคทเค•เฅƒเคค เค•เคˆ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ€ เคนเฅˆเฅค เคฏเฅ‡ เคฎเฅ‰เคกเคฒ เคตเคฟเคญเคฟเคจเฅเคจ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‹ เคชเฅ‚เคฐเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคพเค‡เคจ เค•เคฟเค เค—เค เคนเฅˆเค‚, เคตเฅˆเคถเฅเคตเคฟเค• เคธเฅเคคเคฐ เคชเคนเฅเค‚เคšเคจเฅ‡ เคธเฅ‡ เคฒเฅ‡เค•เคฐ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ, เคชเฅ‹เคœ/เค•เคฟเค‚เคคเฅเคฎเคพเค‚เค• เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เค”เคฐ เคถเฅเคฐเฅ‡เคฃเฅ€เค•เคฐเคฃ เคœเฅˆเคธเฅ‡ เคœเคŸเคฟเคฒ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เคคเค•เฅค + +Yเคเค• เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคนเคฐ เคฎเคพเคจเค•, เคตเคฟเคถเคฟเคทเฅเคŸ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค…เคชเคจเฅ€ เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เฅ‹ เคงเฅเคฏเคพเคจ เคฎเฅ‡เค‚ เคฐเค–เคคเฅ‡ เคนเฅเค, เค‰เคšเฅเคš เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฟเค เคœเคพเคคเฅ‡ เคนเฅˆเค‚เฅค เค‡เคธเค•เฅ‡ เค…เคฒเคพเคตเคพ, เคฏเฅ‡ เคฎเฅ‰เคกเคฒ เคตเคฟเคญเคฟเคจเฅเคจ เคธเค‚เคšเคพเคฒเคจ เคฎเฅ‹เคก เค•เฅ‡ เคธเคพเคฅ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เคนเฅˆเค‚ เคœเฅˆเคธเฅ‡ [Inference](../modes/predict.md), [Validation](../modes/val.md), 
[Training](../modes/train.md), and [Export](../modes/export.md), facilitating their use across different stages of development and deployment. + +| Model | Filenames | Task | Inference | Validation | Training | Export | +|-------------|----------------------------------------------------------------------------------------------------------------|--------------------------------------------|-----------|------------|----------|--------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [Detection](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [Instance Segmentation](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [Pose/Keypoints](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [Classification](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +This table provides an overview of the YOLOv8 model variants, highlighting their suitability for specific tasks and their compatibility with various operational modes. It demonstrates the versatility and robustness of the YOLOv8 series, making them suitable for a wide range of computer vision applications; the sketch below shows how the filename suffix selects the task.
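+As the table indicates, the task a YOLOv8 model performs is encoded in its weights filename. A minimal sketch, assuming the standard Ultralytics `YOLO` loader and automatic weight download:
+
+```python
+from ultralytics import YOLO
+
+# The filename suffix selects the task-specific head
+detect = YOLO('yolov8n.pt')        # detection
+segment = YOLO('yolov8n-seg.pt')   # instance segmentation
+pose = YOLO('yolov8n-pose.pt')     # pose/keypoints
+classify = YOLO('yolov8n-cls.pt')  # classification
+
+# The task attribute is inferred from the loaded weights
+for m in (detect, segment, pose, classify):
+    print(m.task)
+```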
(เคชเคฟเค•เฅเคธ) | mAPเคตเฅˆเคฒ
50-95 | เค—เคคเคฟ
CPU ONNX
(ms) | เค—เคคเคฟ
A100 TensorRT
(ms) | params
(เคเคฎ) | FLOPs
+ | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "Detection (Open Images V7)" + + See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes. + + 
(เคชเคฟเค•เฅเคธ) | mAPเคตเฅˆเคฒ
50-95 | เค—เคคเคฟ
CPU ONNX
(ms) | เค—เคคเคฟ
A100 TensorRT
(ms) | params
(เคเคฎ) | FLOPs
+ | ----------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "Segmentation (COCO)" + + See the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes. + + 
(เคชเคฟเค•เฅเคธ) | mAPเคฌเฅ‰เค•เฅเคธ
50-95 | mAPเคฎเคพเคธเฅเค•
50-95 | เค—เคคเคฟ
CPU ONNX
(ms) | เค—เคคเคฟ
A100 TensorRT
(ms) | params
(เคเคฎ) | FLOPs
+ | -------------------------------------------------------------------------------------------- | --------------------- | --------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "Classification (ImageNet)" + + See the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pre-trained classes. + + 
(เคชเคฟเค•เฅเคธ) | เคถเฅ€เคฐเฅเคท1 เคตเคฟเคœเคฏเฅ€
เคฏเฅ‹เค—เฅเคฏเคคเคพ | เคถเฅ€เคฐเฅเคท5 เคตเคฟเคœเคฏเฅ€
เคฏเฅ‹เค—เฅเคฏเคคเคพ | เค—เคคเคฟ
CPU ONNX
(ms) | เค—เคคเคฟ
A100 TensorRT
(ms) | params
(เคเคฎ) | FLOPs
+ | ------------------------------------------------------------------------------------------ | --------------------- | ------------------------ | ------------------------ | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ | + | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | + | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | + | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | + | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | + | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + + === "Pose (COCO)" + + See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, 'person'. + + 
(เคชเคฟเค•เฅเคธ) | mAPเคถเคพเคฎเคฟเคคเฅ€
50-95 | mAPเคถเคพเคฎเคฟเคคเฅ€
50 | เค—เคคเคฟ
CPU ONNX
(ms) | เค—เคคเคฟ
A100 TensorRT
(ms) | params
(เคเคฎ) | FLOPs
+ | ----------------------------------------------------------------------------------------------------- | --------------------- | ------------------------ | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## Usage Examples + +This example provides simple YOLOv8 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages. + +Note that the examples below are for YOLOv8 [Detect](../tasks/detect.md) models. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) and [Pose](../tasks/pose.md) docs. + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคชเคพเคฏเคฅเคจ" + + เคชเคพเคฏเคŸเฅ‹เคฐเฅเคš เค•เคพ เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค `*.pt` เคฎเฅ‰เคกเคฒ เค”เคฐ เคตเคฟเคจเฅเคฏเคพเคธ `*.yaml` เคซเคผเคพเค‡เคฒ เคชเคพเคฏเคŸเคจ เคฎเฅ‡เค‚ เคเค• เคฎเฅ‰เคกเคฒ เคจเคฎเฅ‚เคจเคพ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค `YOLO()` เค•เค•เฅเคทเคพ เค•เฅ‹ เคชเคพเคฐเคฟเคค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ: + + ```python + from ultralytics import YOLO + + # เค•เฅ‹เคนเคฒเฅ€ เค•เฅ‡ COCO-pretrained YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เคฎเฅ‰เคกเคฒ เคœเคพเคจเค•เคพเคฐเฅ€ เคฆเคฟเค–เคพเคเค (เคตเฅˆเค•เคฒเฅเคชเคฟเค•) + model.info() + + # COCO8 เค‰เคฆเคพเคนเคฐเคฃ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ 100 เคเคชเฅ‹เค• เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # 'bus.jpg' เค›เคตเคฟ เคชเคฐ YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เคšเคฒเคพเคเค + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI เค•เคฎเคพเค‚เคก เค•เฅ‹ เคธเฅ€เคงเฅ‡ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเค‚: + + ```bash + # COCO-pretrained YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ เค‰เคธเฅ‡ COCO8 เค‰เคฆเคพเคนเคฐเคฃ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ 100 เคเคชเฅ‹เค• เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO-pretrained YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ 'bus.jpg' เค›เคตเคฟ เคชเคฐ เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เคšเคฒเคพเคเค + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## เคธเคจเฅเคฆเคฐเฅเคญ เค”เคฐ เคชเฅเคฐเคธเฅเค•เคพเคฐ + +เคฏเคฆเคฟ เค†เคช เค…เคชเคจเฅ‡ เค•เคพเคฎ เคฎเฅ‡เค‚ YOLOv8 เคฎเฅ‰เคกเคฒ เคฏเคพ เค‡เคธ เคฐเคฟเคชเฅ‰เคœเคฟเคŸเคฐเฅ€ เค•เฅ‡ เค•เคฟเคธเฅ€ เค…เคจเฅเคฏ เคธเฅ‰เคซเคผเฅเคŸเคตเฅ‡เคฏเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เค•เฅƒเคชเคฏเคพ เค‡เคธเค•เฅ€ เค‰เคฆเฅเคงเคฐเคฃ เค‡เคธ เคชเฅเคฐเค•เคพเคฐ เค•เคฐเฅ‡เค‚: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {เค—เฅเคฒเฅ‡เคจ เคœเฅ‹เคšเคฐ and เค†เคฏเฅเคท เคšเฅŒเคฐเคธเคฟเคฏเคพ and เคœเคฟเค‚เค— เค•เฅเคฏเฅ‚}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +เค•เฅƒเคชเคฏเคพ เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ DOI เคฒเค‚เคฌเคฟเคค เคนเฅˆ เค”เคฐ เคœเคฌ เคฏเคน เค‰เคชเคฒเคฌเฅเคง เคนเฅ‹ เคœเคพเคเค—เคพ เคคเฅ‹ เค‰เคฆเฅเคงเคฐเคฃ เคฎเฅ‡เค‚ เค‡เคธเฅ‡ เคถเคพเคฎเคฟเคฒ เค•เคฟเคฏเคพ เคœเคพเคเค—เคพเฅค YOLOv8 เคฎเฅ‰เคกเคฒ [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) เค”เคฐ [เคเค‚เคŸเคฐเคชเฅเคฐเคพเค‡เคœ](https://ultralytics.com/license) เคฒเคพเค‡เคธเฅ‡เค‚เคธ เค•เฅ‡ เคคเคนเคค เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเค‚เฅค diff --git a/ultralytics/docs/hi/models/yolov8.md:Zone.Identifier b/ultralytics/docs/hi/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/benchmark.md b/ultralytics/docs/hi/modes/benchmark.md new file mode 100755 index 0000000..9187029 --- /dev/null +++ b/ultralytics/docs/hi/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Ultralytics YOLO เค•เฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เฅ‡ เคœเคฐเคฟเค YOLOv8 เค•เฅ€ เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เคพ เคœเคพเค‚เคš เค•เคฐเฅ‡เค‚; mAP50-95, accuracy_top5 เคฎเคพเคช, เค”เคฐ เค…เคจเฅเคฏ เคฎเคพเคชเฅ‹เค‚ เคชเคฐ เค…เคจเฅเคญเคต เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +keywords: Ultralytics, YOLOv8, เคฌเค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค—, เค—เคคเคฟ เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒเคฟเค‚เค—, เคธเคŸเฅ€เค•เคคเคพ เคชเฅเคฐเฅ‹เคซเคพเค‡เคฒเคฟเค‚เค—, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLO เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช +--- + +# เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเฅเคฏเคพเคŸเคฟเค•เฅเคธ YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคฌเค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค— + +เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเฅเคฏเคพเคŸเคฟเค•เฅเคธ YOLO เคชเคพเคฐเคฟเคธเฅเคฅเคฟเคคเคฟเค•เฅ€ เค”เคฐ เคธเคฎเคพเคตเฅ‡เคถ + +## เคชเคฐเคฟเคšเคฏ + +เคœเคฌ เค†เคชเค•เคพ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค”เคฐ เคธเคคเฅเคฏเคพเคชเคฟเคค เคนเฅ‹ เคœเคพเคคเคพ เคนเฅˆ, เคคเฅ‹ เค†เค—เคพเคฎเฅ€ เคคเคพเคฐเฅเค•เคฟเค• เคšเคฐเคฃ เคนเฅ‹เคคเคพ เคนเฅˆ เค•เคฟ เคคเคคเฅเค•เคพเคฒเคฟเค• เคตเคพเคธเฅเคคเคตเคฟเค•-เคฆเฅเคจเคฟเคฏเคพ เค•เฅ€ เคธเฅเคฅเคฟเคคเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค‡เคธเค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚เฅค Ultralytics YOLOv8 เคฎเฅ‡เค‚ เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค• เคฎเฅ‹เคก เค‡เคธ เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เค•เฅ€ เคธเฅ‡เคตเคพ เค•เคฐเคคเคพ เคนเฅˆ, เคœเคนเคพเค‚ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‹ เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ€ เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เคฎเคœเคฌเฅ‚เคค เคขเคพเค‚เคšเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค + +## เคฌเค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค— เค•เฅเคฏเฅ‹เค‚ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆ? 
+ +- **เคœเคพเค—เคฐเฅ‚เค• เคจเคฟเคฐเฅเคฃเคฏ:** เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฌเฅ€เคš เคŸเฅเคฐเฅ‡เคก-เค‘เคซ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +- **เคธเค‚เคธเคพเคงเคจ เค†เคตเค‚เคŸเคจ:** เค…เคฒเค—-เค…เคฒเค— เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เคพ เคตเคฟเคญเคฟเคจเฅเคจ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เคชเคฐ เค•เฅˆเคธเคพ เค•เคพเคฎ เค•เคฐเคคเคพ เคนเฅˆ เค‡เคธเค•เฅ€ เคธเคฎเค เคชเคพเคเค‚เฅค +- **เค…เคจเฅเค•เฅ‚เคฒเคจ:** เค…เคชเคจเฅ‡ เคตเคฟเคถเคฟเคทเฅเคŸ เค‰เคชเคฏเฅ‹เค— เคฎเคพเคฎเคฒเฅ‡ เคฎเฅ‡เค‚ เคธเคฐเฅเคตเฅ‹เคคเฅเคคเคฎ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคจเฅ‡ เคตเคพเคฒเคพ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช เค•เฅŒเคจ เคธเคพ เคนเฅˆ, เค‡เคธเค•เฅ€ เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +- **เคฒเคพเค—เคค เคธเค‚เคšเคฏ:** เคฌเค‚เคšเคฎเคพเคฐเฅเค• เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‡ เค†เคงเคพเคฐ เคชเคฐ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เคธเค‚เคธเคพเคงเคจเฅ‹เค‚ เค•เคพ เค…เคงเคฟเค• เค…เคญเคฟเค•เคฒเฅเคช เคธเฅ‡เคตเคจ เค•เคฐเฅ‡เค‚เฅค + +### เคฌเค‚เคšเคฎเคพเคฐเฅเค• เคฎเฅ‹เคก เคฎเฅ‡เค‚ เคฎเฅเค–เฅเคฏ เคฎเคพเคช + +- **mAP50-95:** เคตเคธเฅเคคเฅ เค•เคพ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡, เคตเคฟเคญเคพเคœเคจ เค•เคฐเคจเฅ‡ เค”เคฐ เคธเฅเคฅเคฟเคคเคฟ เคฎเคพเคจ เค•เฅ‡ เคฒเคฟเคเฅค +- **accuracy_top5:** เค›เคตเคฟ เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเคเฅค +- **เคชเคฐเคฟเคจเฅเคฆเคคเคพ เคธเคฎเคฏ:** เคชเฅเคฐเคคเคฟ เค›เคตเคฟ เค•เฅ‡ เคฒเคฟเค เคฒเคฟเคฏเคพ เค—เคฏเคพ เคธเคฎเคฏ เคฎเคฟเคฒเฅ€เคธเฅ‡เค•เค‚เคก เคฎเฅ‡เค‚เฅค + +### เคธเคฎเคฐเฅเคฅเคฟเคค เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช + +- **ONNX:** CPU เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‡ เคฒเคฟเค เค†เคฆเคฐเฅเคถ +- **TensorRT:** เค…เคงเคฟเค•เคคเคฎ GPU เค•เฅเคทเคฎเคคเคพ เค•เฅ‡ เคฒเคฟเค +- **OpenVINO:** Intel เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เคธเค‚เคถเฅ‹เคงเคจ เค•เฅ‡ เคฒเคฟเค +- **CoreML, TensorFlow SavedModel, เค”เคฐ เค…เคงเคฟเค•:** เคตเคฟเคตเคฟเคง เคกเคฟเคชเฅเคฒเฅ‰เคฏเคฎเฅ‡เค‚เคŸ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเคเฅค + +!!! Tip "เคฏเฅเค•เฅเคคเคฟ" + + * เคคเค•เคจเฅ€เค•เฅ€ เค•เคพเคฐเคฃเฅ‹เค‚ เคธเฅ‡ เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฟเค‚เค— เคธเค‚เคธเคพเคงเคจเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคธเคฎเคฏ ONNX เคฏเคพ OpenVINO เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚, เคคเคพเค•เคฟ เค†เคช CPU เคธเฅเคชเฅ€เคก เคคเค• upto 3x เคคเค• เคธเฅเคชเฅ€เคกเค…เคช เค•เคฐ เคธเค•เฅ‡เค‚เฅค + * GPU เคธเฅเคชเฅ€เคก เคคเค• เค…เคชเคจเฅ‡ เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฟเค‚เค— เคธเค‚เคธเคพเคงเคจเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคธเคฎเคฏ TensorRT เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚ เคคเคพเค•เคฟ เค†เคช เคคเค• 5x เคคเค• เคธเฅเคชเฅ€เคกเค…เคช เค•เคฐ เคธเค•เฅ‡เค‚เฅค + +## เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ + +เคธเคฎเคฐเฅเคฅเคฟเคค เคธเคญเฅ€ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เคชเคฐ ONNX, TensorRT เค†เคฆเคฟ เค•เฅ‡ เคธเคพเคฅ YOLOv8n เคฌเค‚เคšเคฎเคพเคฐเฅเค• เคšเคฒเคพเคเค‚เฅค เคชเฅ‚เคฐเฅ€ เคจเคฟเคฐเฅเคฏเคพเคค เคตเคฟเคตเคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคจเฅ€เคšเฅ‡ Arguments เค…เคจเฅเคญเคพเค— เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # GPU เคชเคฐ เคฌเค‚เคšเคฎเคพเคฐเฅเค• + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo เคฌเค‚เคšเคฎเคพเคฐเฅเค• model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Arguments + +`model`, `data`, `imgsz`, `half`, `device`, เค”เคฐ `verbose` เคœเฅˆเคธเฅ‡ เคคเคฐเฅเค• เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‹ เคฎเคพเคจเคฆเค‚เคกเฅ‹เค‚ เค•เฅ‹ เค…เคชเคจเฅ€ เคตเคฟเคถเฅ‡เคท เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคธเฅเค—เคฎเคคเคพ เค•เฅ‡ เคธเคพเคฅ เคฌเค‚เคšเคฎเคพเคฐเฅเค• เค•เฅ‹ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเฅเคตเคฟเคงเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ€ เคคเฅเคฒเคจเคพ เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเฅเคตเคฟเคงเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค + +| เค•เฅเค‚เคœเฅ€ | เคฎเคพเคจ | เคตเคฟเคตเคฐเคฃ | +|-----------|------------|---------------------------------------------------------------------------------| +| `model` | `เค•เฅ‹เคˆ เคจเคนเฅ€เค‚` | เคฎเฅ‰เคกเคฒ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคฏเคพเคจเฅ€ yolov8n.pt, yolov8n.yaml | +| `data` | `เค•เฅ‹เคˆ เคจเคนเฅ€เค‚` | เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค— เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เคธเค‚เคฆเคฐเฅเคญเคฟเคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ‡ YAML เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ (val เคฒเฅ‡เคฌเคฒ เค•เฅ‡ เคคเคนเคค) | +| `imgsz` | `640` | เค›เคตเคฟ เค•เคพ เค†เค•เคพเคฐ เคธเฅเค•เฅˆเคฒเคฐ เคฏเคพ (h, w) เคธเฅ‚เคšเฅ€, เค…เคฐเฅเคฅเคพเคค (640, 480) | +| `half` | `เค…เคธเคคเฅเคฏ` | FP16 เคฎเคพเคชเฅเคฏเคพเค‚เค•เคจ | +| `int8` | `เค…เคธเคคเฅเคฏ` | INT8 เคฎเคพเคชเฅเคฏเคพเค‚เค•เคจ | +| `device` | `เค•เฅ‹เคˆ เคจเคนเฅ€เค‚` | เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฏเฅเค•เฅเคคเคฟ เค‰เคชเค•เคฐเคฃ, เค…เคฐเฅเคฅเคพเคค cuda device=0 เคฏเคพ device=0,1,2,3 เคฏเคพ device=cpu | +| `verbose` | `เค…เคธเคคเฅเคฏ` | เคคเฅเคฐเฅเคŸเคฟ เคฎเฅ‡เค‚ เคจ เคœเคพเคฐเฅ€ เคฐเค–เฅ‡ (เคฌเฅ‚เคฒ), เคฏเคพ เคตเคพเคฒ (เคซเฅเคฒเฅ‹เคŸ) | + +## เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช + +เคฌเค‚เคšเคฎเคพเคฐเฅเค• เคชเฅเคฐเคฏเคพเคธ เคนเฅ‹เค—เคพ เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคธเคญเฅ€ เคธเค‚เคญเคพเคตเคฟเคค เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เคชเคฐ เคธเฅเคตเคšเคพเคฒเคฟเคค เคฐเฅ‚เคช เคธเฅ‡ เคšเคฒเคพเคจเฅ‡ เค•เฅ€ เค•เฅ‹เคถเคฟเคถ เค•เคฐเฅ‡เค—เคพเฅค + +| เคชเฅเคฐเคพเคฐเฅ‚เคช | `เคชเฅเคฐเคพเคฐเฅ‚เคช` เคคเคฐเฅเค• | เคฎเฅ‰เคกเคฒ | เคฎเฅ‡เคŸเคพเคกเฅ‡เคŸเคพ | เคคเคฐเฅเค• | +|--------------------------------------------------------------------|----------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` 
| `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +เคชเฅ‚เคฐเฅเคฃ เคจเคฟเคฐเฅเคฏเคพเคค เคตเคฟเคตเคฐเคฃ เคฆเฅ‡เค–เฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅƒเคทเฅเค  เคฎเฅ‡เค‚ [Export](https://docs.ultralytics.com/modes/export/)เฅค diff --git a/ultralytics/docs/hi/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/hi/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/export.md b/ultralytics/docs/hi/modes/export.md new file mode 100755 index 0000000..646b439 --- /dev/null +++ b/ultralytics/docs/hi/modes/export.md @@ -0,0 +1,108 @@ +--- +comments: true +description: เคธเคญเฅ€ เคชเฅเคฐเค•เคพเคฐ เค•เฅ‡ เคจเคฟเคฐเฅเคฏเคพเคค เคธเฅเคคเคฐ เคชเคฐ YOLOv8 เคฎเฅ‰เคกเคฒเฅเคธ เค•เฅ‹ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค†เคชเค•เฅ‡ เคฒเคฟเค เคšเคฐเคฃ-เคฆเคฐ-เคšเคฐเคฃ เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพเฅค เค…เคฌ เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ€ เคœเคพเค‚เคš เค•เคฐเฅ‡เค‚! +keywords: YOLO, YOLOv8, Ultralytics, เคฎเฅ‰เคกเคฒ เคจเคฟเคฐเฅเคฏเคพเคค, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, เคจเคฟเคฐเฅเคฏเคพเคค เคฎเฅ‰เคกเคฒ +--- + +# Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคจเคฟเคฐเฅเคฏเคพเคค + +เคฏเฅ‚เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเค•เฅเคธ YOLO ecosystem and integrations + +## เคชเคฐเคฟเคšเคฏ + +เคเค• เคฎเฅ‰เคกเคฒ เค•เฅ€ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ€ เค…เค‚เคคเคฟเคฎ เคฒเค•เฅเคทเฅเคฏ เค‰เคธเฅ‡ เคตเคพเคธเฅเคคเคตเคฟเค• เคฆเฅเคจเคฟเคฏเคพ เค•เฅ‡ เค†เคตเฅ‡เคฆเคจเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคคเฅˆเคจเคพเคค เค•เคฐเคจเคพ เคนเฅ‹เคคเคพ เคนเฅˆเฅค เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเฅ€เค•เฅเคธ YOLOv8 เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เคฎเฅ‹เคก เคฎเฅ‡เค‚ เค†เคชเค•เฅ‹ เค…เคญเคฟเคจเคตเคคเคพ เคฐเฅ‡เค‚เคœ เค•เฅ‡ เค‘เคชเฅเคถเคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เคตเคพเคฏเคฐเคฒเฅ‡ เค•เคฟเค เค—เค เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เคธเฅเคตเคฐเฅ‚เคชเฅ‹เค‚ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคœเคฟเคธเคธเฅ‡ เคตเฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅเคฒเฅ‡เคŸเคซเฅ‰เคฐเฅเคฎเฅ‹เค‚ เค”เคฐ เค‰เคชเค•เคฐเคฃเฅ‹เค‚ เคชเคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฟเค เคœเคพ เคธเค•เฅ‡เค‚เฅค เคฏเคน เคตเฅเคฏเคพเคชเค• เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพ เค…เคงเคฟเค•เคคเคฎ เคธเค‚เค—เคคเคคเคพ เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคคเคฐเฅ€เค•เฅ‹เค‚ เค•เฅ‹ เคฆเคฟเค–เคพเคจเฅ‡ เค•เคพ เคฒเค•เฅเคทเฅเคฏ เคฐเค–เคคเฅ€ เคนเฅˆเฅค + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: เค…เคชเคจเฅ‡ เค‰เคคเฅเคชเคพเคฆเคจ เค•เฅ‹ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคธเฅเคŸเคฎ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค Ultralytics YOLOv8 เคฎเฅ‰เคกเคฒ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค”เคฐ เคตเฅ‡เคฌเค•เฅˆเคฎ เคชเคฐ เคฒเคพเค‡เคต เค…เคจเฅเคฎเคพเคจ เคšเคฒเคพเคจเฅ‡เฅค +

+ +## YOLOv8 เค•เฅ‡ เคจเคฟเคฐเฅเคฏเคพเคค เคฎเฅ‹เคก เค•เฅ‹ เค•เฅเคฏเฅ‹เค‚ เคšเฅเคจเฅ‡เค‚? + +- **เคตเคฟเคตเคฟเคงเคคเคพ:** ONNX, TensorRT, CoreML เค”เคฐ เค…เคจเฅเคฏ เคธเคนเคฟเคค เค•เคˆ เคซเฅ‰เคฐเฅเคฎเฅ‡เคŸ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค +- **เคชเฅเคฐเคฆเคฐเฅเคถเคจ:** TensorRT เคฎเฅ‡เค‚ 5x เคœเฅ€เคชเฅ€เคฏเฅ‚ เคธเฅเคชเฅ€เคกเค…เคช เค”เคฐ ONNX เคฏเคพ OpenVINO เคฎเฅ‡เค‚ 3x เคธเฅ€เคชเฅ€เคฏเฅ‚ เคธเฅเคชเฅ€เคกเค…เคช เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +- **เคธเค‚เค—เคคเคคเคพ:** เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค•เคˆ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เค”เคฐ เคธเฅ‰เคซเคผเฅเคŸเคตเฅ‡เคฏเคฐ เคชเคฐ เคธเค‚เค—เค เคฟเคค เค•เคฐเฅ‡เค‚เฅค +- **เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เคธเฅเคตเคฟเคงเคพ:** เคคเฅเคตเคฐเคฟเคค เค”เคฐ เคธเฅ€เคงเฅ€ เคฎเฅ‰เคกเคฒ เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ‡ เคฒเคฟเค เคธเคฐเคฒ CLI เค”เคฐ Python APIเฅค + +### เคจเคฟเคฐเฅเคฏเคพเคค เคฎเฅ‹เคก เค•เฅ€ เคชเฅเคฐเคฎเฅเค– เคตเคฟเคถเฅ‡เคทเคคเคพเคเค‚ + +เคฏเคนเคพเค เค•เฅเค› เคฎเฅเค–เฅเคฏ เคตเคฟเคถเฅ‡เคทเคคเคพเคเค เคนเฅˆเค‚: + +- **เคเค•-เค•เฅเคฒเคฟเค• เคจเคฟเคฐเฅเคฏเคพเคค:** เค…เคฒเค—-เค…เคฒเค— เคซเฅ‰เคฐเฅเคฎเฅ‡เคŸ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคธเคฐเคฒ เค•เคฎเคพเค‚เคกเฅค +- **เคฌเฅˆเคš เคจเคฟเคฐเฅเคฏเคพเคค:** เคฌเฅˆเคš-เค‡เคจเฅเคซเคฐเฅ‡เค‚เคธ เค•เฅเคทเคฎเคคเคพ เคตเคพเคฒเฅ‡ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‹ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค +- **เคธเฅเคงเคพเคฐเคฟเคค เค…เคจเฅเคฎเคพเคจ:** เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฟเค เค—เค เคฎเฅ‰เคกเคฒ เค…เคจเฅเคฎเคพเคจ เคธเคฎเคฏ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเค•เฅ‚เคฒเคจ เค•เคฟเค เคœเคพเคคเฅ‡ เคนเฅˆเค‚เฅค +- **เคŸเฅเคฏเฅ‚เคŸเฅ‹เคฐเคฟเคฏเคฒ เคตเฅ€เคกเคฟเคฏเฅ‹:** เคธเฅเคตเคฟเคงเคพเคเค‚ เค”เคฐ เคŸเฅเคฏเฅ‚เคŸเฅ‹เคฐเคฟเคฏเคฒ เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค—เคนเคจ เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพเค“เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค + +!!! Tip "เคธเฅเคเคพเคต" + + * 3x เคธเฅ€เคชเฅ€เคฏเฅ‚ เคธเฅเคชเฅ€เคกเค…เคช เค•เฅ‡ เคฒเคฟเค ONNX เคฏเคพ OpenVINO เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค + * 5x เคœเฅ€เคชเฅ€เคฏเฅ‚ เคธเฅเคชเฅ€เคกเค…เคช เค•เฅ‡ เคฒเคฟเค TensorRT เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค + +## เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ + +YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ‹ ONNX เคฏเคพ TensorRT เคœเฅˆเคธเฅ‡ เค…เคฒเค— เคซเฅ‰เคฐเฅเคฎเฅ‡เคŸ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค เคชเฅ‚เคฐเฅ€ เคธเฅ‚เคšเฅ€ เคจเคฟเคฐเฅเคฏเคพเคค เคคเคฐเฅเค•เฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคจเฅ€เคšเฅ‡ เคฆเคฟเค เค—เค Arguments เค–เค‚เคก เค•เฅ‹ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/best.pt') # เคเค• เค•เคธเฅเคŸเคฎ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเคฒ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเคฒ เค•เคพ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚ + yolo export model=path/to/best.pt format=onnx # เค•เคธเฅเคŸเคฎ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เค•เคพ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚ + ``` + +## Arguments + +YOLO เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคจเคฟเคฐเฅเคฏเคพเคค เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคตเคฟเคจเฅเคฏเคพเคธ เค”เคฐ เคตเคฟเค•เคฒเฅเคชเฅ‹เค‚ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚, เคœเคฟเคจเฅเคนเฅ‡เค‚ เคฏเฅ‚เคœเคผ เค•เคฐเค•เฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค…เคจเฅเคฏ เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เคฏเคพ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เคฎเฅ‡เค‚ เคธเคนเฅ‡เคœเคจเฅ‡ เคฏเคพ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคธเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เค†เค•เคพเคฐ เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เคธเคฟเคธเฅเคŸเคฎ เค•เฅ‡ เคธเคพเคฅ เคธเค‚เค—เคคเคคเคพ เคชเฅเคฐเคญเคพเคตเคฟเคค เคนเฅ‹ เคธเค•เคคเฅ€ เคนเฅˆเค‚เฅค เค•เฅเค› เคธเคพเคฎเคพเคจเฅเคฏ YOLO เคจเคฟเคฐเฅเคฏเคพเคค เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ€ เค—เคˆ เคฎเฅ‰เคกเคฒ เคซเคผเคพเค‡เคฒ เค•เคพ เคธเฅเคตเคฐเฅ‚เคช (เคœเฅˆเคธเฅ‡ ONNX, TensorFlow SavedModel), เคฎเฅ‰เคกเคฒ เค•เฅ‹เคฐเฅ€ เคธเคนเคตเคพเคธ เคฎเฅ‡เค‚ เคšเคฒเคพเคจเฅ‡ เคตเคพเคฒเฅ€ เค‰เคชเค•เคฐเคฃ (เคœเฅˆเคธเฅ‡ CPU, GPU) เค”เคฐ เคฎเคพเคธเฅเค• เคฏเคพ เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคฌเฅ‰เค•เฅเคธ เคชเคฐ เค•เคˆ เคฒเฅ‡เคฌเคฒเฅ‹เค‚ เค•เฅ€ เค‰เคชเคธเฅเคฅเคฟเคคเคฟ เคœเฅˆเคธเฅ‡ เค…เคคเคฟเคฐเคฟเค•เฅเคค เคตเคฟเคถเฅ‡เคทเคคเคพเคเค เคถเคพเคฎเคฟเคฒ เคนเฅ‹ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคชเฅเคฐเคญเคพเคตเคฟเคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ‡ เค…เคจเฅเคฏ เค•เคพเคฐเค•เฅ‹เค‚ เคฎเฅ‡เค‚ เคฎเฅ‰เคกเคฒ เคฆเฅเคตเคพเคฐเคพ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค เคเค• เคตเคฟเคถเฅ‡เคท เค•เคพเคฐเฅเคฏ เค”เคฐ เคฒเค•เฅเคทเคฟเคค เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เคฏเคพ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เคฏเคพ เคธเฅ€เคฎเคพเค“เค‚ เค•เคพ เคงเฅเคฏเคพเคจ เคฆเฅ‡เคจเคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆเฅค เคฒเค•เฅเคทเฅเคฏ เคชเฅเคฐเคฏเฅ‹เคœเคจ เค”เคฐ เคฒเค•เฅเคทเฅเคฏเคฟเคค เคตเคพเคคเคพเคตเคฐเคฃ เคฎเฅ‡เค‚ เคชเฅเคฐเคญเคพเคตเฅ€ เคขเค‚เค— เคธเฅ‡ เค‰เคชเคฏเฅ‹เค— เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคงเฅเคฏเคพเคจ เคธเฅ‡ เคตเคฟเคšเคพเคฐ เค•เคฐเคจเคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆเฅค + +| เค•เฅเค‚เคœเฅ€ | เคฎเคพเคจ | เคตเคฟเคตเคฐเคฃ | +|-------------|-----------------|------------------------------------------------------------------------| +| `format` | `'torchscript'` | เคฏเฅ‹เค—เฅเคฏเคคเคพ เค•เฅ‡ เคฒเคฟเค เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคธเฅเคตเคฐเฅ‚เคช | +| `imgsz` | `640` | เคเค•เคฒ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เค›เคตเคฟ เค•เคพ เค†เค•เคพเคฐ เคฏเคพ (h, w) เคธเฅ‚เคšเฅ€, เคœเฅˆเคธเฅ‡ (640, 480) | +| `keras` | `False` | TF SavedModel เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ‡ เคฒเคฟเค เค•เฅ‡เคฐเคธ เค•เคพ เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `optimize` | `False` | 
TorchScript: เคฎเฅ‹เคฌเคพเค‡เคฒ เค•เฅ‡ เคฒเคฟเค เค‘เคชเฅเคŸเคฟเคฎเคพเค‡เคœเคผ เค•เคฐเฅ‡เค‚ | +| `half` | `False` | FP16 เคธเค‚เค—เคฃเคจเคพ | +| `int8` | `False` | INT8 เคธเค‚เค—เคฃเคจเคพ | +| `dynamic` | `False` | ONNX/TensorRT: เค—เคคเคฟเคถเฅ€เคฒ เคงเฅเคฏเคพเคจ เคฆเคฟเคฒเคพเคจเฅ‡ เคตเคพเคฒเฅ‡ เคงเฅเคฏเคพเคจ | +| `simplify` | `False` | ONNX/TensorRT: เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคธเคฐเคฒ เคฌเคจเคพเคเค‚ | +| `opset` | `None` | ONNX: เค‘เคชเคธเฅ‡เคŸ เคธเค‚เคธเฅเค•เคฐเคฃ (เคตเฅˆเค•เคฒเฅเคชเคฟเค•, เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸเฅเคธ เค•เฅ‹ เคจเคตเฅ€เคจเคคเคฎ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เค›เฅ‹เคกเคผเฅ‡เค‚) | +| `workspace` | `4` | TensorRT: เค•เคพเคฐเฅเคฏเค•เฅเคทเฅ‡เคคเฅเคฐ เค†เค•เคพเคฐ (GB) | +| `nms` | `False` | CoreML: NMS เคœเฅ‹เคกเคผเฅ‡เค‚ | + +## เคจเคฟเคฐเฅเคฏเคพเคค เคธเฅเคตเคฐเฅ‚เคช + +เคจเฅ€เคšเฅ‡ เคฆเคฟเค เค—เค เคคเคพเคฒเคฟเค•เคพ เคฎเฅ‡เค‚ YOLOv8 เคจเคฟเคฐเฅเคฏเคพเคค เคธเฅเคตเคฐเฅ‚เคช เคฆเคฟเค เค—เค เคนเฅˆเค‚เฅค เค†เคช เค•เคฟเคธเฅ€ เคญเฅ€ เคธเฅเคตเคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เคœเฅˆเคธเฅ‡ `format='onnx'` เคฏเคพ `format='engine'`เฅค + +| เคธเฅเคตเคฐเฅ‚เคช | `format` เคคเคฐเฅเค• | เคฎเฅ‰เคกเคฒ | เคฎเฅ‡เคŸเคพเคกเคพเคŸเคพ | เคคเคฐเฅเค• | +|--------------------------------------------------------------------|---------------|---------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | diff --git a/ultralytics/docs/hi/modes/export.md:Zone.Identifier b/ultralytics/docs/hi/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/index.md b/ultralytics/docs/hi/modes/index.md new file mode 100755 index 0000000..9e49886 --- /dev/null +++ b/ultralytics/docs/hi/modes/index.md @@ -0,0 +1,78 @@ +--- +comments: true +description: เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคคเค•, Ultralytics เค•เฅ‡ เคธเคพเคฅ YOLOv8 เค•เคพ เค…เคงเคฟเค•เคคเคฎ เคฒเคพเคญ เค‰เค เคพเคเค‚เฅค 
เคฎเคพเคจเฅเคฏเคคเคพ เคชเฅเคฐเคพเคชเฅเคค เคฎเฅ‹เคก, เคœเฅˆเคธเฅ‡ เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ, เคจเคฟเคฐเฅเคฏเคพเคค เค”เคฐ เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค—, เค•เฅ‡ เคฒเคฟเค เค…เคตเคงเคพเคฐเคฃเคพเค“เค‚ เค”เคฐ เค‰เคฆเคพเคนเคฐเคฃ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +keywords: Ultralytics, YOLOv8, เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค—, เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ, เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ, เคชเฅ‚เคฐเฅเคตเคพเคตเคฒเฅ‹เค•เคจ, เคจเคฟเคฐเฅเคฏเคพเคค, เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—, เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค— +--- + +# Ultralytics YOLOv8 เคฎเฅ‹เคก + +Ultralytics YOLO ecosystem and integrations + +## เคชเคฐเคฟเคšเคฏ + +Ultralytics YOLOv8 เคธเคฟเคฐเฅเคซ เคเค• เค“เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เคจเคนเฅ€เค‚ เคนเฅˆ; เคฏเคน เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคชเฅ‚เคฐเฅเคฃ เคœเฅ€เคตเคจ เคšเค•เฅเคฐ เค•เฅ‡ เคฒเคฟเค เคเค• เคตเคฟเค•เคถเฅ€เคฒ เคซเฅเคฐเฅ‡เคฎเคตเคฐเฅเค• เคนเฅˆโ€”เคกเฅ‡เคŸเคพ เคธเค‚เค—เฅเคฐเคน เค”เคฐ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡ เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ, เคกเคฟเคชเฅเคฒเฅ‰เคฏเคฎเฅ‡เค‚เคŸ เค”เคฐ เคตเคพเคธเฅเคคเคตเคฟเค• เคฆเฅเคจเคฟเคฏเคพ เค•เฅ‡ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคคเค•เฅค เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคฎเฅ‹เคก เค•เคพ เคเค• เคตเคฟเคถเฅ‡เคท เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เคนเฅ‹เคคเคพ เคนเฅˆ เค”เคฐ เค†เคชเค•เฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค”เคฐ เคฏเฅ‚เคœ-เค•เฅ‡เคธ เค•เฅ‡ เคฒเคฟเค เค†เคตเคถเฅเคฏเค• เคฒเคšเฅ€เคฒเคพเคชเคจ เค”เคฐ เค•เคพเคฐเฅเคฏเค•เฅเคทเคฎเคคเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฌเคจเคพเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค + +!!! Note "เคจเฅ‹เคŸ" + + ๐Ÿšง เคนเคฎเคพเคฐเฅ€ เคฌเคนเฅเคญเคพเคทเฅ€เคฏ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ เคตเคฐเฅเคคเคฎเคพเคจ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฎเคพเคฃเคพเคงเฅ€เคจ เคนเฅˆ, เค”เคฐ เคนเคฎ เค‡เคธเฅ‡ เคธเฅเคงเคพเคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคกเคผเฅ€ เคฎเฅ‡เคนเคจเคค เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚เฅค เค†เคชเค•เฅ€ เคธเคนเคจเคถเฅ€เคฒเคคเคพ เค•เฅ‡ เคฒเคฟเค เคงเคจเฅเคฏเคตเคพเคฆ! ๐Ÿ™ + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: Ultralytics เคฎเฅ‹เคก เคŸเฅเคฏเฅ‚เคŸเฅ‹เคฐเคฟเคฏเคฒ: เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ, เคชเฅ‚เคฐเฅเคตเคพเคตเคฒเฅ‹เค•เคจ, เคจเคฟเคฐเฅเคฏเคพเคค เค”เคฐ เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค—เฅค +

+ +### เคเค• เคชเคฒ เคฎเฅ‹เคกเฅ‡เค‚ + +Ultralytics YOLOv8 เค•เฅ‡ เคธเคฎเคฐเฅเคฅเคฟเคค **เคฎเฅ‹เคก** เค•เฅ‹ เคธเคฎเคเคจเคพ เค†เคชเค•เฅ‡ เคฎเฅ‰เคกเคฒ เค•เคพ เค…เคงเคฟเค•เคคเคฎ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆ: + +- **เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ** เคฎเฅ‹เคก: เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค•เคธเฅเคŸเคฎ เคฏเคพ เคชเฅ‚เคฐเฅเคต-เคญเคฐเฅเคคเฅ€ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคฎเฅ‡เค‚ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเฅ‡เค‚เฅค +- **เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ** เคฎเฅ‹เคก: เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เคฎเคพเคจเฅเคฏเคคเคพ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฌเคพเคฆ เค•เฅ‡ เคšเฅ‡เค•เคชเฅเคตเคพเค‡เค‚เคŸ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค +- **เคชเฅ‚เคฐเฅเคตเคพเคตเคฒเฅ‹เค•เคจ** เคฎเฅ‹เคก: เคจเค เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเฅ‡เค‚เฅค +- **เคจเคฟเคฐเฅเคฏเคพเคค** เคฎเฅ‹เคก: เคกเคฟเคชเฅเคฒเฅ‰เคฏเคฎเฅ‡เค‚เคŸ เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เคฎเฅ‡เค‚ เคคเฅˆเคฏเคพเคฐ เค•เคฐเฅ‡เค‚เฅค +- **เคŸเฅเคฐเฅˆเค•** เคฎเฅ‹เคก: เคฐเฅ€เคฏเคฒ-เคŸเคพเค‡เคฎ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เคฎเฅ‡เค‚ เคฏเฅ‹เคœเคฟเคค เค†เค‡เคŸเคฎ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เค•เคพ เคตเคฟเคธเฅเคคเคพเคฐ เค•เคฐเฅ‡เค‚เฅค +- **เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•** เคฎเฅ‹เคก: เคตเคฟเคตเคฟเคง เคกเคฟเคชเฅเคฒเฅ‰เคฏเคฎเฅ‡เค‚เคŸ เคตเคพเคคเคพเคตเคฐเคฃเฅ‹เค‚ เคฎเฅ‡เค‚ เคฎเฅ‰เคกเคฒ เค•เฅ€ เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เคพ เคตเคฟเคถเฅเคฒเฅ‡เคทเคฃ เค•เคฐเฅ‡เค‚เฅค + +เคฏเคน เคธเคพเคฎเค—เฅเคฐเฅ€ เค†เคชเค•เฅ‹ เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคฎเฅ‹เคก เค•เคพ เค…เคตเคฒเฅ‹เค•เคจ เค”เคฐ เคตเฅเคฏเคพเคตเคนเคพเคฐเคฟเค• เค…เค‚เคฆเคพเคœเคผ เคฆเฅ‡เคจเฅ‡ เค•เคพ เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เคฐเค–เคคเฅ€ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เค†เคช YOLOv8 เค•เฅ€ เคชเฅ‚เคฐเฅ€ เค•เฅเคทเคฎเคคเคพ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐ เคธเค•เฅ‡เค‚เฅค + +## [เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ](train.md) + +เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคธเฅเคŸเคฎ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค‡เคธ เคฎเฅ‹เคก เคฎเฅ‡เค‚, เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค”เคฐ เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคฎเฅ‡เค‚, เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅ‹เค‚ เค•เฅ‹ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ เคคเคพเค•เคฟ เคฏเคน เค›เคตเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅ‹เค‚ เค•เฅ€ เค•เค•เฅเคทเคพเค“เค‚ เค”เคฐ เคธเฅเคฅเคพเคจเฅ‹เค‚ เค•เคพ เคธเคŸเฅ€เค• เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐ เคธเค•เฅ‡เฅค + +[เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค‰เคฆเคพเคนเคฐเคฃ](train.md){ .md-button } + +## [เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ](val.md) + +เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฌเคพเคฆ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคฎเคพเคจเฅเคฏเคคเคพ เค”เคฐ เคธเคพเคฎเคพเคจเฅเคฏเฅ€เค•เคฐเคฃ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เคฎเคพเคชเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค‡เคธ เคฎเฅ‹เคก เคฎเฅ‡เค‚, เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคเค• เคชเฅเคฐเคฎเคพเคฃเฅ€เค•เคฐเคฃ เคธเฅ‡เคŸ เคชเคฐ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฟเคฏเคพ 
เคœเคพเคคเคพ เคนเฅˆ เคคเคพเค•เคฟ เค‰เคธเค•เฅ€ เคธเคŸเฅ€เค•เคคเคพ เค”เคฐ เคธเคพเคฎเคพเคจเฅเคฏเฅ€เค•เคฐเคฃ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เคฎเคพเคชเคพ เคœเคพ เคธเค•เฅ‡เฅค เค‡เคธ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เคธเฅเคงเคพเคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅ‹เค‚ เค•เฅ‹ เคŸเฅเคฏเฅ‚เคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +[เคชเฅเคทเฅเคŸเฅ€เค•เคฐเคฃ เค‰เคฆเคพเคนเคฐเคฃ](val.md){ .md-button } + +## [เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ](predict.md) + +เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคจเคˆ เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8 เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค‡เคธ เคฎเฅ‹เคก เคฎเฅ‡เค‚, เคฎเฅ‰เคกเคฒ เคเค• เคšเฅ‡เค•เคชเฅเคตเคพเค‡เค‚เคŸ เคซเคผเคพเค‡เคฒ เคธเฅ‡ เคฒเฅ‹เคก เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เค”เคฐ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹เค‚ เค•เฅ‹ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค‡เคจเฅเคซเฅ‡เคฐเฅ‡เค‚เคธ เค•เคฐ เคธเค•เคคเคพ เคนเฅˆเฅค เคฎเฅ‰เคกเคฒ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เค•เฅ‹ เค‡เคจเคชเฅเคŸ เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅ‹เค‚ เค•เฅ€ เค•เค•เฅเคทเคพเค“เค‚ เค”เคฐ เคธเฅเคฅเคพเคจเฅ‹เค‚ เค•เคพ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค + +[เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค‰เคฆเคพเคนเคฐเคฃ](predict.md){ .md-button } + +## [เคจเคฟเคฐเฅเคฏเคพเคค](export.md) + +เคจเคฟเคฐเฅเคฏเคพเคค เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคเค• YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค‡เคธเฅเคคเฅ‡เคฎเคพเคฒ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เคชเฅเคฐเคพเคฐเฅ‚เคช เคฎเฅ‡เค‚ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ เคœเฅ‹ เค•เคฟ เค…เคจเฅเคฏ เคธเฅ‰เคซเคผเฅเคŸเคตเฅ‡เคฏเคฐ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เคฏเคพ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เค‰เคชเค•เคฐเคฃเฅ‹เค‚ เคฆเฅเคตเคพเคฐเคพ เค‡เคธเฅเคคเฅ‡เคฎเคพเคฒ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เคฏเคน เคฎเฅ‹เคกเคฒ เค•เฅ‹ เค‰เคคเฅเคชเคพเคฆเคจ เค‰เคฆเฅเคฏเฅ‹เค—เฅ‹เค‚ เคฎเฅ‡เค‚ เคกเคฟเคชเฅเคฒเฅ‰เคฏ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +[เคจเคฟเคฐเฅเคฏเคพเคค เค‰เคฆเคพเคนเคฐเคฃ](export.md){ .md-button } + +## [เคŸเฅเคฐเฅˆเค•](track.md) + +เคŸเฅเคฐเฅˆเค• เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคเค• YOLOv8 เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เคตเคธเฅเคคเฅเค“เค‚ เค•เคพ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค‡เคธ เคฎเฅ‹เคก เคฎเฅ‡เค‚, เคฎเฅ‰เคกเคฒ เคเค• เคšเฅ‡เค•เคชเฅเคตเคพเค‡เค‚เคŸ เคซเคผเคพเค‡เคฒ เคธเฅ‡ เคฒเฅ‹เคก เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เค”เคฐ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เคเค• เคฒเคพเค‡เคต เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎ เคชเฅเคฐเคฆเคพเคจ เค•เคฐ เคธเค•เคคเคพ เคนเฅˆ เคคเคพเค•เคฟ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเฅ‡เค‚ เคตเคธเฅเคคเฅเค“เค‚ เค•เคพ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค เคฏเคน เคฎเฅ‹เคก เคธเคคเคฐเฅเค•เคคเคพ เคชเฅเคฐเคฃเคพเคฒเคฟเคฏเฅ‹เค‚ เคฏเคพ เคธเฅเคตเคฏเค‚ เคšเคพเคฒเคฟเคค เค•เคพเคฐ เคœเฅˆเคธเฅ‡ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +[เคŸเฅเคฐเฅˆเค• เค‰เคฆเคพเคนเคฐเคฃ](track.md){ .md-button } + +## [เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•](benchmark.md) + +เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค• เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— YOLOv8 เค•เฅ‡ 
เคตเคฟเคญเคฟเคจเฅเคจ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เฅ€ เค—เคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เคพ เคชเฅเคฐเฅ‹เคซเคผเคพเค‡เคฒ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค• เคธเฅ‡ เคชเฅเคฐเคพเคชเฅเคค เคœเคพเคจเค•เคพเคฐเฅ€ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช เค•เฅ‡ เค†เค•เคพเคฐ, เค‰เคธเค•เฅ€ `mAP50-95` metric (เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ, เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เค”เคฐ เคชเฅ‹เคœเคผ เค•เฅ‡ เคฒเคฟเค) +เคฏเคพ `accuracy_top5` metric (เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค), เค”เคฐ เคšเคฟเคคเฅเคฐ เคฎเคพเคงเฅเคฏเคฎเคฟเค• เคธเคฎเคฏ เค•เฅ‡ เคฎเคฟเคฒเฅ€เคธเฅ‡เค•เค‚เคก เคชเฅเคฐเคคเคฟ เค‡เคฎเฅ‡เคœ เค•เฅ‡ เค…เคฒเค—-เค…เคฒเค— เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เฅ€ เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคœเคพเคจเค•เคพเคฐเฅ€ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‹ เค‰เคจเค•เฅ€ เคตเคฟเคถเฅ‡เคท เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฎเคพเคฎเคฒเฅ‡ เคฎเฅ‡เค‚ เค‰เคจเค•เฅ€ เค–เคพเคธเคฟเคฏเคคเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคฎเคฟเคคเคฟ เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฒเคฟเค เคธเคฐเฅเคตเฅ‹เคคเฅเคคเคฎ เคจเคฟเคฐเฅเคฏเคพเคค เคชเฅเคฐเคพเคฐเฅ‚เคช เค•เคพ เคšเคฏเคจ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฎเคฆเคฆ เค•เคฐ เคธเค•เคคเฅ€ เคนเฅˆเฅค + +[เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค• เค‰เคฆเคพเคนเคฐเคฃ](benchmark.md){ .md-button } diff --git a/ultralytics/docs/hi/modes/index.md:Zone.Identifier b/ultralytics/docs/hi/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/predict.md b/ultralytics/docs/hi/modes/predict.md new file mode 100755 index 0000000..86c3a04 --- /dev/null +++ b/ultralytics/docs/hi/modes/predict.md @@ -0,0 +1,226 @@ +--- +comments: true +description: เคฏเฅ‹เคฒเฅ‹เคตเฅ€ 8 เค•เฅ‡ เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเคพ เคธเฅ€เค–เฅ‡เค‚ เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเฅ‡เค‚เฅค เค‡เคฎเฅ‡เคœเฅ‡เคธ, เคตเฅ€เคกเคฟเคฏเฅ‹เคœเคผ เค”เคฐ เคกเฅ‡เคŸเคพ เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เคœเฅˆเคธเฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเฅ‡เค‚เฅค +keywords: Ultralytics, YOLOv8, เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก, เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเฅเคฐเฅ‹เคค, เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคพเคฐเฅเคฏ, เคงเคพเคฐเคฃเคพ เคฏเฅ‹เคœเคจเคพ, เค›เคตเคฟ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ, เคตเฅ€เคกเคฟเคฏเฅ‹ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ, เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค—, เคเค†เคˆ +--- + +# เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLO เคฎเฅ‰เคกเคฒ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ + +เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLO เคธเค‚เค˜เคŸเคจเคพ เค”เคฐ เคเค•เฅ€เค•เคฐเคฃ + +## เคชเคฐเคฟเคšเคฏ + +เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— เค”เคฐ เค•เค‚เคชเฅเคฏเฅ‚เคŸเคฐ เคตเคฟเคœเคจ เค•เฅ€ เคฆเฅเคจเคฟเคฏเคพ เคฎเฅ‡เค‚ เคฆเฅƒเคถเฅเคฏเคพเค‚เคถ เคธเฅ‡ เคธเคฎเคเคจเฅ‡ เค•เฅ€ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ 'เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ' เคฏเคพ 'เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ' เค•เคนเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLOv8 เคเค• เคถเค•เฅเคคเคฟเคถเคพเคฒเฅ€ เคตเคฟเคถเฅ‡เคทเคคเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ เคœเคฟเคธเฅ‡ **เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ 
เคฎเฅ‹เคก** เค•เคนเคพ เคœเคพเคคเคพ เคนเฅˆ, เคœเฅ‹ เคตเฅเคฏเคพเคชเค• เคกเฅ‡เคŸเคพ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เคชเคฐ เค‰เคšเฅเคš เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เคตเคพเคธเฅเคคเฅเค•เคพเคฒเคฟเค• เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคถเฅ‡เคท เคฐเฅ‚เคช เคธเฅ‡ เคคเฅˆเคฏเคพเคฐ เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLOv8 เคฎเฅ‰เคกเคฒ เคธเฅ‡ เค†เค‰เคŸเคชเฅเคŸ เคจเคฟเค•เคพเคฒเคจเฅ‡ เค•เคพ เคคเคฐเฅ€เค•เคพ เค•เคธเฅเคŸเคฎ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเคเฅค +

+ +## เคตเคพเคธเฅเคคเคตเคฟเค• เคœเค—เคค เคฎเฅ‡เค‚ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— + +| เคตเคฟเคจเคฟเคฐเฅเคฎเคพเคฃ | เค–เฅ‡เคฒ เคธเค‚เค˜ | เคธเฅเคฐเค•เฅเคทเคพ | +|:-------------------------------------------:|:--------------------------------------------------:|:---------------------------------------------:| +| ![เคตเคพเคนเคจ เค•เฅ‡ เคชเฅเคฐเฅเคœเฅ‡ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ][car spare parts] | ![เคซเฅเคŸเคฌเฅ‰เคฒ เค–เคฟเคฒเคพเคกเคผเฅ€ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ][football player detect] | ![เคฒเฅ‹เค—เฅ‹เค‚ เค•เคพ เค—เคฟเคฐเคจเคพ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ][human fall detect] | +| เคตเคพเคนเคจ เค•เฅ‡ เคชเฅเคฐเฅเคœเฅ‡ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ | เคซเฅเคŸเคฌเฅ‰เคฒ เค–เคฟเคฒเคพเคกเคผเฅ€ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ | เคฒเฅ‹เค—เฅ‹เค‚ เค•เคพ เค—เคฟเคฐเคจเคพ | + +## เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ‡ เคฒเคฟเค เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLO เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เฅเคฏเฅ‹เค‚ เค•เคฐเฅ‡เค‚? + +เคฏเคนเคพเค‚ เค†เคชเค•เฅ‹ เคฏเฅ‹เคฒเฅ‹เคตเฅ€ 8 เค•เฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค…เคชเคจเฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เค•เคฐเคจเคพ เคšเคพเคนเคฟเค เค•เคพ เค•เคพเคฐเคฃ เคนเฅˆ: + +- **เคฌเคนเฅเคฎเฅเค–เฅ€เคชเคจ:** เค›เคตเคฟเคฏเฅ‹เค‚, เคตเฅ€เคกเคฟเคฏเฅ‹เคœ เค”เคฐ เคฏเคน เคคเค• เค•เคฟ เคฒเคพเค‡เคต เคธเฅเคŸเฅเคฐเฅ€เคฎ เค•เฅ€ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคชเคฐ เคฏเฅ‹เค—เฅเคฏ เคนเฅˆเค‚เฅค +- **เคชเฅเคฐเคฆเคฐเฅเคถเคจ:** เคฎเฅเค–เฅเคฏเคคเคƒ เคฌเคฟเคจเคพ เคธเคŸเฅ€เค•เคคเคพ เคชเคฐ เคฌเคฒเคตเคฐเฅเคงเคฟเคค, เคฐเคฟเคฏเคฒ-เคŸเคพเค‡เคฎ, เค‰เคšเฅเคš เค—เคคเคฟ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค engineering เค•เคฟเค เค—เค เคนเฅˆเค‚เฅค +- **เค‰เคชเคฏเฅ‹เค— เคธเคนเคœ:** เค–เคฆเฅเคฏ เคชเคพเค‡เคฅเคจ เค”เคฐ เคฏเคฅเคพเคฐเฅเคฅเคคเคพ (CLI) เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธเฅ‹เค‚ เค•เฅ‹ เคœเคฒเฅเคฆเฅ€ เคตเคฟเคชเคฃเคจ เค”เคฐ เคชเคฐเฅ€เค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเคเฅค +- **เคŠเคšเฅเคšเคคเคฎ เค…เคจเฅเค•เฅ‚เคฒเคจเคฏเฅ‹เค—เฅเคฏเคคเคพ:** เค…เคชเคจเฅ€ เคตเคฟเคถเคฟเคทเฅเคŸ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เค…เคจเฅเคธเคพเคฐ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅƒเคคเคฟ เค•เฅ‹ เคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคญเคฟเคจเฅเคจ เคธเฅ‡เคŸเคฟเค‚เค— เค”เคฐ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅค + +### เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เค•เฅ€ เคชเฅเคฐเคฎเฅเค– เคธเฅเคตเคฟเคงเคพเคเค + +YOLOv8 เค•เคพ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เคฎเคœเคฌเฅ‚เคค เค”เคฐ เคตเคฟเคถเฅ‡เคทเคคเคพ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคœเคฟเคธเคฎเฅ‡เค‚ เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚: + +- **เคฏเคฆเคฟ เค†เคชเค•เฅ‡ เคกเฅ‡เคŸเคพ เค•เฅ‡ เค•เคˆ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เค•เฅ‡ เคชเค‚เคœเฅ€เค•เคฐเคฃ:** เคšเคพเคนเฅ‡ เค†เคชเค•เคพ เคกเฅ‡เคŸเคพ เคตเฅเคฏเค•เฅเคคเคฟเค—เคค เค›เคตเคฟเคฏเฅ‹เค‚, เค›เฅ‹เคŸเฅ‚ เคฎเคพเคฒเคพ เค›เคตเคฟเคฏเฅ‹เค‚, เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒเฅ‹เค‚ เคฏเคพ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎเฅ‹เค‚ เค•เฅ€ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคนเฅ‹, เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคฎเฅ‹เคก เค†เคชเค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค เคนเฅˆเฅค +- **เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคฎเฅ‹เคก:** `เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค—` เคธเฅเคตเคฟเคงเคพเค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ เค”เคฐ `เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ€ เค•เฅ‰เคฒ เคตเคฟเคงเคฟ` เคฎเฅ‡เค‚ `เคธเฅเคŸเฅเคฐเฅ€เคฎ = เคŸเฅเคฐเฅ‚` เคธเฅ‡เคŸ เค•เคฐเค•เฅ‡ `เคฐเคฟเคœเคฒเฅเคŸเฅเคธ` เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‡ เคเค• เคฎเฅ‡เคฎเฅ‹เคฐเฅ€-เคชเคฐเฅเคฏเคพเคชเฅเคค 
เคœเฅ‡เคจเคฐเฅ‡เคŸเคฐ เค•เคพ เค‰เคคเฅเคชเคพเคฆเคจ เค•เคฐเฅ‡เค‚เฅค +- **เคฌเฅˆเคš เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค—:** เคเค• เคนเฅ€ เคฌเฅˆเคš เคฎเฅ‡เค‚ เค•เคˆ เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎเฅเคธ เค•เฅ€ เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เค•เฅเคทเคฎเคคเคพ, เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเคฎเคฏ เค•เฅ‹ เค”เคฐ เคคเฅ‡เคœเคผ เค•เคฐเคคเฅ€ เคนเฅˆเฅค +- **เค‡เค‚เคŸเฅ€เค—เฅเคฐเฅ‡เคถเคจ เคซเฅเคฐเฅ‡เค‚เคกเคฒเฅ€:** เคฒเคšเฅ€เคฒเฅ€ API เค•เฅ‡ เค•เคพเคฐเคฃ เคฎเฅŒเคœเฅ‚เคฆเคพ เคกเฅ‡เคŸเคพ เคชเคพเคˆเคชเคฒเคพเค‡เคจ เค”เคฐ เค…เคจเฅเคฏ เคธเฅ‰เคซเคผเฅเคŸเคตเฅ‡เคฏเคฐ เค˜เคŸเค•เฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เค†เคธเคพเคจเฅ€ เคธเฅ‡ เค‡เค‚เคŸเฅ€เค—เฅเคฐเฅ‡เคŸ เค•เคฐเฅ‡เค‚เฅค + +เคœเคฌ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคฎเฅ‰เคกเคฒ เค•เฅ‹ `เค—เฅ‡เคจเคฐเฅ‡เคŸเคฐ เค•เฅ€ `เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคฒเฅ‹เคก เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคคเฅ‹ เค…เคฒเฅเคŸเฅเคฐเคพเคฒเคพเคฏเคŸเคฟเค•เฅเคธ YOLO เคฎเฅ‰เคกเคฒ เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคฎเฅ‡เคฅเคก เคธเฅ‡ `เคฐเคฟเคœเคฒเฅเคŸ` เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‡ เคเค• เคชเคพเคฏเคฅเคจ เคธเฅ‚เคšเฅ€ เคฏเคพ เคฏเคพเคฆเฅƒเคšเฅเค›เคฟเค• เคธเค‚เค–เฅเคฏเค•เคพเคฐเฅ€ เคœเคจเคฐเฅ‡เคŸเคฐ เคฒเฅŒเคŸเคพเคคเฅ‡ เคนเฅˆเค‚: + +!!! Example "เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ" + + === "`เคธเฅเคŸเฅเคฐเฅ€เคฎ = เคซเคพเคฒเฅเคธ` เค•เฅ‡ เคธเคพเคฅ เคธเฅ‚เคšเฅ€ เคฏเคพเคฆเฅƒเคšเฅเค›เคฟเค•" + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ + + # เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค เคฌเฅˆเคšเฅเคก เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + results = model(['im1.jpg', 'im2.jpg']) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ เคฒเฅŒเคŸเคพเคเค + + # เคชเคฐเคฟเคฃเคพเคฎ เคธเฅ‚เคšเฅ€ เค•เฅ‹ เคชเฅเคฐเฅ‹เคธเฅ‡เคธ เค•เคฐเฅ‡เค‚ + for result in results: + boxes = result.boxes # เคฌเฅ‰เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค เคฌเฅ‰เค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + masks = result.masks # เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเฅ‹เคก เค•เฅ‡ เคฒเคฟเค เคฎเคพเคธเฅเค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + keypoints = result.keypoints # เคชเฅ‹เคœเคผ เค•เฅ‡ เคฒเคฟเค เค•เฅ€เคชเฅ‰เค‡เค‚เคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + probs = result.probs # เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเฅ‹เคฌเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + ``` + + === "`เคธเฅเคŸเฅเคฐเฅ€เคฎ = เคŸเฅเคฐเฅ‚ เค•เฅ‡ เคธเคพเคฅ เคœเฅ‡เคจเคฐเฅ‡เคŸเคฐ` เค•เฅ€ เคชเฅเคฐเคพเคฅเคฎเคฟเค•เคคเคพ" + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ + + # เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค เคฌเฅˆเคšเฅเคก เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + results = model(['im1.jpg', 'im2.jpg'], stream=True) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เคพ เคœเคจเคฐเฅ‡เคŸเคฐ เคฒเฅŒเคŸเคพเคเค + + # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เคœเคจเคฐเฅ‡เคŸเคฐ เค•เฅ‹ เคชเฅเคฐเฅ‹เคธเฅ‡เคธ เค•เคฐเฅ‡เค‚ + for result in results: + boxes = result.boxes # เคฌเฅ‰เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค เคฌเฅ‰เค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + masks = result.masks # เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเคพเคธเฅเค•เฅเคธ เค•เฅ‡ เคฒเคฟเค เคฎเคพเคธเฅเค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + keypoints = result.keypoints # เคชเฅ‹เคœเคผ เค•เฅ‡ เคฒเคฟเค เค•เฅ€เคชเฅ‰เค‡เค‚เคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + probs = result.probs # เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเฅ‹เคฌเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + ``` + +## 
เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเฅเคฐเฅ‹เคค + +YOLOv8 เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅเคฐเค•เคพเคฐ เค•เฅ‡ เค‡เคจเคชเฅเคŸ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เค•เฅ‹ process เค•เคฐ เคธเค•เคคเคพ เคนเฅˆ, เคœเฅˆเคธเคพ เค•เคฟ เคจเฅ€เคšเฅ‡ เคฆเคฟเค เค—เค เคคเคพเคฒเคฟเค•เคพ เคฎเฅ‡เค‚ เคฆเคฟเค–เคพเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅเคฅเคฟเคฐ เค›เคตเคฟเคฏเคพเค, เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎเฅเคธ, เค”เคฐ เคตเคฟเคญเคฟเคจเฅเคจ เคกเฅ‡เคŸเคพ เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เค•เฅ‹ เคตเคฟเค•เคพเคธ เค•เฅ‡ เคธเคพเคฅ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เคฏเคน เคคเคพเคฒเคฟเค•เคพ เคญเฅ€ เค‡เค‚เค—เคฟเคค เค•เคฐเคคเฅ€ เคนเฅˆ เค•เคฟ เค•เฅเคฏเคพ เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เคธเฅเคฐเฅ‹เคค เค•เฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคฎเฅ‹เคก เคฎเฅ‡เค‚ `เคฆเฅเคตเคพเคฐเคพ เค‡เคธเฅเคคเฅ‡เคฎเคพเคฒ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค' เคฏเคนเคพเค‚ เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคตเฅ€เคกเคฟเคฏเฅ‹ เคฏเคพ เคฒเคพเค‡เคต เคธเฅเคŸเฅเคฐเฅ€เคฎ เค•เฅ‹ เคชเฅเคฐเฅ‹เคธเฅ‡เคธ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅˆ เค•เฅเคฏเฅ‹เค‚เค•เคฟ เค‡เคธเคฎเฅ‡เค‚ เคธเคญเฅ€ เคซเฅเคฐเฅ‡เคฎเฅเคธ เค•เฅ‹ เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เคฎเฅ‡เค‚ เคฒเฅ‹เคก เค•เคฟเค เคฌเคฟเคจเคพ เคเค• เคฐเคฟเคœเคฒเฅเคŸ เค•เฅ€ generator เคฌเคจเคพเคˆ เคœเคพเคคเฅ€ เคนเฅˆเฅค + +!!! Tip "เคธเฅเคเคพเคต" + + `เคธเฅเคŸเฅเคฐเฅ€เคฎ = เคŸเฅเคฐเฅ‚` เค•เคพ เค‰เคชเคฏเฅ‹เค— เคฌเคกเคผเฅ€ เคตเฅ€เคกเคฟเคฏเฅ‹เคœเคผ เคฏเคพ เคตเคฟเคถเคพเคฒ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เคธเค‚เคšเคพเคฒเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฐเฅ‡เค‚ เคคเคพเค•เคฟ เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เค•เคพ เคฆเค•เฅเคทเคฟเคฃเคพ เคชเฅเคฐเคฌเค‚เคงเคฟเคค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค `เคธเฅเคŸเฅเคฐเฅ€เคฎ = เคซเคพเคฒเฅเคธ` เค•เฅ‡ เค–เค‚เคก เค•เฅ‡ เค–เค‚เคก เคฎเฅ‡เค‚ เคธเคญเฅ€ เคซเฅเคฐเฅ‡เคฎเฅเคธ เคฏเคพ เคกเฅ‡เคŸเคพ เคฌเคฟเค‚เคฆเฅเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคชเคฐเคฟเคฃเคพเคฎ เคธเฅเคคเฅ‹เคฐ เค•เคฟเค เคœเคพเคคเฅ‡ เคนเฅˆเค‚, เคœเฅ‹ เค…เคงเคฟเค•เคพเค‚เคถเคคเคพ เคฎเฅ‡เค‚ เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เคฎเฅ‡เค‚ เคฒเฅ‹เคก เคนเฅ‹ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ เคฌเคกเคผเฅ‡ เค‡เคจเคชเฅเคŸ เค•เฅ‡ เคฒเคฟเค เค†เค‰เคŸ-เค‘เคซ-เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เคคเฅเคฐเฅเคŸเคฟเคฏเคพเค‚ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค‡เคธเค•เฅ‡ เคฌเคฐเคพเคฌเคฐ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ `เคธเฅเคŸเฅเคฐเฅ€เคฎ= True` เคเค• เคœเฅ‡เคจเคฐเฅ‡เคŸเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเค•เฅ‡ เคธเค‚เคšเคฟเคค เคนเฅ‹เคจเฅ‡ เคตเคพเคฒเฅ‡ + +เค•เฅ‡เคตเคฒ เคฌเฅเคฐเคนเฅเคฎเคฃเฅเคก เค•เฅ‡ เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคธเฅ€เคฎเคฟเคค เคธเค‚เค—เฅเคฐเคน เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคฌเคนเฅเคค เค•เคฎ เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เค–เคชเคค เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เคฌเคกเคผเฅ‡ เค‡เคจเคชเฅเคŸ เค•เฅ‡ เคฒเคฟเค เค†เค‰เคŸ เค‘เคซเคฎเฅ‡เคฎเฅ‹เคฐเฅ€เคจเฅเคฎเคพเคจ syllabus เคจเฅเค•เคธเคพเคจ เคนเฅ‹เคจเฅ‡ เคธเฅ‡ เคฌเคšเคพเคคเคพ เคนเฅˆเฅค + +| เคธเฅเคฐเฅ‹เคค | เคคเคฐเฅเค• | เคชเฅเคฐเค•เคพเคฐ | เคŸเคฟเคชเฅเคชเคฃเคฟเคฏเคพเค | +|-----------------|-------------------------------------------|---------------|-------------------------------------------------------------------------------------------------------------| +| เค›เคตเคฟ | `'เค›เคตเคฟ.เคœเฅ‡เคชเฅ€เคœเฅ€'` | `เคถ. 
เคฏเคพ เคชเคฅ` | เคเค•เคฒ เค›เคตเคฟ เคซเคผเคพเค‡เคฒเฅค | +| เคฏเฅ‚เค†เคฐเคเคฒ | `'https://ultralytics.com/เค›เคตเคฟ/เคฌเคธ.เคœเฅ‡เคชเฅ€เคœเฅ€'` | `เคถเคƒ` | เค›เคตเคฟ เคนเฅ‹เคธเฅเคŸเฅ‡เคก เคฐเคฟเคฎเฅ‹เคŸเคฒเฅ€ เค‰เคจเฅเคจเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฏเฅ‚เค†เคฐเคเคฒ เฅค | +| เคธเฅเค•เฅเคฐเฅ€เคจเคถเฅ‰เคŸ | `'เคธเฅเค•เฅเคฐเฅ€เคจ'` | `เคถเคƒ` | เคธเฅเค•เฅเคฐเฅ€เคจ เค•เฅ€ เคตเคฐเฅเคคเคฎเคพเคจ เคธเคพเคฎเค—เฅเคฐเฅ€ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เค•เฅˆเคชเฅเคšเคฐ เฅค | +| เค†เคฆเคฐเฅเคถ | `เค‡เคฎเฅ‡เคœ.เค“เคชเคจ('เคšเคฟเคคเฅเคฐ.เคœเฅ‡เคชเฅ€เคœเฅ€')` | `เคชเฅ€เค†เคˆเคเคฒ.เค‡เคฎเฅ‡เคœ` | HWC format with RGB channelsเฅค | +| เค“เคชเคจเคธเฅ€เคตเฅ€ | `เค“เคชเฅ‡เค‚เคธเฅ€เคตเฅ€.เค‡เคฎเคฐเฅ‡เคก('เคšเคฟเคคเฅเคฐ.เคœเฅ‡เคชเฅ€เคœเฅ€')` | `เคเคจเคชเฅ€.เคจเฅเคกเค†เคฐเฅ‡` | HWC format with BGR channels `uint8 (0-255)`เฅค | +| เคจเคฎเฅเคชเฅ€ | `เคจเคชเคพเคˆ.เคœเฅ€เคฐเฅ‹เคธ((640,1280,เฅฉ))` | `เคเคจเคชเฅ€.เคจเคกเค…เคฐเฅ‡` | HWC format with BGR channels `uint8 (0-255)`เฅค | +| เคŸเฅ‰เคฐเฅเคš | `เคŸเฅ‰เคฐเฅเคš.เคœเฅ€เคฐเฅ‹เคธ(16,3,320,640)` | `เคŸเฅ‰เคฐเฅเคš.เคŸเฅ‡เค‚เคธเคฐ` | BCHW format with RGB channels `float32 (0.0-1.0)`เฅค | +| เคธเฅ€เคเคธเคตเฅ€ | `'เคธเฅเคฐเฅ‹เคค.เคธเฅ€เคเคธเคตเฅ€'` | `เคถเคƒ` or `เคชเคฅ` | เค›เคตเคฟเคฏเฅ‹เค‚, เคตเฅ€เคกเคฟเคฏเฅ‹เคœเคผ, เคฏเคพ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพเค“เค‚ เค•เฅ€ เคชเคฅเฅ‹เค‚ เค•เฅ‹ เคธเคฎเฅ‡เคŸเคจเฅ‡ เคตเคพเคฒเฅ€ CSV เคซเคผเคพเค‡เคฒเฅค | +| เคตเฅ€เคกเคฟเคฏเฅ‹ โœ… | `'เคตเฅ€เคกเคฟเคฏเฅ‹.เคฎเฅเคชเฅช'` | `เคชเคฅ` or `เคชเคฅ` | MP4, AVI, เค†เคฆเคฟ เคœเฅˆเคธเฅ‡ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เคฎเฅ‡เค‚ เคตเฅ€เคกเคฟเคฏเฅ‹เฅค | +| เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ โœ… | `'เคชเคฅ/'` | `เคถเคƒ` or `เคชเคฅ` | เค›เคตเคฟเคฏเฅ‹เค‚ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹เคœเคผ เค•เฅ‹ เคธเคฎเฅ‡เคŸเคจเฅ‡ เคตเคพเคฒเฅ€ เคเค• เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เค•เคพ เคชเคฅเฅค | +| เค—เฅเคฒเฅ‰เคฌ โœ… | `'เคชเคฅ/ *.เคœเฅ‡เคชเฅ€เคœเฅ€'` | `เคถเคƒ` | เคเค•เคพเคงเคฟเค• เคซเคผเคพเค‡เคฒเฅ‹เค‚ เค•เฅ‡ เคฎเคฟเคฒเคคเฅ‡-เคœเฅเคฒเคคเฅ‡ เค—เฅ‹เคฒเคฟเคฏเคพเคเฅค เคตเคพเค‡เคฒเฅเคกเค•เคพเคฐเฅเคก เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ `*` เคšเคฐเคฟเคคเฅเคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค | +| เคฏเฅ‚เคŸเฅเคฏเฅ‚เคฌ โœ… | `'https://youtu.be/LNwODJXcvt4'` | `เคถเคƒ` | เคเค• เคฏเฅ‚เคŸเฅเคฏเฅ‚เคฌ เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ‡ เคฒเคฟเค เคฏเฅ‚เค†เคฐเคเคฒเฅค | +| เคธเฅเคŸเฅเคฐเฅ€เคฎ โœ… | `'rtsp://เคฎเคพเคฆเฅเคฏเคฆเคฟเคจเคคเคพ.เค•เฅ‰เคฎ/media.เคฎเฅเคชเฅช'` | `เคถเคƒ` | RTSP, RTMP, TCP เคฏเคพ IP เคชเคคเฅ‡ เคœเฅˆเคธเฅ‡ เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคชเฅเคฐเฅ‹เคŸเฅ‹เค•เฅ‰เคฒเฅเคธ เค•เฅ‡ เคฒเคฟเค เคชเคคเคพเฅค | +| เคฎเคฒเฅเคŸเฅ€-เคธเฅเคŸเฅเคฐเฅ€เคฎ โœ… | `'เคธเฅ‚เคšเฅ€.เคธเฅเคŸเฅเคฐเฅ€เคฎเฅเคธ'` | `เคถเคƒ` or `เคชเคฅ` | เคชเฅเคฐเคคเคฟ เคชเค‚เค•เฅเคคเคฟ เคเค• เคธเฅเคŸเฅเคฐเคฟเคฎ URL เค•เฅ‡ เคธเคพเคฅ `*.streams` เคชเคพเค  เคซเคผเคพเค‡เคฒ, เค‰เคฆเคพเคนเคฐเคฃ เค•เฅ‡ เคฒเคฟเค 8 เคธเฅเคŸเฅเคฐเฅ€เคฎ 8 เคฌเฅˆเคš-เค†เค•เคพเคฐ เค•เฅ‡ เคธเคพเคฅ เคšเคฒเฅ‡เค‚เค—เฅ‡เฅค | + +เคฒเฅ‡เค–เค• เค†เคฆเคพเคจ เคชเฅเคฐเคฟเคฏเคคเคฎเคพเคจเคธเฅ‹เค‚ เค•เคพ เคธเฅเคเคพเคต เคฆเฅ‡เคคเฅ‡ เคนเฅˆเค‚: + +!!! 
Example "เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคธเฅเคฐเฅ‹เคค" + + === "เค›เคตเคฟ" + เคเค• เค›เคตเคฟ เคซเคผเคพเค‡เคฒ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เค›เคตเคฟ เคซเคผเคพเค‡เคฒ เค•เฅ‡ เคฒเคฟเค เคชเคฅ เคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เค•เคฐเฅ‡เค‚ + เคธเฅเคฐเฅ‹เคค = 'เคซเคพเคˆเคฒ/เคชเคฐ/เคšเคฟเคคเฅเคฐ.jpg' + + # เค›เคตเคฟ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + + # เคชเคฐเคฟเคฃเคพเคฎ เคธเฅ‚เคšเฅ€ เค•เฅ‹ เคชเฅเคฐเฅ‹เคธเฅ‡เคธ เค•เคฐเฅ‡เค‚ + for เคชเคฐเคฟเคฃเคพเคฎ in เคชเคฐเคฟเคฃเคพเคฎ: + เคฌเฅ‰เค•เฅเคธ = เคชเคฐเคฟเคฃเคพเคฎ.เคฌเฅ‰เค•เฅเคธ # เคฌเฅ‰เค•เฅเคธ เค†เค‰เคŸเคชเฅเคŸเฅเคธ เค•เฅ‡ เคฒเคฟเค เคฌเฅ‰เค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + เคฎเคพเคธเฅเค•เฅเคธ = เคชเคฐเคฟเคฃเคพเคฎ.เคฎเคพเคธเฅเค•เฅเคธ # เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเคพเคธเฅเค•เฅเคธ เค†เค‰เคŸเคชเฅเคŸเฅเคธ เค•เฅ‡ เคฒเคฟเค เคฎเคพเคธเฅเค•เฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + เค•เฅ€เคชเฅ‰เค‡เค‚เคŸเฅเคธ = เคชเคฐเคฟเคฃเคพเคฎ.เค•เฅ€เคชเฅ‰เค‡เค‚เคŸเฅเคธ # เคชเฅ‹เคœ เค•เฅ‡ เคฒเคฟเค เค•เฅ€เคชเฅ‰เค‡เค‚เคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + เคชเฅเคฐเฅ‹เคฌเฅเคธ = เคชเคฐเคฟเคฃเคพเคฎ.เคชเฅเคฐเฅ‹เคฌเฅเคธ # เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค†เค‰เคŸเคชเฅเคŸเฅเคธ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเฅ‹เคฌเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ + ``` + + === "เคธเฅเค•เฅเคฐเฅ€เคจเคถเฅ‰เคŸ" + เคตเคฐเฅเคคเคฎเคพเคจ เคธเฅเค•เฅเคฐเฅ€เคจ เคธเคพเคฎเค—เฅเคฐเฅ€ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เคตเคฐเฅเคคเคฎเคพเคจ เคธเฅเค•เฅเคฐเฅ€เคจ เคธเคพเคฎเค—เฅเคฐเฅ€ เค•เฅ‹ เคธเฅเคฐเฅ‹เคค เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคชเคฐเคฟเคญเคพเคทเคฟเคค เค•เคฐเฅ‡เค‚ + เคธเฅเคฐเฅ‹เคค = 'เคธเฅเค•เฅเคฐเฅ€เคจ' + + # เคตเคฐเฅเคคเคฎเคพเคจ เคธเคพเคฎเค—เฅเคฐเฅ€ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + ``` + + === "เคฏเฅ‚เค†เคฐเคเคฒ" + เคฆเฅ‚เคฐเคธเฅเคฅ เค›เคตเคฟ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เค—เคนเคจเคฐเฅ เคฐเฅ‚เคช เคธเฅ‡ เคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เคฆเฅ‚เคฐเคธเฅเคฅ เค›เคตเคฟ เคฏเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ€ เคฏเฅ‚เค†เคฐเคเคฒ + เคธเฅเคฐเฅ‹เคค = 'https://ultralytics.com/เค›เคตเคฟ/เคฌเคธ.เคœเฅ‡เคชเฅ€เคœเฅ€' + + # เคฏเฅ‚เค†เคฐเคเคฒ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + ``` + + === "เค†เคฆเคฐเฅเคถ" + Python Imaging Library (PIL) เค•เฅ‡ เคธเคพเคฅ เค–เฅ‹เคฒเฅ€ เค—เคˆ เค›เคตเคฟ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + from PIL import Image + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # Python Imaging Library (PIL) เค•เฅ‡ เคธเคพเคฅ เค–เฅ‹เคฒเฅ€ เค—เคˆ เค›เคตเคฟ + เคธเฅเคฐเฅ‹เคค = 
Image.open('เค›เคตเคฟ.เคœเฅ‡เคชเฅ€เคœเฅ€') + + # เค†เคฆเคฐเฅเคถ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + ``` + + === "เค“เคชเฅ‡เค‚เคธเฅ€เคตเฅ€" + OpenCV เค•เฅ‡ เคธเคพเคฅ เคชเคขเคผเฅ€ เค—เคˆ เค›เคตเคฟ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + import cv2 + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # OpenCV เค•เฅ‡ เคธเคพเคฅ เคชเคขเคผเฅ€ เค—เคˆ เค›เคตเคฟ + เคธเฅเคฐเฅ‹เคค = cv2.imread('เค›เคตเคฟ.เคœเฅ‡เคชเฅ€เคœเฅ€') + + # เค“เคชเฅ‡เค‚เคธเฅ€เคตเฅ€ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + ``` + + === "เคจเคฎเฅเคชเฅ€" + numpy array เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคชเฅเคฐเคธเฅเคคเฅเคค เค›เคตเคฟ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚เฅค + ```python + import numpy as np + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เค›เคตเคฟ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคเค• เคนเคฟเค‚เคฆเฅ€ เค›เคตเคฟ เค•เฅ‹ เคฌเคจเคพเคเค + เคธเฅเคฐเฅ‹เคค = np.zeros((640, 640, 3)) + + # เคจเคฎเฅเคชเฅ€ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคเค‚ + เคชเคฐเคฟเคฃเคพเคฎ = model(เคธเฅเคฐเฅ‹เคค) # เคฐเคฟเคœเคฒเฅเคŸเฅเคธ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคธเฅ‚เคšเฅ€ + ``` + +[เคตเคพเคนเคจ เค•เฅ‡ เคชเฅเคฐเฅเคœเฅ‡ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1 + +[เคซเฅเคŸเคฌเฅ‰เคฒ เค–เคฟเคฒเคพเคกเคผเฅ€ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442 + +[เคฒเฅ‹เค—เฅ‹เค‚ เค•เคพ เค—เคฟเคฐเคจเคพ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43 diff --git a/ultralytics/docs/hi/modes/predict.md:Zone.Identifier b/ultralytics/docs/hi/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/track.md b/ultralytics/docs/hi/modes/track.md new file mode 100755 index 0000000..7f5773f --- /dev/null +++ b/ultralytics/docs/hi/modes/track.md @@ -0,0 +1,358 @@ +--- +comments: true +description: เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎ เคฎเฅ‡เค‚ เค†เคตเค• เคŸเฅเคฐเฅ‡เค• เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค Ultralytics YOLO เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เฅˆเคธเฅ‡ เค•เคฐเฅ‡เค‚เฅค เคŸเฅเคฐเฅˆเค•เคฐเฅเคธ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค”เคฐ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เค•เฅ‹ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค—เคพเค‡เคกเฅค +keywords: Ultralytics, YOLO, เค†เคตเค• เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—, เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎ, BoT-SORT, ByteTrack, เคชเคพเคฏเคฅเคจ เค—เคพเค‡เคก, CLI เค—เคพเค‡เคก +--- + +# Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเคฒเฅเคŸเฅ€-เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— + +Multi-object tracking examples + +เคตเฅ€เคกเคฟเคฏเฅ‹ เคเคจเคพเคฒเคฟเคŸเคฟเค•เฅเคธ เค•เฅ‡ เค•เฅเคทเฅ‡เคคเฅเคฐ เคฎเฅ‡เค‚, เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเค• เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เค•เคพเคฐเฅเคฏ เคนเฅˆ เคœเฅ‹ เค•เฅ‡เคตเคฒ 
เคซเฅเคฐเฅ‡เคฎ เคฎเฅ‡เค‚ เคตเคธเฅเคคเฅเค“เค‚ เค•เฅ‡ เคธเฅเคฅเคพเคจ เค”เคฐ เคตเคฐเฅเค— เค•เฅ€ เคชเคนเคšเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เค…เคฒเคพเคตเคพ เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ‡ เคชเฅเคฐเค—เคคเคฟ เค•เฅ‡ เคธเคพเคฅ-เคธเคพเคฅ เคชเฅเคฐเคคเฅเคฏเฅ‡เค• เค–เฅ‹เคœเฅ€ เค—เคˆ เคตเคธเฅเคคเฅ เค•เฅ‡ เคฒเคฟเค เคเค• เค…เคฆเฅเคตเคฟเคคเฅ€เคฏ เค†เคˆเคกเฅ€ เคฌเคจเคพเค เคฐเค–เคคเคพ เคนเฅˆเฅค เค‡เคธเค•เฅ‡ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เคนเฅˆเค‚ เค…เคธเฅ€เคฎเคฟเคคโ€”เคจเคฟเค—เคฐเคพเคจเฅ€ เค”เคฐ เคธเฅเคฐเค•เฅเคทเคพ เคธเฅ‡ เคฒเฅ‡เค•เคฐ เคฐเคฟเคฏเคฒ-เคŸเคพเค‡เคฎ เคธเฅเคชเฅ‹เคฐเฅเคŸเฅเคธ เคเคจเคพเคฒเคฟเคŸเคฟเค•เฅเคธ เคคเค•เฅค + +## เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค Ultralytics YOLO เค•เฅเคฏเฅ‹เค‚ เคšเฅเคจเฅ‡เค‚? + +Ultralytics เคŸเฅเคฐเฅˆเค•เคฐเฅ‹เค‚ เคธเฅ‡ เค‰เคคเฅเคชเคจเฅเคจ เคชเคฐเคฟเคฃเคพเคฎ เคฎเคพเคจเค• เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‡เคฒ เค–เคพเคคเฅ‡ เคนเฅˆเค‚, เคฒเฅ‡เค•เคฟเคจ เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎ เคฎเฅ‡เค‚ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅ‹เค‚ เค•เฅ‹ เคŸเฅเคฐเฅˆเค• เค•เคฐเคจเฅ‡ เค”เคฐ เค‰เคชเคฏเฅ‹เค—เฅ€ เค—เคฃเคจเคพ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เค†เคธเคพเคจ เคนเฅ‹ เคœเคพเคคเคพ เคนเฅˆเฅค เคฏเคนเคพเค เค†เคชเค•เฅ‹ Ultralytics YOLO เค•เคพ เค‰เคชเคฏเฅ‹เค— เค…เคชเคจเฅ€ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ€ เคœเคฐเฅ‚เคฐเคคเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเคฒเคพเคน เคฆเฅ€ เคœเคพ เคฐเคนเฅ€ เคนเฅˆ: + +- **เคชเฅเคฐเคฆเคฐเฅเคถเคจเคถเฅ€เคฒเคคเคพ:** เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‡ เคฎเคพเคฎเคฒเฅ‡ เคฎเฅ‡เค‚ เคธเคฎเคฏ-เคธเคคเฅเคฏ เคนเฅ€ เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคธเคพเคฅ เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎ เค•เฅ‹ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เคฐเฅ‡เค‚เฅค +- **เคฒเคšเฅ€เคฒเคพเคชเคจ:** เคตเคฟเคญเคฟเคจเฅเคจ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎ เค”เคฐ เคตเคฟเคจเฅเคฏเคพเคธ เคชเคฐ เคธเคฎเคฐเฅเคฅเคจ เค•เคฐเฅ‡เค‚เฅค +- **เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เค†เคธเคพเคจเฅ€:** เคเคŸเคชเคŸ เคเค•เฅ€เค•เคฐเคฃ เค”เคฐ เคกเคฟเคชเฅเคฒเฅ‰เคฏ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคธเคฐเคฒ เคชเคพเคฏเคฅเคจ API เค”เคฐ CLI เคตเคฟเค•เคฒเฅเคชเฅค +- **เค•เคธเฅเคŸเคฎเคพเค‡เคœเคผเฅ‡เคฌเคฟเคฒเคฟเคŸเฅ€:** เค•เคธเฅเคŸเคฎ เคŸเฅเคฐเฅ‡เคจ เค•เคฟเค เค—เค YOLO เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เค‰เคชเคฏเฅ‹เค— เคฎเฅ‡เค‚ เค†เคธเคพเคจ, เคœเคฟเคธเคธเฅ‡ เคกเฅ‹เคฎเฅ‡เคจ-เคตเคฟเคถเคฟเคทเฅเคŸ เคเคชเฅเคฒเคฟเค•เฅ‡เคถเคจ เคฎเฅ‡เค‚ เคธเคฎเคพเคตเฅ‡เคถ เค•เคฐเคจเคพ เคธเค‚เคญเคต เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: Ultralytics YOLOv8 เค•เฅ‡ เคธเคพเคฅ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค”เคฐ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—เฅค +

+ +## เคตเคพเคธเฅเคคเคตเคฟเค• เคฆเฅเคจเคฟเคฏเคพ เค•เฅ‡ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— + +| เคชเคฐเคฟเคตเคนเคจ | เค–เฅเคฆเคฐเคพเคฌเคพเฅ›เคพเคฐ | เคœเคฒเคœเฅ€เคตเคพเคฃเฅเคœเคจเคฟเคค เค‰เคคเฅเคชเคพเคฆเคจ | +|:-------------------------------:|:-----------------------------:|:----------------------------:| +| ![เคตเคพเคนเคจ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—][vehicle track] | ![เคฒเฅ‹เค— เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—][people track] | ![เคฎเค›เคฒเฅ€ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—][fish track] | +| เคตเคพเคนเคจ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— | เคฒเฅ‹เค— เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— | เคฎเค›เคฒเฅ€ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— | + +## เคตเคฟเคถเฅ‡เคทเคคเคพเคเค เคเค• เคเคฒเค• เคฎเฅ‡เค‚ + +Ultralytics YOLO เค…เคชเคจเฅ€ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคตเคฟเคถเฅ‡เคทเคคเคพเค“เค‚ เค•เฅ‹ เคฌเคขเคผเคพเค•เคฐ เคฎเคœเคผเคฌเฅ‚เคค เค”เคฐ เคฌเคนเฅเคฎเฅเค–เฅ€ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ: + +- **เคฐเฅ€เคฏเคฒ-เคŸเคพเค‡เคฎ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—:** เค‰เคšเฅเคš เคซเฅเคฐเฅ‡เคฎ เคฆเคฐ เคตเคพเคฒเฅ‡ เคตเฅ€เคกเคฟเคฏเฅ‹ เคฎเฅ‡เค‚ เคธเคฎเคฏเคฌเคฆเฅเคง เคฐเฅ‚เคช เคธเฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅเคธ เค•เฅ‹ เคŸเฅเคฐเฅˆเค• เค•เคฐเฅ‡เค‚เฅค +- **เคเค•เคพเคงเคฟเค• เคŸเฅเคฐเฅˆเค•เคฐ เคธเคฎเคฐเฅเคฅเคจ:** เค‡เคธเฅเคฅเคพเคชเคฟเคค เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅ‡ เคเค• เคšเฅเคจเฅ‡เค‚เฅค +- **เค•เคธเฅเคŸเคฎเคพเค‡เคœเคผเฅ‡เคฌเคฒ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ:** เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅเคธ เค•เฅ‹ เคธเคฎเคพเคฏเฅ‹เคœเคฟเคค เค•เคฐเค•เฅ‡ เคตเคฟเคถเฅ‡เคท เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‹ เคชเฅ‚เคฐเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎ เค•เฅ‹ เค…เคจเฅเค•เฅ‚เคฒเคฟเคค เค•เคฐเฅ‡เค‚เฅค + +## เค‰เคชเคฒเคฌเฅเคง เคŸเฅเคฐเฅˆเค•เคฐเฅเคธ + +Ultralytics YOLO เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎเฅ‹เค‚ เค•เคพ เคธเคฎเคฐเฅเคฅเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เค†เคช เค‡เคจเฅเคนเฅ‡เค‚ เคฏเฅ‹เค—เฅเคฏ YAML เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ (`tracker=tracker_type.yaml`) เคชเคพเคฐเคฟเคค เค•เคฐเค•เฅ‡ เคธเค•เฅเคทเคฎ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - เค‡เคธ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค `botsort.yaml` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - เค‡เคธ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค `bytetrack.yaml` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค + +เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคŸเฅเคฐเฅˆเค•เคฐ BoT-SORT เคนเฅˆเฅค + +## เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— + +เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคŸเฅเคฐเฅ€เคฎเฅเคธ เคชเคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, YOLOv8n, YOLOv8n-seg เค”เคฐ YOLOv8n-pose เคœเฅˆเคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค Detect, Segment เคฏเคพ Pose เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคชเคพเคฏเคฅเคจ" + + ```python + from ultralytics import YOLO + + # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฏเคพ เค•เคธเฅเคŸเคฎ เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• Detect เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-seg.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• Segment เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-pose.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• Pose เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/best.pt') # เคเค• เค•เคธเฅเคŸเคฎ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฐเฅ‡เค‚ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True) # เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฐเฅ‡เค‚ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml") # ByteTrack เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฐเฅ‡เค‚ + ``` + + === "CLI" + + ```เคฌเฅˆเคถ + # CLI เค•เฅ‡ เคธเคพเคฅ เคตเคฟเคญเคฟเคจเฅเคจ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เคฐเฅ‡เค‚ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=yolov8n.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคกเคฟเคŸเฅ‡เค•เฅเคŸ เคฎเฅ‰เคกเคฒ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=yolov8n-seg.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคฎเฅ‰เคกเคฒ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=yolov8n-pose.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคชเฅ‹เคœ เคฎเฅ‰เคกเคฒ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=path/to/best.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" # เค•เคธเฅเคŸเคฎ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ + + # ByteTrack เคŸเฅเคฐเฅˆเค•เคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=path/to/best.pt เคŸเฅเคฐเฅˆเค•เคฐ="bytetrack.yaml" + ``` + +เคŠเคชเคฐ เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เคฎเฅ‡เค‚ เค‰เค‚เค—เคฒเคฟเคฏเฅ‹เค‚ เค•เฅ‡ เคจเคฟเคšเคฒเฅ‡ เคนเคฟเคธเฅเคธเฅ‡ เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค‰เคชเคฒเคฌเฅเคง เคนเฅˆ เคธเคญเฅ€ เคกเคฟเคŸเฅ‡เค•เฅเคŸ, เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เค”เคฐ เคชเฅ‹เคœ เคฎเฅ‰เคกเคฒเฅเคธ เค•เฅ‡ เคฒเคฟเค เคœเฅ‹ เคตเฅ€เคกเคฟเคฏเฅ‹ เคฏเคพ เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคธเฅเคฐเฅ‹เคค เคชเคฐ เคšเคฒเคพ เคœเคพ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +## เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ + +### เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค†เคฐเฅเค—เฅเคฏเฅเคฎเฅ‡เค‚เคŸเฅเคธ + +เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ, เคœเฅˆเคธเฅ‡ เค•เคฟ `conf`, `iou` เค”เคฐ `show`, เคจเฅ‡ เคชเฅเคฐเฅ‡เคกเคฟเค•เฅเคถเคจ เคฎเฅ‹เคก เค•เฅ‡ เคธเคพเคฅ เค—เฅเคฃเฅ‹เค‚ เค•เฅ‹ เคธเคพเคเคพ เค•เคฐเคคเคพ เคนเฅˆเฅค เค”เคฐ เคตเคฟเคจเฅเคฏเคพเคธ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เค•เฅƒเคชเคฏเคพ [เคชเฅเคฐเฅ‡เคกเคฟเค•เฅเคถเคจ](../modes/predict.md#inference-arguments) เคฎเฅ‰เคกเคฒ เคชเฅƒเคทเฅเค  เคชเคฐ เคธเค‚เคฆเคฐเฅเคญ เค•เคฐเฅ‡เค‚เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคชเคพเคฏเคฅเคจ" + + ```python + from ultralytics import YOLO + + # เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค†เคตเค‚เคŸเคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคเค‚ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True) + ``` + + === "CLI" + + ```เคฌเฅˆเคถ + # เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเฅ‡เคธ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐ เค•เคฐเฅ‡เค‚ เค”เคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคเค‚ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=yolov8n.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" conf=0.3, iou=0.5 show + ``` + +### เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฏเคจ + +Ultralytics เค†เคชเค•เฅ‹ เคเค• เคธเค‚เคถเฅ‹เคงเคฟเคค เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เคญเฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค เคเคธเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคฌเคธ [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) เคธเฅ‡ เคเค• เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ (เคœเฅˆเคธเฅ‡ `custom_tracker.yaml`) เค•เฅ€ เคเค• เคชเฅเคฐเคคเคฟเคฒเคฟเคชเคฟ เคฌเคจเคพเคเค เค”เคฐ เค•เคฟเคธเฅ€ เคญเฅ€ เคตเคฟเคจเฅเคฏเคพเคธ เค•เฅ‹ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเฅ‡เค‚ ( `tracker_type` เค•เฅ‹ เค›เฅ‹เคกเคผเค•เคฐ) เค…เคชเคจเฅ€ เคœเคฐเฅ‚เคฐเคคเฅ‹เค‚ เค•เฅ‡ เค…เคจเฅเคธเคพเคฐเฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคชเคพเคฏเคฅเคจ" + + ```python + from ultralytics import YOLO + + # เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ เคเค• เค•เคธเฅเคŸเคฎ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคเค‚ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```เคฌเฅˆเคถ + # เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‡ เคธเคพเคฅ เคเค• เค•เคธเฅเคŸเคฎ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ เค”เคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคเค‚ + เคฏเฅ‹เคฒเฅ‹ เคŸเฅเคฐเฅˆเค• เคฎเฅ‰เคกเคฒ=yolov8n.pt เคธเฅเคฐเฅ‹เคค="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค†เคฐเฅเค—เฅเคฏเฅเคฎเฅ‡เค‚เคŸเฅเคธ เค•เฅ€ เคเค• เคตเฅเคฏเคพเคชเค• เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค, [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) เคชเฅ‡เคœ เคชเคฐ เคธเค‚เคฆเคฐเฅเคญ เค•เคฐเฅ‡เค‚เฅค + +## เคชเคพเคฏเคฅเคจ เค‰เคฆเคพเคนเคฐเคฃ + +### เคŸเฅเคฐเฅˆเค• เคชเคฐเฅเคธเคฟเคธเฅเคŸ เค•เคฐเคจเคพ + +เคฏเคนเคพเค เคเค• Python เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคนเฅˆ เคœเฅ‹ OpenCV (`cv2`) เค”เคฐ YOLOv8 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎ เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคนเฅˆเฅค เค‡เคธ เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคฎเฅ‡เค‚ เคฏเคน เคฎเคพเคจ เคฒเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ เค•เคฟ เค†เคชเคจเฅ‡ เคชเคนเคฒเฅ‡ เคนเฅ€ เค†เคตเคถเฅเคฏเค• เคชเฅˆเค•เฅ‡เคœ (`opencv-python` เค”เคฐ `ultralytics`) เค‡เค‚เคธเฅเคŸเฅ‰เคฒ เค•เคฐ เคฒเคฟเค เคนเฅˆเค‚เฅค `persist=True` เค†เคฐเฅเค—เฅเคฏเฅเคฎเฅ‡เค‚เคŸ เคฏเฅ‡ เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‹ เคฌเคคเคพเคคเคพ เคนเฅˆ เค•เคฟ เคฎเฅŒเคœเฅ‚เคฆเคพ เค‡เคฎเฅ‡เคœ เคฏเคพ เคซเคผเฅเคฐเฅ‡เคฎ เค‰เคจ เค…เคจเฅเคธเคฐเคฃ เคคเคฅเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคนเฅ‹เคคเคพ เคนเฅˆ เคœเฅ‹ เคชเคฟเค›เคฒเฅ‡ 
เค‡เคฎเฅ‡เคœ เคฎเฅ‡เค‚ เคธเฅ‡ เคฌเคจเคพเค เค—เค เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚เฅค + +!!! Example "เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคซเคผเฅ‹เคฐ-เคฒเฅ‚เคช" + + ```python + import cv2 + from ultralytics import YOLO + + # YOLOv8 เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค–เฅ‹เคฒเฅ‡เค‚ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎเฅเคธ เคชเคฐ เคฒเฅ‚เคช เคšเคฒเคพเคเค‚ + while cap.isOpened(): + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅ‡ เคเค• เคซเฅเคฐเฅ‡เคฎ เคชเคขเคผเฅ‡เค‚ + success, frame = cap.read() + + if success: + # เคซเฅเคฐเฅ‡เคฎ เคชเคฐ YOLOv8 เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคšเคฒเคพเคเค‚, เคซเคผเฅเคฐเฅ‡เคฎ เค•เฅ‡ เคฌเฅ€เคš เคŸเฅเคฐเฅˆเค• เคชเคฐเฅเคธเคฟเคธเฅเคŸ เค•เคฐเคคเคพ เคนเฅˆ + results = model.track(frame, persist=True) + + # เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคซเคผเฅเคฐเฅ‡เคฎ เคชเคฐ เคฆเคฟเค–เคพเคเค‚ + annotated_frame = results[0].plot() + + # เคŸเฅเคฐเฅˆเค• เค•เคฐเฅ‡เค‚ เคซเคผเฅเคฐเฅ‡เคฎ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐเฅ‡เค‚ + cv2.imshow("YOLOv8 เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—", annotated_frame) + + # 'q' เคฆเคฌเคพเคเค‚ เคคเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเฅ‡เค‚ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ‡ เค…เค‚เคค เคคเค• เคชเคนเฅเคเคšเคจเฅ‡ เคชเคฐ เคญเฅ€ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเฅ‡เค‚ + break + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅˆเคชเฅเคšเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค›เฅ‹เคกเคผเฅ‡เค‚ เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคตเคฟเค‚เคกเฅ‹ เคฌเค‚เคฆ เค•เคฐเฅ‡เค‚ + cap.release() + cv2.destroyAllWindows() + ``` + +เคฎเฅˆเคจเฅ‡ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค 'model(frame)' เคธเฅ‡ 'model.track(frame)' เคฎเฅ‡เค‚ เคฌเคฆเคฒเคพเคต เค•เคฟเคฏเคพ เคนเฅˆ, เคœเฅ‹ เคธเคพเคงเคพเคฐเคฃ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ€ เคฌเคœเคพเคฏ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคธเค‚เคถเฅ‹เคงเคฟเคค เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคชเฅเคฐเคคเคฟ เคซเคผเฅเคฐเฅ‡เคฎ เคตเคพเคฒเฅ€ เคตเฅ€เคกเคฟเคฏเฅ‹ เคชเคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคเค—เคพ, เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคฆเคฟเค–เคพเคเค—เคพ เค”เคฐ เคเค• เคตเคฟเค‚เคกเฅ‹ เคฎเฅ‡เค‚ เคฆเคฟเค–เคพเคเค—เคพเฅค 'q' เคฆเคฌเคพเคจเฅ‡ เคชเคฐ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +### เคธเคฎเคฏ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค• เคšเคฟเคคเฅเคฐเคฟเคค เค•เคฐเคจเคพ + +เคธเค‚เคฌเค‚เคงเคฟเคค เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเฅเคฐเฅ‡เคฎ เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เฅเคธ เค•เฅ‹ เคชเฅเคฒเฅ‰เคŸ เค•เคฐเค•เฅ‡ เคธเคฎเคพเคจเฅเคคเคฐ เคธเฅเคฅเคพเคจเฅ€เคฏ เคฎเคพเคฐเฅเค—เฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐเคจเฅ‡ เคธเฅ‡ เคนเคฎเฅ‡เค‚ เคšเคฟเคคเฅเคฐเคฟเคค เคชเคฅ เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคชเคนเคฒเฅ‡ เค•เฅ‡ เค…เค‚เคคเคฐเคพเคฒเฅ‹เค‚ เค”เคฐ เคชเคคเฅ‹เค‚ เค•เฅ€ เค†เคชเฅ‚เคฐเฅเคคเคฟ เคฎเฅ‡เค‚ เคฎเฅ‚เคฒเฅเคฏเคตเคพเคจ เคชเฅเคฐเฅ‡เคฐเคฃเคพ เคฎเคฟเคฒ เคธเค•เคคเฅ€ เคนเฅˆเฅค Ultralytics YOLOv8 เค•เฅ‡ เคธเคพเคฅ เคธเคฎเคฏ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เฅเคธ เค•เฅ‹ เคชเฅเคฒเฅ‰เคŸ เค•เคฐเคจเคพ เคเค• เคšเฅเคธเฅเคค เค”เคฐ เค•เฅเคถเคฒ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคนเฅˆเฅค + +เคจเคฟเคฎเฅเคจ เค‰เคฆเคพเคนเคฐเคฃ เคฎเฅ‡เค‚, เคนเคฎ เคฆเคฟเค–เคพเค เค—เค เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเฅเคฐเฅ‡เคฎเฅเคธ เคชเคฐ YOLO เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เค—เคคเคฟ เค•เฅ‹ เคšเคฟเคคเฅเคฐเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค 
เค•เฅˆเคธเฅ‡ เค•เคฐเฅ‡เค‚เค—เฅ‡เฅค เคฏเคน เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคเค• เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค•เฅ‹ เค–เฅ‹เคฒเคคเคพ เคนเฅˆ, เคซเฅเคฐเฅ‡เคฎ เคฆเคฐ เคซเฅเคฐเฅ‡เคฎ เคฏเคน เคชเคขเคผเคคเคพ เคนเฅˆ, เค”เคฐ YOLO เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ€ เคชเคนเคšเคพเคจ เค”เคฐ เคŸเฅเคฐเฅˆเค• เค•เคฐเคคเคพ เคนเฅˆเฅค เคชเคนเคšเคพเคจ เคตเคพเคฒเฅ‡ เคฌเฅ‰เค•เฅเคธ เค•เฅ‡ เค•เฅ‡เค‚เคฆเฅเคฐเฅ€เคฏ เคชเฅเคฐเคพเค‚เค•เฅเคคเคฟเคฏเฅ‹เค‚ เค•เฅ‹ เคธเค‚เคตเฅ‡เคฆเฅ€ เค•เคฐเค•เฅ‡ เค‰เคจเฅเคนเฅ‡เค‚ เคœเฅ‹เคกเคผเคคเฅ‡ เคนเฅˆเค‚, เคนเคฎ เคŸเฅเคฐเฅˆเค• เค•เคฟเค เค—เค เคตเคธเฅเคคเฅเค“เค‚ เคฆเฅเคตเคพเคฐเคพ เคซเคผเคพเคฒเคคเฅ‚ เค•เฅ€ เคœเค—เคนเฅ‹เค‚ เค•เฅ‹ เคšเฅ‚เค‚เค•เคฟเคฏเฅ‹เค‚ เค•เคพ เคธเค‚เค—เฅเคฐเคนเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฒเคพเค‡เคจเฅ‡เค‚ เค–เฅ€เค‚เคš เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +!!! Example "เค•เคˆ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎเฅเคธ เคชเคฐ เคชเคฅ เคšเคฟเคคเฅเคฐเคฟเคค เค•เคฐเคจเคพ" + + ```python + from collections import defaultdict + + import cv2 + import numpy as np + + from ultralytics import YOLO + + # YOLOv8 เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค–เฅ‹เคฒเฅ‡เค‚ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # เคŸเฅเคฐเฅˆเค• เค‡เคคเคฟเคนเคพเคธ เค•เฅ‹ เคธเค‚เค—เฅเคฐเคนเฅ€เคค เค•เคฐเฅ‡เค‚ + track_history = defaultdict(lambda: []) + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเฅเคฐเฅ‡เคฎเฅเคธ เคชเคฐ เคฒเฅ‚เคช เคšเคฒเคพเคเค‚ + while cap.isOpened(): + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅ‡ เคเค• เคซเฅเคฐเฅ‡เคฎ เคชเคขเคผเฅ‡เค‚ + success, frame = cap.read() + + if success: + # เคซเฅเคฐเฅ‡เคฎ เคชเคฐ YOLOv8 เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคšเคฒเคพเคเค‚, เคซเคผเฅเคฐเฅ‡เคฎ เค•เฅ‡ เคฌเฅ€เคš เคŸเฅเคฐเฅˆเค• เคชเคฐเฅเคธเคฟเคธเฅเคŸ เค•เคฐเคคเคพ เคนเฅˆ + results = model.track(frame, persist=True) + + # เคฌเฅ‰เค•เฅเคธ เค”เคฐ เคŸเฅเคฐเฅˆเค• เค†เคˆเคกเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚ + boxes = results[0].boxes.xywh.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + + # เคฐเคฟเคœเคผเคฒเฅเคŸ เคชเคฐ เคตเคฟเคœเฅเค…เคฒเคพเค‡เคœเคผ เค•เคฐเฅ‡เค‚ + annotated_frame = results[0].plot() + + # เคชเคฅ เคšเคฟเคคเฅเคฐเคฟเคค เค•เคฐเฅ‡เค‚ + for box, track_id in zip(boxes, track_ids): + x, y, w, h = box + track = track_history[track_id] + track.append((float(x), float(y))) # x, y centre point + if len(track) > 30: # 90 เคซเคผเฅเคฐเฅ‡เคฎเฅเคธ เค•เฅ‡ เคฒเคฟเค 90 เคŸเฅเคฐเฅˆเค•เฅเคธ เค•เฅ‹ เคœเคฎเคพ เค•เคฐเฅ‡เค‚ + track.pop(0) + + # เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคฒเคพเค‡เคจเฅ‡เค‚ เค–เฅ€เค‚เคšเฅ‡เค‚ + points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) + cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10) + + # เคชเคฅ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐเฅ‡เค‚ + cv2.imshow("YOLOv8 เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—", annotated_frame) + + # 'q' เคฆเคฌเคพเคฏเฅ‡เค‚ เคคเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเฅ‡เค‚ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ‡ เค…เค‚เคค เคคเค• เคชเคนเฅเคเคšเคจเฅ‡ เคชเคฐ เคญเฅ€ เคซเคผเฅเคฐเฅ‡เคฎ เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเฅ‡เค‚ + break + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅˆเคชเฅเคšเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค›เฅ‹เคกเคผเฅ‡เค‚ เค”เคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคตเคฟเค‚เคกเฅ‹ เคฌเค‚เคฆ เค•เคฐเฅ‡เค‚ + cap.release() + cv2.destroyAllWindows() + ``` + +### เคฎเคฒเฅเคŸเฅ€เคฅเฅเคฐเฅ‡เคก เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— + +เคฎเคฒเฅเคŸเฅ€เคฅเฅเคฐเฅ‡เคก เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเค• เคธเคพเคฅ เค•เคˆ เคตเฅ€เคกเคฟเคฏเฅ‹ 
เคธเฅเคŸเฅเคฐเฅ€เคฎเฅ‹เค‚ เคชเคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคšเคฒเคพเคจเฅ‡ เค•เฅ€ เค•เฅเคทเคฎเคคเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เค–เคพเคธเค•เคฐ เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅ‹เคคเคพ เคนเฅˆ เคœเคฌ เคนเคฎ เค•เคˆ เคจเคฟเค—เคฐเคพเคจเฅ€ เค•เฅˆเคฎเคฐเฅ‹เค‚ เคธเฅ‡ เคœเฅˆเคธเฅ‡ เค•เคฟ เคตเคนเคพเค‚ เคธเฅ‡ เคฎเฅŒเคœเฅ‚เคฆ เคตเฅ€เคกเคฟเคฏเฅ‹ เค‡เคจเคชเฅเคŸ เค•เฅ‹ เคธเค‚เคญเคพเคฒเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเคฐเคธเฅเคชเคฐ เคชเฅเคฐเฅ‹เคธเฅ‡เคธเคฟเค‚เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เค•เฅเคทเคฎเคคเคพ เคฌเคขเคผเคพ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +เคชเฅเคฐเคฆเคพเคจ เค•เคฟเค เค—เค เคชเคพเคฏเคฅเคจ เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคฎเฅ‡เค‚ เคนเคฎ Python เค•เฅ‡ `threading` เคฎเฅ‰เคกเฅเคฏเฅ‚เคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคฏเคน เคธเค‚เคญเคต เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เค•เคฟ เค•เคˆ เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธเฅ‡เคœ เค•เฅ‹ เคเค• เคธเคพเคฅ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค เคฏเคน เคนเคฐ เคฅเฅเคฐเฅ‡เคก เค•เฅ‡ เคฒเคฟเค เคเค• เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคจเฅ‡ เค•เฅ€ เคœเคฟเคฎเฅเคฎเฅ‡เคฆเคพเคฐเฅ€ เคนเฅ‹เคคเฅ€ เคนเฅˆ, เค”เคฐ เคธเคญเฅ€ เคฅเฅเคฐเฅ‡เคก เคธเค‚เค˜ เคฅเฅเคฐเฅ‡เคก เคฌเฅˆเค•เค—เฅเคฐเคพเค‰เค‚เคก เคฎเฅ‡เค‚ เคเค• เคธเคพเคฅ เคšเคฒเคคเฅ‡ เคนเฅˆเค‚เฅค + +เคนเคฐ เคฅเฅเคฐเฅ‡เคก เค•เฅ‹ เคธเคนเฅ€ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅเคธ (เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ, เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคกเคฒ เค”เคฐ เคซเคผเคพเค‡เคฒ เค‡เค‚เคกเฅ‡เค•เฅเคธ) เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคนเคฎ `run_tracker_in_thread` เคจเคพเคฎเค• เคเค• เคซเคผเค‚เค•เฅเคถเคจ เค•เฅ‹ เคชเคฐเคฟเคญเคพเคทเคฟเคค เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เคœเฅ‹ เค‡เคจ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅเคธ เค•เฅ‹ เคธเฅเคตเฅ€เค•เคพเคฐ เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เคฎเฅเค–เฅเคฏ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคฒเฅ‚เคช เค•เฅ‹ เคธเค‚เคฌเค‚เคงเคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคซเคผเค‚เค•เฅเคถเคจ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเฅเคฐเฅ‡เคฎเฅเคธ เค•เฅ‹ เคซเฅเคฐเฅ‡เคฎ เคฆเฅเคตเคพเคฐเคพ เคชเคขเค•เคฐ, เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคจเฅ‡ เค”เคฐ เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคฟเคค เค•เคฐ เคฐเคนเฅ€ เคนเฅˆเฅค + +เค‡เคธ เค‰เคฆเคพเคนเคฐเคฃ เคฎเฅ‡เค‚ เคฆเฅ‹ เค…เคฒเค— เคฎเฅ‰เคกเคฒ เค‡เคธเฅเคคเฅ‡เคฎเคพเคฒ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚: `yolov8n.pt` เค”เคฐ `yolov8n-seg.pt`, เคœเฅ‹ เคนเคฐ เคเค• เค…เคฒเค— เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เคฎเฅ‡เค‚ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‹ เคŸเฅเคฐเฅˆเค• เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคพเค‡เคฒ `video_file1` เค”เคฐ `video_file2` เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฟเค เค—เค เคนเฅˆเค‚เฅค `threading.Thread` เคฎเฅ‡เค‚ `daemon=True` เคตเคฟเคงเคฟเคฎเคคเคฟ เค•เคพ เค‰เคชเคฏเฅ‹เค— เคธเค‚เค•เฅ‡เคค เค•เคฐเคคเคพ เคนเฅˆ เค•เคฟ เคฏเคน เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคคเคพ เคนเฅˆ เค•เคฟ เคœเคฌ เคชเฅเคฐเคฎเฅเค– เค•เคพเคฐเฅเคฏเค•เฅเคฐเคฎ เคธเคฎเคพเคชเฅเคค เคนเฅ‹ เคœเคพเค, เคคเฅ‹ เคฏเฅ‡ เคธเคญเฅ€ เคฅเฅเคฐเฅ‡เคก เคฌเค‚เคฆ เคนเฅ‹ เคœเคพเคเค‚เค—เฅ‡เฅค เคนเคฎ `start()` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคฅเฅเคฐเฅ‡เคกเฅ‹เค‚ เค•เฅ‹ เคถเฅเคฐเฅ‚ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ `join()` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคฎเฅเค–เฅเคฏ เคฅเฅเคฐเฅ‡เคก เค•เฅ‹ เคชเฅเคฐเคคเฅ€เค•เฅเคทเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฌเคจเคพเคคเฅ‡ เคนเฅˆเค‚ เคœเคฌ เคคเค• เค•เคฟ เคŸเฅเคฐเฅˆเค•เคฐ เคฅเฅเคฐเฅ‡เคก เค–เคคเฅเคฎ เคจเคนเฅ€เค‚ เคนเฅ‹ เคœเคพเคคเฅ‡เฅค + +เคšเฅ‚เค‚เค•เคฟ เคธเคญเฅ€ เคฅเฅเคฐเฅ‡เคกเฅ‹เค‚ เคจเฅ‡ เค…เคชเคจเคพ เค•เคพเคฐเฅเคฏ เคชเฅ‚เคฐเคพ เค•เคฐ เคฒเคฟเคฏเคพ เคนเฅˆ, เค‡เคธเคฒเคฟเค `cv2.destroyAllWindows()` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคฆเคฟเค–เคพเคจเฅ‡ เคตเคพเคฒเฅ€ เคตเคฟเค‚เคกเฅ‹ 
เค•เฅ‹ เคฌเค‚เคฆ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค + +!!! Example "เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เคธเฅเคŸเฅเคฐเฅ€เคฎเคฟเค‚เค— เคซเคผเฅ‹เคฐ-เคฒเฅ‚เคช" + + ```python + import threading + import cv2 + from ultralytics import YOLO + + + def run_tracker_in_thread(filename, model, file_index): + """ + เคฅเฅเคฐเฅ‡เคกเคฟเค‚เค— เค•เฅ‡ เคธเคพเคฅ YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เคเค• เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เคฏเคพ webcam เคธเฅเคฐเฅ‹เคค เคธเค‚เค—เคคเคฐเฅ‚เคช เคชเคฐ เคŸเฅเคฐเฅˆเค•เคฐ เคšเคฒเคพเคคเคพ เคนเฅˆเฅค + + เคฏเคน เคซเคผเค‚เค•เฅเคถเคจ เคเค• เคตเฅ‡เคฆเคจเฅ€เคฏ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เคฏเคพ เค•เฅˆเคฎเคฐเคพ เคธเฅเคฐเฅ‹เคค เคธเฅ‡ เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎเฅ‹เค‚ เค•เฅ‹ เคชเค•เคกเคผเคคเคพ เคนเฅˆ เค”เคฐ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค YOLOv8 เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคซเคผเค‚เค•เฅเคถเคจ เค…เคชเคจเฅ€ เคฅเฅเคฐเฅ‡เคก เคฎเฅ‡เค‚ เคšเคฒเคคเคพ เคนเฅˆ เคœเฅ‹ เค•เคพเคฐเฅเคฏ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคเค• เคธเคพเคฅ เคšเคฒเคคเคพ เคนเฅˆเฅค + + Args: + filename (str): เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค•เฅ‡ เคชเคฅ เคฏเคพ เค•เฅˆเคฎเคฐเฅ‡ / เคฌเคพเคนเคฐเฅ€ เค•เฅˆเคฎเคฐเฅ‡ เคธเฅเคฐเฅ‹เคค เค•เคพ เคชเคนเคšเคพเคจเค•เคฐเฅเคคเคพเฅค + model (obj): YOLOv8 เคฎเฅ‰เคกเคฒ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅค + file_index (int): เคซเคผเคพเค‡เคฒ เค•เฅ‹ เคชเคนเคšเคพเคจเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค‚เคฆเฅเคฐเคฟเค• เค•เฅ‹เคกเฅค + + เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚: + เคตเฅ€เคกเคฟเคฏเฅ‹ เคกเคฟเคธเฅเคชเฅเคฒเฅ‡ เคตเคฟเค‚เคกเฅ‹ เคฌเค‚เคฆ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค 'q' เคฆเคฌเคพเคเค‚เฅค + """ + เคตเฅ€เคกเคฟเคฏเฅ‹ = cv2.VideoCapture(filename) # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เคชเคขเคผเฅ‡เค‚ + + while True: + เคธเคซเคฒเคคเคพ, เคซเคผเฅเคฐเฅ‡เคฎ = เคตเฅ€เคกเคฟเคฏเฅ‹.read() # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเฅเคฐเฅ‡เคฎ เคชเคขเคผเฅ‡เค‚ + + # เค•เฅ‹เคˆ เคญเฅ€ เคซเคผเฅเคฐเฅ‡เคฎ เคจ เคฌเคšเคพ เคนเฅ‹, เคคเฅ‹ เคฒเฅ‚เคช เคธเฅ‡ เคฌเคพเคนเคฐ เคจเคฟเค•เคฒเฅ‡เค‚ + if not เคธเคซเคฒเคคเคพ: + เคคเฅ‹เคกเคผเฅ‹ + เคคเฅ‹เคกเคผเฅ‹ + + # เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸเฅเคธ เค•เฅ‹ เคŸเฅเคฐเฅˆเค• เค•เคฐเฅ‡เค‚ เคฏเคฆเคฟ เค‰เคชเคฒเคฌเฅเคง เคนเฅ‹เค‚ + results = model.track(เคซเคผเฅเคฐเฅ‡เคฎ, persist=True) + res_plotted = results[0].plot() + cv2.imshow(f"เคธเฅเคฐเฅ‹เคค_{file_index} เคชเคฐ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—", res_plotted) + + เค•เฅเค‚เคœเฅ€ = cv2.waitKey(1) + if เค•เฅเค‚เคœเฅ€ == ord('q'): + เคคเฅ‹เคกเคผเฅ‹ + + # เคตเฅ€เคกเคฟเคฏเฅ‹ เคธเฅเคฐเฅ‹เคคเฅ‹เค‚ เค•เฅ‹ เค›เฅ‹เคกเคผเฅ‡เค‚ + เคตเฅ€เคกเคฟเคฏเฅ‹.เคฐเคฟเคฒเฅ€เคœเคผเฅ‡() + + + # เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model1 = YOLO('yolov8n.pt') + model2 = YOLO('yolov8n-seg.pt') + + # เคŸเฅเคฐเฅˆเค•เคฐ เค•เฅ‡ เคฒเคฟเค เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒเฅ‡เค‚ เคชเคฐเคฟเคญเคพเคทเคฟเคค เค•เคฐเฅ‡เค‚ + video_file1 = "path/to/video1.mp4" # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคตเฅ‡เคฌเค•เฅˆเคฎ เค•เฅ‡ เคฒเคฟเค 0 + video_file2 = 0 # เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคตเฅ‡เคฌเค•เฅˆเคฎ เค•เฅ‡ เคฒเคฟเค 0, เคฌเคพเคนเคฐเฅ€ เค•เฅˆเคฎเคฐเคพ เค•เฅ‡ เคฒเคฟเค 1 + + # เคŸเฅเคฐเฅˆเค•เคฐ เคฅเฅเคฐเฅ‡เคก เคธเคฌเคธเฅ‡ เคŠเคชเคฐ เคฌเคจเคพเคเค‚ + tracker_thread1 = threading.Thread(target=run_tracker_in_thread, args=(video_file1, model1, 1), daemon=True) + tracker_thread2 = threading.Thread(target=run_tracker_in_thread, args=(video_file2, model2, 2), daemon=True) + + # เคŸเฅเคฐเฅˆเค•เคฐ เคฅเฅเคฐเฅ‡เคก เคชเฅเคฐเคพเคฐเค‚เคญ เค•เคฐเฅ‡เค‚ + tracker_thread1.start() + tracker_thread2.start() + + # เคŸเฅเคฐเฅˆเค•เคฐ เคฅเฅเคฐเฅ‡เคก เค•เฅ€ 
เคชเฅเคฐเคคเฅ€เค•เฅเคทเคพ เค•เคฐเฅ‡เค‚ + tracker_thread1.join() + tracker_thread2.join() + + # เคธเคญเฅ€ เคŸเฅเคฐเฅˆเค•เคฐ เคฅเฅเคฐเฅ‡เคกเฅ‹เค‚ เค•เฅ‡ เคจเคฟเคชเคŸเคพเค เคœเคพเคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ, เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคตเคฟเค‚เคกเฅ‹เคœ เคฌเค‚เคฆ เค•เคฐเฅ‡เค‚ + cv2.destroyAllWindows() + ``` + +เคฏเคน เค‰เคฆเคพเคนเคฐเคฃ เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคœเฅ‹เคกเคผเค•เคฐ เค”เคฐ เค‡เคธเฅ€ เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค”เคฐ เค…เคงเคฟเค• เคตเฅ€เคกเคฟเคฏเฅ‹ เคซเคผเคพเค‡เคฒ เค”เคฐ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคฒเคฟเค เคฌเคพเคนเคฐเฅ€ เคฅเฅเคฐเฅ‡เคก เคฌเคจเคพ เค•เคฐ เค‡เคธเฅ‡ เค•เคพเคฐเฅเคฏเคพเคจเฅเคตเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค†เคธเคพเคจเฅ€ เคธเฅ‡ เคตเคฟเคธเฅเคคเคพเคฐเคฟเคค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +## เคจเค เคŸเฅเคฐเฅˆเค•เคฐเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเคนเคฏเฅ‹เค— เคฆเฅ‡เค‚ + +เค•เฅเคฏเคพ เค†เคช เคฌเคนเฅ-เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคฎเฅ‡เค‚ เคฎเคพเคนเคฟเคฐ เคนเฅˆเค‚ เค”เคฐ เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเคฟเค•เฅเคธ YOLO เค•เฅ‡ เคธเคพเคฅ เคเค• เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคเคฒเฅเค—เฅ‹เคฐเคฟเคฆเคฎ เค•เฅ‹ เคธเคซเคฒเคคเคพเคชเฅ‚เคฐเฅเคตเค• เค…เคฎเคฒ เคฎเฅ‡เค‚ เคฒเคพเคฏเคพ เคนเฅˆ? เคนเคฎ เค†เคชเค•เฅ‹ [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) เคฎเฅ‡เค‚ เคนเคฎเคพเคฐเฅ‡ เคŸเฅเคฐเฅˆเค•เคฐ เค–เค‚เคก เค•เฅ‡ เคฒเคฟเค เคฏเฅ‹เค—เคฆเคพเคจ เคฆเฅ‡เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค†เคฎเค‚เคคเฅเคฐเคฟเคค เค•เคฐเคคเฅ‡ เคนเฅˆเค‚! เค†เคชเค•เคพ เคตเคพเคธเฅเคคเคตเคฟเค• เคฆเฅเคจเคฟเคฏเคพ เค•เฅ‡ เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เค”เคฐ เคธเคฎเคพเคงเคพเคจ เค†เคชเค•เฅ‡ เคธเคฎเฅเคฆเคพเคฏ เค•เฅ‡ เคฒเคฟเค เค…เคฎเฅ‚เคฒเฅเคฏ เคนเฅ‹ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +เค‡เคธ เค–เค‚เคก เคฎเฅ‡เค‚ เคฏเฅ‹เค—เคฆเคพเคจ เคฆเฅ‡เค•เคฐ, เค†เคช เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเคฟเค•เฅเคธ YOLO เคซเฅเคฐเฅ‡เคฎเคตเคฐเฅเค• เค•เฅ‡ เคญเฅ€เคคเคฐ เค‰เคชเคฒเคฌเฅเคง เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคธเคฎเคพเคงเคพเคจเฅ‹เค‚ เค•เฅ€ เคตเคฟเคธเฅเคคเคพเคฐเคตเคพเคฆเฅ€ เคธเฅ‚เคšเฅ€ เคฌเคขเคผเคพ เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เคœเฅ‹ เค‰เคฒเฅเคŸเฅเคฐเคพเคฒเคฟเคŸเคฟเค•เฅเคธ YOLO เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เค•เคพเคฎ เค•เคฐ เคฐเคนเฅ‡ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เค…เคคเฅเคฏเคงเคฟเค• เคธเคฎเคฐเฅเคชเคฃเคถเฅ€เคฒเคคเคพ เค”เคฐ เค‰เคชเคฏเฅ‹เค—เฅ€เคคเคพ เคœเฅ‹เคกเคผเคคเฅ‡ เคนเฅˆเค‚เฅค + +เค…เคชเคจเฅ€ เคฏเฅ‹เค—เคฆเคพเคจ เค•เฅ€ เคถเฅเคฐเฅเค†เคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เค•เฅƒเคชเคฏเคพ เคนเคฎเคพเคฐเฅ‡ [เคฏเฅ‹เค—เคฆเคพเคจ เค—เคพเค‡เคก](https://docs.ultralytics.com/help/contributing) เค•เคพ เคธเค‚เคฆเคฐเฅเคญ เคฒเฅ‡เค‚ เคœเคนเคพเค‚ เคชเคฐเคพเคฎเคฐเฅเคถเคฟเค•เคพ เคชเฅเคฐเคธเฅเคคเฅเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคธเคšเฅ‡เคค เคจเคฟเคฐเฅเคฆเฅ‡เคถ เคฆเคฟเค เค—เค เคนเฅˆเค‚เฅค เคนเคฎ เค‡เค‚เคคเคœเคพเคฐ เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚ เคฆเฅ‡เค–เฅ‡เค‚ เค†เคช เค•เฅเคฏเคพ เคฒเคพเคคเฅ‡ เคนเฅˆเค‚! + +เคธเคพเคฅ เคฎเฅ‡เค‚, เคšเคฒเคฟเค Ultralytics YOLO เคชเคพเคฐเคฟเคธเฅเคฅเคฟเคคเคฟเค•เฅ€ เค•เฅ€ เค—เคคเคฟเคถเฅ€เคฒเคคเคพ เค•เฅ‹ เคฎเคœเคฌเฅ‚เคค เค•เคฐเฅ‡เค‚ ๐Ÿ™! 
+ +[เคตเคพเคนเคจ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab + +[เคฒเฅ‹เค— เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527 + +[เคฎเค›เคฒเฅ€ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—]: https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142 diff --git a/ultralytics/docs/hi/modes/track.md:Zone.Identifier b/ultralytics/docs/hi/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/train.md b/ultralytics/docs/hi/modes/train.md new file mode 100755 index 0000000..5447a84 --- /dev/null +++ b/ultralytics/docs/hi/modes/train.md @@ -0,0 +1,293 @@ +--- +comments: true +description: Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ YOLOv8 เคฎเฅ‰เคกเคฒ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคšเคฐเคฃเคฌเคฆเฅเคง เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพ, เคเค•เคฒ-GPU เค”เคฐ เคฌเคนเฅ-GPU เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เฅ‡ เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅเฅค +keywords: Ultralytics, YOLOv8, YOLO, เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ, เคŸเฅเคฐเฅ‡เคจ เคฎเฅ‹เคก, เค•เคธเฅเคŸเคฎ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ, GPU เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค—, เคฌเคนเฅ-GPU, เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ, CLI เค‰เคฆเคพเคนเคฐเคฃ, Python เค‰เคฆเคพเคนเคฐเคฃ +--- + +# Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— + +Ultralytics YOLO เค‡เค•เฅ‹เคธเคฟเคธเฅเคŸเคฎ เค”เคฐ เค‡เค‚เคŸเฅ€เค—เฅเคฐเฅ‡เคถเคจ + +## เคชเคฐเคฟเคšเคฏ + +เคเค• เค—เคนเคฐเฅ€ เคฏเคพเคจเฅเคคเฅเคฐเคฟเค•เฅ€ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เคฆเฅ‡เคจเคพ เค‰เคธเฅ‡ เคกเฅ‡เคŸเคพ เค–เคฟเคฒเคพเคคเฅ‡ เคนเฅเค เค”เคฐ เค‡เคธเค•เฅ‡ เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅเคธ เค•เฅ‹ เคธเคฎเคพเคฏเฅ‹เคœเคฟเคค เค•เคฐเค•เฅ‡ เคธเคนเฅ€ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ€ เคธเคพเคฎเคฐเฅเคฅเฅเคฏ เค•เฅ‹ เคถเคพเคฎเคฟเคฒ เค•เคฐเคคเคพ เคนเฅˆเฅค YOLOv8 เคฎเฅ‰เคกเคฒ เคฎเฅ‡เค‚ Ultralytics YOLO เค•เฅ‡ เคŸเฅเคฐเฅ‡เคจ เคฎเฅ‹เคก เคจเฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒเฅเคธ เค•เฅ‹ เคชเฅเคฐเคญเคพเคตเฅ€ เค”เคฐ เคฆเค•เฅเคท เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เค‡เค‚เคœเฅ€เคจเคฟเคฏเคฐเคฟเค‚เค— เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เค†เคงเฅเคจเคฟเค• เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เค•เฅเคทเคฎเคคเคพเค“เค‚ เค•เคพ เคชเฅ‚เคฐเฅ€ เคคเคฐเคน เคธเฅ‡ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพ เคธเค•เฅ‡เฅค เคฏเคน เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพ เค‰เคจ เคธเคญเฅ€ เคตเคฟเคตเคฐเคฃเฅ‹เค‚ เค•เฅ‹ เค•เคตเคฐ เค•เคฐเคจเฅ‡ เค•เคพ เค‰เคฆเฅเคฆเฅ‡เคถเฅเคฏ เคฐเค–เคคเฅ€ เคนเฅˆ เคœเฅ‹ เค†เคชเค•เฅ‹ YOLOv8 เค•เฅ‡ เคฎเคœเคฌเฅ‚เคค เคธเฅ‡เคŸ เค‘เคซเคผ เคธเฅเคตเคฟเคงเคพเค“เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ‡ เค–เฅเคฆ เค•เฅ‡ เคฎเฅ‰เคกเคฒเฅเคธ เค•เฅ‹ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เคถเฅเคฐเฅ‚ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคšเคพเคนเคฟเคเฅค + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: Google Colab เคฎเฅ‡เค‚ เค…เคชเคจเฅ‡ เค•เคธเฅเคŸเคฎ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคเค• YOLOv8 เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเคจเฅ‡ เค•เคพ เคคเคฐเฅ€เค•เคพเฅค +

+ +## เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค Ultralytics YOLO เค•เคพ เคšเคฏเคจ เค•เฅเคฏเฅ‹เค‚ เค•เคฐเฅ‡เค‚? + +เคฏเคนเคพเค‚ YOLOv8 เค•เฅ‡ เคŸเฅเคฐเฅ‡เคจ เคฎเฅ‹เคก เค•เฅ‹ เคšเฅเคจเคจเฅ‡ เค•เฅ‡ เค•เฅเค› เคชเฅเคฐเคฎเฅเค– เค•เคพเคฐเคฃ เคนเฅˆเค‚: + +- **เคฆเค•เฅเคทเคคเคพ:** เค…เคชเคจเฅ‡ เคนเคพเคฐเฅเคกเคตเฅ‡เคฏเคฐ เคธเฅ‡ เคธเคฌเคธเฅ‡ เค…เคงเคฟเค• เคฒเคพเคญ เค‰เค เคพเคเค‚, เคšเคพเคนเฅ‡ เค†เคช เคธเคฟเค‚เค—เคฒ-GPU เคธเฅ‡เคŸเค…เคช เคชเคฐ เคนเฅ‹เค‚ เคฏเคพ เค•เคˆ GPU เคชเคฐ เคธเฅเค•เฅ‡เคฒ เค•เคฐ เคฐเคนเฅ‡เค‚ เคนเฅ‹เค‚เฅค +- **เคชเฅเคฐเคพเค•เฅเคคเคฟเคถเคฟเคฒเฅเคคเคพ:** COCO, VOC เค”เคฐ ImageNet เคœเฅˆเคธเฅ‡ เคคเคคเฅเคชเคฐเคคเคพ เค‰เคชเคฒเคฌเฅเคง เคกเฅ‡เคŸเคพเคธเฅ‡เคŸเฅ‹เค‚ เค•เฅ‡ เค…เคฒเคพเคตเคพ เค•เคธเฅเคŸเคฎ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเฅ‡เค‚เฅค +- **เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เคฎเคฟเคคเฅเคฐเคชเฅ‚เคฐเฅเคฃเคคเคพ:** เคธเฅ€เคงเฅ‡ เค”เคฐ เคถเค•เฅเคคเคฟเคถเคพเคฒเฅ€ CLI เค”เคฐ Python เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธ เค•เคพ เค‰เคชเคฏเฅ‹เค— เคเค• เคธเฅ€เคงเฅ€ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค…เคจเฅเคญเคต เค•เฅ‡ เคฒเคฟเคเฅค +- **เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เคฒเคšเฅ€เคฒเคพเคชเคจ:** เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‹ เคธเฅเคงเคพเคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคตเฅˆเคถเฅเคตเคฟเค• เคธเฅเคคเคฐ เคชเคฐ เค…เคจเฅเค•เฅ‚เคฒเคจ เคฏเฅ‹เค—เฅเคฏ เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐเฅ‹เค‚ เค•เฅ€ เคเค• เคตเฅเคฏเคพเคชเค• เคถเฅเคฐเฅƒเค‚เค–เคฒเคพเฅค + +### เคŸเฅเคฐเฅ‡เคจ เคฎเฅ‹เคก เค•เฅ€ เคชเฅเคฐเคฎเฅเค– เคธเฅเคตเคฟเคงเคพเคเค‚ + +เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค YOLOv8 เค•เฅ‡ เคŸเฅเคฐเฅ‡เคจ เคฎเฅ‹เคก เค•เฅ€ เค•เฅเค› เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคธเฅเคตเคฟเคงเคพเคเค‚ เคนเฅˆเค‚: + +- **เคธเฅเคตเคค: เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคกเคพเค‰เคจเคฒเฅ‹เคก:** COCO, VOC เค”เคฐ ImageNet เคœเฅˆเคธเฅ‡ เคฎเคพเคจเค• เคกเฅ‡เคŸเคพเคธเฅ‡เคŸเฅเคธ เค•เฅ‹ เคชเคนเคฒเฅ€ เคฌเคพเคฐ เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เคชเคฐ เคธเฅเคตเคค: เคกเคพเค‰เคจเคฒเฅ‹เคก เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค +- **เคฌเคนเฅ-GPU เคธเคฎเคฐเฅเคฅเคจ:** เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ€ เค—เคคเคฟ เค•เฅ‹ เคคเฅ‡เคœ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเคชเฅเคฐเคฏเฅ‹เค— เคฎเฅ‡เค‚ เค•เคˆ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค +- **เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ:** เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค•เฅ‹ เคฏเคพเคฎเคฒ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เคซเคผเคพเค‡เคฒ เคฏเคพ CLI เคคเคฐเฅเค•เฅ‹เค‚ เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเคจเฅ‡ เค•เคพ เคตเคฟเค•เคฒเฅเคชเฅค +- **เคฆเฅƒเคถเฅเคฏเฅ€เค•เคฐเคฃ เค”เคฐ เคฎเฅ‰เคจเคฟเคŸเคฐเคฟเค‚เค—:** เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฎเฅˆเคŸเฅเคฐเคฟเค•เฅเคธ เค•เฅ‡ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค”เคฐ เคธเฅ€เค–เคจเฅ‡ เค•เฅ€ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‡ เคฆเฅƒเคถเฅเคฏเฅ€เค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เคฌเฅ‡เคนเคคเคฐ เค…เคตเคงเคพเคฐเคฃเคพ เค•เฅ‡ เคฒเคฟเคเฅค + +!!! 
Tip "เคŸเคฟเคช" + + * COCO, VOC, ImageNet เค”เคฐ เค•เคˆ เค…เคจเฅเคฏ เคœเฅˆเคธเฅ‡ YOLOv8 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคนเคฒเฅ‡ เคธเฅ‡ เค†เคชเฅ‚เคฐเฅเคคเคฟ เคนเฅ‹ เคœเคพเคคเฅ‡ เคนเฅˆเค‚, เค‰เคชเคฏเฅ‹เค— เคชเคฐ เคธเฅเคตเคค: เคกเคพเค‰เคจเคฒเฅ‹เคก เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚, เคœเฅˆเคธเฅ‡ `yolo train data=coco.yaml` + +## เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ + +เคธเฅŒเค‚เคงเคพเค‚เค—เฅเคฐเคนเฅ€ เค•เฅ‹เคก เค•เฅ‹ เคจเคœเคฐเค…เค‚เคฆเคพเคœ เค•เคฟเค เคฌเคฟเคจเคพ เค•เฅ‹เคˆ เค‰เคคเฅเคคเคฐ เคฆเฅ‡เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เค•เฅ‹เค•เฅ‹128 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‡ เคฒเคฟเค YOLOv8n เคชเคฐ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เคฐเฅ‡เค‚เฅค เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค‰เคชเค•เคฐเคฃ `device` เคคเคฐเฅเค• เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เค†เค—เคฐ เค•เฅ‹เคˆ เคคเคฐเฅเค• เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เคจเคนเฅ€เค‚ เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคคเฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ `device=0` เคฒเค—เคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค GPU `device=0` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค—เคพ, เค…เคจเฅเคฏเคฅเคพ `device=cpu` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพเคเค—เคพเฅค เคชเฅ‚เคฐเฅ€ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคคเคฐเฅเค•เฅ‹เค‚ เค•เฅ€ เคชเฅ‚เคฐเฅ€ เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค เคจเฅ€เคšเฅ‡ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เคธเคฟเค‚เค—เคฒ-เคœเฅ€เคชเฅ€เคฏเฅ‚ เค”เคฐ เคธเฅ€เคชเฅ€เคฏเฅ‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค‰เคฆเคพเคนเคฐเคฃ" + + เค‰เคชเค•เคฐเคฃ เคธเฅเคตเคค: เคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เคฏเคฆเคฟ เคธเคพเคเคพ-GPU เค‰เคชเคฒเคฌเฅเคง เคนเฅ‹ เคคเฅ‹ เค‰เคธเค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพเคเค—เคพ, เค…เคจเฅเคฏเคฅเคพ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ€เคชเฅ€เคฏเฅ‚ เคชเคฐ เคถเฅเคฐเฅ‚ เคนเฅ‹เค—เคพเฅค + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.yaml') # YAML เคธเฅ‡ เคเค• เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค‚ + model = YOLO('yolov8n.pt') # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคธเคฟเคซเคพเคฐเคฟเคถ เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆ, เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # YAML เคธเฅ‡ เคฌเคจเคพเคเค‚ เค”เคฐ เคตเคœเคจ เคฎเคพเคฐเฅ‡ เคŸเฅเคฐเคพเค‚เคธเคซเคฐ เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "CLI" + + ```bash เคฌเฅˆเคถ + # YAML เคธเฅ‡ เคเค• เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค‚ เค”เคฐ เคถเฅเคฐเฅ‚ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค *.pt เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # YAML เคธเฅ‡ เคเค• เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค‚, เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคตเคœเคจเฅ‹เค‚ เค•เฅ‹ เค‡เคธเคฎเฅ‡เค‚ เคธเฅเคฅเคพเคจเคพเค‚เคคเคฐเคฟเคค เค•เคฐเฅ‡เค‚ เค”เคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### เคฌเคนเฅ-เคœเฅ€เคชเฅ€เคฏเฅ‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + +เคฌเคนเฅ-เคœเฅ€เคชเฅ€เคฏเฅ‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคเค•เคพเคงเคฟเค• เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เคธเฅ‡ เค‰เคชเคฒเคฌเฅเคง เคนเฅ‹เคคเคพ เคนเฅˆ เค”เคฐ เค‰เคชเค•เคฐเคฃ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคญเฅ€ Python API เค•เฅ‡ 
เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเฅค เคฌเคนเฅ-เคœเฅ€เคชเฅ€เคฏเฅ‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เค†เคช เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเคพ เคšเคพเคนเคคเฅ‡ เคนเฅˆเค‚ เค‰เคจ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค‰เคชเค•เคฐเคฃ เค†เคˆเคกเฅ€เคœเฅ€ เค•เฅ‹ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเฅ‡เค‚เฅค + +!!! Example "เคฌเคนเฅ-เคœเฅ€เคชเฅ€เคฏเฅ‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เคพ เค‰เคฆเคพเคนเคฐเคฃ" + + 2 เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคธเคพเคฅ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚, CUDA เค‰เคชเค•เคฐเคฃ 0 เค”เคฐ 1 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค เค…เคคเคฟเคฐเคฟเค•เฅเคค เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคฒเคฟเค เคตเคฟเคธเฅเคคเคพเคฐ เค•เคฐเฅ‡เค‚ เคœเคฟเคคเคจเคพ เค†เคตเคถเฅเคฏเค• เคนเฅ‹เฅค + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคธเคฟเคซเคพเคฐเคฟเคถ เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆ, เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฆเฅ‹ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "CLI" + + ```bash + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค *.pt เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคœเฅ€เคชเฅ€เคฏเฅ‚ 0 เค”เคฐ 1 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### เคเคชเคฒ M1 เค”เคฐ M2 MPS เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + +เคเคชเคฒ M1 เค”เคฐ M2 เคšเคฟเคชเฅเคธ เค•เฅ‡ เคธเคฎเคฐเฅเคฅเคจ เค•เฅ‡ เคธเคพเคฅ Ultralytics YOLO เคฎเฅ‰เคกเคฒ เคชเคฐ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เคฐเคจเคพ เค…เคฌ เคเคธเฅ‡ เค‰เคชเค•เคฐเคฃเฅ‹เค‚ เคชเคฐ เคธเค‚เคญเคต เคนเฅ‹เคคเคพ เคนเฅˆ เคœเคนเคพเค‚ เคถเค•เฅเคคเคฟเคถเคพเคฒเฅ€ เคฎเฅ‡เคŸเคฒ เคชเคฐเคซเคพเคฐเฅเคฎเฅ‡เค‚เคธ เคถเฅ‡เคกเคฐ (MPS) เคซเคผเฅเคฐเฅ‡เคฎเคตเคฐเฅเค• เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค MPS เค•เค‚เคชเฅเคฏเฅ‚เคŸเฅ‡เคถเคจ เค”เคฐ เค›เคตเคฟ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‹ เค†เคˆเคฏเฅ‚เคชเฅ€ เคธเฅเคฒเคฟเค•เฅ‰เคจ เคชเคฐ เคจเคฟเคทเฅเคชเคพเคฆเคฟเคค เค•เคฐเคจเฅ‡ เค•เคพ เคเค• เค‰เคšเฅเคš เค•เคพเคฐเฅเคฏเค•เฅเคทเคฎเคคเคพ เคคเคฐเฅ€เค•เคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค + +เคเคชเคฒ M1 เค”เคฐ M2 เคšเคฟเคชเฅเคธ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เค†เคชเค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคถเฅเคฐเฅ‚ เค•เคฐเคคเฅ‡ เคธเคฎเคฏ "mps" เค•เฅ‹ เค…เคชเคจเฅ‡ เค‰เคชเค•เคฐเคฃ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเคจเคพ เคšเคพเคนเคฟเคเฅค เคจเฅ€เคšเฅ‡ Python เค”เคฐ เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เคฎเฅ‡เค‚ เค‡เคธเฅ‡ เค•เฅˆเคธเฅ‡ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค‰เคธเค•เคพ เคเค• เค‰เคฆเคพเคนเคฐเคฃ เคฆเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ: + +!!! 
Example "MPS เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เคพ เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคธเคฟเคซเคพเคฐเคฟเคถ เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆ, เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฆเฅ‹ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "CLI" + + ```bash + # เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค *.pt เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคœเฅ€เคชเฅ€เคฏเฅ‚ 0 เค”เคฐ 1 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +M1/M2 เคšเคฟเคชเฅเคธ เค•เฅ‡ เค—เคฃเคฟเคคเคพเคคเฅเคฎเค• เคถเค•เฅเคคเคฟ เค•เคพ เคฒเคพเคญ เคฒเฅ‡เคคเฅ‡ เคนเฅเค, เค‡เคธเคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ€ เค•เคพเคฐเฅเคฏเค•เฅเคทเคฎเคคเคพ เค•เฅ‹ เค”เคฐ เคฌเคขเคผเคพเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค เค…เคงเคฟเค• เคตเคฟเคธเฅเคคเฅƒเคค เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคจ เค”เคฐ เค‰เคจเฅเคจเคค เคฐเฅ‚เคชเคฐเฅ‡เค–เคพ เคตเคฟเค•เคฒเฅเคชเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค, เค•เฅƒเคชเคฏเคพ [PyTorch MPS เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ](https://pytorch.org/docs/stable/notes/mps.html) เค•เคพ เคธเค‚เคฆเคฐเฅเคญ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +### เคฌเคพเคงเคฟเคค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‹ เคฌเคนเคพเคฒ เค•เคฐเคจเคพ + +เคชเคนเคฒเฅ‡ เคนเฅ€ เคฌเคšเฅ‡ เคนเฅเค เค…เคตเคธเฅเคฅเคพ เค•เฅ€ เคคเคพเคฒเคฟเค•เคพ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเคพ, เค—เคนเคฐเฅ€ เคฏเคพเคจเฅเคคเฅเคฐเคฟเค•เฅ€ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเคพเคฅ เค•เคพเคฎ เค•เคฐเคคเฅ‡ เคธเคฎเคฏ เคเค• เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคธเฅเคตเคฟเคงเคพ เคนเฅˆเฅค เคฏเคน เคตเคฟเคตเคฟเคง เคชเคฐเคฟเคฆเฅƒเคถเฅเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅˆ, เคœเฅˆเคธเฅ‡ เคœเคฌ เค…เคชเฅเคฐเคคเฅเคฏเคพเคถเคฟเคค เคฐเฅ‚เคช เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคฐเฅเค• เค—เคˆ เคนเฅ‹, เคฏเคพ เคœเคฌ เค†เคช เคจเค เคกเฅ‡เคŸเคพ เค•เฅ‡ เคธเคพเคฅ เคฏเคพ เค…เคงเคฟเค• เค‡เคชเฅ‰เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค เคเค• เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคœเคพเคฐเฅ€ เคฐเค–เคจเคพ เคšเคพเคนเคคเฅ‡ เคนเฅˆเค‚เฅค + +เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฌเคนเคพเคฒ เค•เคฐเคจเฅ‡ เคชเคฐ, Ultralytics YOLO เค…เค‚เคคเคฟเคฎ เคธเคนเฅ‡เคœเฅ‡ เค—เค เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคตเคœเคจเฅ‹เค‚ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เค…เคฆเฅเคฏเคคเคจเค•เคฐเฅเคคเคพ เค•เฅ€ เคธเฅเคฅเคฟเคคเคฟ, เคถเคฟเค•เฅเคทเคพ เคฆเคฐ เคจเคฟเคฏเฅ‹เคœเค• เค”เคฐ เคฏเฅเค— เค•เฅเคฐเคฎเคพเค‚เค• เค•เฅ‹ เคญเฅ€ เคชเฅเคจเคฐเฅเคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค เค‡เคธเคธเฅ‡ เค†เคช เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคฌเคฟเคจเคพ เค•เคฟเคธเฅ€ เค—เคกเคผเคฌเคกเคผ เค•เฅ‡ เคฌเคพเคนเคฐ เค›เฅ‹เคกเคผ เคฆเฅ‡เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +เค†เคช เค†เคธเคพเคจเฅ€ เคธเฅ‡ Ultralytics YOLO เคฎเฅ‡เค‚ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‹ เคฌเคนเคพเคฒ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เคœเคฌ เค†เคช `train` เคตเคฟเคงเคฟ เค•เฅ‹ เคฌเฅเคฒเคพเคจเฅ‡ เคชเคฐ `resume` เคคเคฐเฅเค• เค•เฅ‹ `True` เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเค•เฅ‡ เค”เคฐ เค†เค‚เคถเคฟเค• เคฐเฅ‚เคช เคธเฅ‡ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ `pt` เคซเคผเคพเค‡เคฒ เค•เฅ‡ เคชเคฅ เค•เฅ‹ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเค•เฅ‡, เค”เคฐ เค†เคชเค•เคพ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เคœเคนเคพเค‚ เคธเฅ‡ เค›เฅ‹เคกเคผ เค—เคˆ เคฅเฅ€ เคธเฅ‡ 
เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคœเคพเคฐเฅ€ เคฐเค–เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค `train` เคซเคผเค‚เค•เฅเคถเคจ เค•เฅ‹ เค•เคฎเฅเคฏเฅเคŸ เค•เฅ€เคœเคฟเคเฅค + +เคจเฅ€เคšเฅ‡ เคเค• เค‰เคฆเคพเคนเคฐเคฃ เคฆเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆ เค•เคฟ เค•เฅˆเคธเฅ‡ เคชเคพเคฏเคฅเคจ เค”เคฐ เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เคฎเฅ‡เค‚ เคเค• เค…เคตเคฟเคฐเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‹ เค•เฅˆเคธเฅ‡ เคฌเคนเคพเคฒ เค•เคฐเฅ‡เค‚: + +!!! Example "เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฌเคนเคพเคฒ เค•เคฐเคจเฅ‡ เค•เคพ เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/last.pt') # เคเค• เค†เค‚เคถเคฟเค•-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฌเคนเคพเคฒ เค•เคฐเฅ‡เค‚ + results = model.train(resume=True) + ``` + + === "CLI" + ```bash เคถเฅˆเคฒ + # เคเค• เค…เคตเคฟเคฐเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฌเคนเคพเคฒ เค•เคฐเฅ‡เค‚ + yolo train resume model=path/to/last.pt + ``` + +`resume=True` เคธเฅ‡เคŸ เค•เคฐเค•เฅ‡, `train` เคซเคผเค‚เค•เฅเคถเคจ เคชเคนเคฒเฅ‡ เคธเฅ‡ เคฌเคšเฅ‡ เคนเฅเค เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคธเฅเคฅเคพเคจ เคฎเฅ‡เค‚ เคฌเคšเฅ‡ เคนเฅเค เค…เคตเคธเฅเคฅเคพ เคฎเฅ‡เค‚ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคœเคพเคฐเฅ€ เคฐเค–เฅ‡เค—เคพเฅค เคฏเคฆเคฟ `resume` เคคเคฐเฅเค• เค›เฅ‹เคกเคผ เคฆเคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ เคฏเคพ `False` เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคคเฅ‹ `train` เคซเคผเค‚เค•เฅเคถเคจ เคเค• เคจเคฏเคพ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเคคเฅเคฐ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค—เคพเฅค + +เคฏเคพเคฆ เคฐเค–เฅ‡เค‚ เค•เคฟ เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฐเฅ‚เคช เคธเฅเคฅเคฟเคคเคฟ เคชเคฐ เคฆเคถเคพ-เค…เคคเฅ€เคค เคชเฅเคฐเคคเคฟ เค•เฅ‡ เค…เค‚เคค เคฎเฅ‡เค‚ เคฌเคšเคพเคตเคพเคคเฅเคฎเค• เคธเค‚เค—เฅเคฐเคนเคฃ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚, เคฏเคพ `save_period` เคคเคฐเฅเค• เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคจเคฟเคถเฅเคšเคฟเคค เค…เค‚เคคเคฐเคพเคฒ เคชเคฐ, เค‡เคธเคฒเคฟเค เค†เคชเค•เฅ‹ เคเค• เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฆเฅŒเคกเคผ เค•เฅ‹ เคฌเคนเคพเคฒ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฎ เคธเฅ‡ เค•เคฎ 1 เค‡เคชเฅ‰เค•เฅเคธ เคชเฅ‚เคฐเฅเคฃ เค•เคฐเคจเคพ เคนเฅ‹เค—เคพเฅค + +## เคคเคฐเฅเค• + +YOLO เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡เคŸเคฟเค‚เค— เคตเคฟเคญเคฟเคจเฅเคจ เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค”เคฐ เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅˆเค‚ เคœเฅ‹ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคเค• เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เคนเฅ‹เคคเคพ เคนเฅˆเฅค เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฎเฅ‡เค‚ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เค—เคคเคฟ เค”เคฐ เคจเคฟเคฏเคฎเคฟเคคเคคเคพ เคชเคฐ เคชเฅเคฐเคญเคพเคต เคชเคกเคผ เคธเค•เคคเคพ เคนเฅˆเฅค เค•เฅเค› เคธเคพเคฎเคพเคจเฅเคฏ YOLO เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฎเฅ‡เค‚ เคฌเฅˆเคš เค•เคพ เค†เค•เคพเคฐ, เคธเฅ€เค–เคจเฅ‡ เคฆเคฐ, เคฎเฅ‹เคฎเฅ‡เค‚เคŸเคฎ เค”เคฐ เคตเฅ‡เคŸ เคกเคฟเค•เฅ‡ เคœเฅˆเคธเฅ€ เคฎเคพเคจเค• เค…เคฆเฅเคฏเคคเคจ เคตเคพเคฒเฅ€ เคšเฅ€เคœเฅ‡เค‚ เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคชเฅเคฐเคญเคพเคตเฅ€ เคขเค‚เค— เคธเฅ‡ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเคพเคตเคงเคพเคจเฅ€เคชเฅ‚เคฐเฅเคตเค• เคธเค‚เคฏเฅ‹เคœเคฟเคค เค•เคฐเคจเคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆ เค”เคฐ เคเค• เคฆเคฟเค เค—เค เค•เคพเคฐเฅเคฏ เค•เฅ‡ เคฒเคฟเค เคถเฅเคฐเฅ‡เคฃเฅ€ เคฎเฅ‡เค‚ เคธเคฌเคธเฅ‡ เค…เคšเฅเค›เฅ‡ เคชเคฐเคฟเคฃเคพเคฎ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเฅ‡ 
เค•เฅ‡ เคฒเคฟเค เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‡ เคธเคพเคฅ เคธเค‚เค—เคคเคจ เค•เคฐเคจเฅ‡ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค + +| เค•เฅเค‚เคœเฅ€ | เคฎเคพเคจ | เคตเคฟเคตเคฐเคฃ | +|-------------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `model` | `None` | เคฎเฅ‰เคกเคฒ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคšเคพเคนเฅ‡ yolov8n.pt, yolov8n.yaml | +| `data` | `None` | เคกเฅ‡เคŸเคพ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคšเคพเคนเฅ‡ coco128.yaml | +| `epochs` | `100` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคฌเคพเคฐ เค•เฅ€ เคธเค‚เค–เฅเคฏเคพ | +| `patience` | `50` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เค†เคฐเค‚เคญ เคฎเฅ‡เค‚ เค•เฅ‹เคˆ เคฆเฅ‡เค–เคจเฅ‡ เค•เฅ‡ เคฏเฅ‹เค—เฅเคฏ เคธเฅเคงเคพเคฐ เค•เฅ‡ เคฒเคฟเค เค‡เคชเฅ‰เค•เฅเคธ เค‡เค‚เคคเคœเคพเคฐ เค•เคฐเฅ‡เค‚ | +| `batch` | `16` | เคชเฅเคฐเคคเคฟ เคฌเฅˆเคš เค›เคตเคฟ เค•เฅ€ เคธเค‚เค–เฅเคฏเคพ (-1 เค•เฅ‡ เคฒเคฟเค AutoBatch) | +| `imgsz` | `640` | เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เค›เคตเคฟเคฏเฅ‹เค‚ เค•เคพ เค†เค•เคพเคฐ เคฎเคพเคจเคฆเค‚เคก | +| `save` | `True` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคจเคฟเคฏเค‚เคคเฅเคฐเคฟเคคเค• เค”เคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคชเคฐเคฟเคฃเคพเคฎ เคธเคนเฅ‡เคœเฅ‡เค‚ | +| `save_period` | `-1` | เคชเฅเคฐเคคเฅเคฏเฅ‡เค• x เคˆเคชเฅ‰เค•เฅเคธ เคชเคฐ เคจเคฟเคฐเฅเคตเคพเคšเคฟเคค เคšเฅ‡เค•เคชเฅเคตเคพเค‡เค‚เคŸ (1 เคธเฅ‡ เค•เคฎ เคฆเฅเคตเคพเคฐเคพ เค…เค•เฅเคทเคฎ) | +| `cache` | `False` | [เคธเคนเฅ€/เคฐเฅˆเคฎ](https://github.com/rwightman/pytorch-image-models/blob/master/timm/data/constants.py) เคฏเคพ เค–เฅ‹เคฒเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฌเฅเคฐเคพเค‰เคœเคผเคฐ เค•เฅ‡ เคฒเคฟเค เคฌเฅเคฐเคพเค‰เคœเคผเคฐ เคกเฅ‡เคŸเคพ เคฒเฅ‹เคก เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `device` | `None` | เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเค•เคฐเคฃ, เค‰เคฆเคพเคนเคฐเคฃ เค•เฅ‡ เคฒเคฟเค cuda เค‰เคชเค•เคฐเคฃ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ device=0 เคฏเคพ device=0,1 เคฏเคพ device=cpu | +| `workers` | `8` | เคตเคฐเฅเค•เคฐ เคธเฅ‚เคคเฅเคฐเฅ‹เค‚ เค•เฅ€ เคธเค‚เค–เฅเคฏเคพ | +| `project` | `None` | เคชเฅเคฐเฅ‹เคœเฅ‡เค•เฅเคŸ เค•เคพ เคจเคพเคฎ | +| `name` | `None` | เคชเฅเคฐเคฏเฅ‹เค— เค•เคพ เคจเคพเคฎ | +| `exist_ok` | `False` | เคฎเฅŒเคœเฅ‚เคฆเคพ เคชเฅเคฐเคฏเฅ‹เค— เค•เฅ‹ เค…เคงเคฟเคฒเฅ‡เค–เคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฏเคพ เคจเคนเฅ€เค‚ | +| `pretrained` | `True` | (เคฌเฅ‚เคฒ เคฏเคพ เคธเฅเคŸเฅเคฐเคฟเค‚เค—) เค†เคœเฅเคžเคพเคจเฅเคธเคพเคฐ เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ (เคฌเฅ‚เคฒ) เคฏเคพ เคตเคœเคจเฅ‹เค‚ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคกเคฒ เคธเฅ‡ (เคธเฅเคŸเฅเคฐเคฟเค‚เค—) | +| `optimizer` | `'auto'` | เคšเคฏเคจ เค•เฅ‡ เคฒเคฟเค เคฌเคฐเคพเคฌเคฐเฅ€=[SGD, Adam, Adamax, AdamW, NAdam, RAdam, RMSProp, auto] | +| `verbose` | `False` | เคตเคฐเฅเคฌเฅ‹เคœเคผ เค†เค‰เคŸเคชเฅเคŸ เคชเฅเคฐเคฟเค‚เคŸ เค•เคฐเฅ‡เค‚ | +| `seed` | `0` | เคจเคฟเคฏเค‚เคคเฅเคฐเคฟเคค (เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃเฅ€เคฏ) เคฌเฅ€เคœ เค•เฅ‡ เคฒเคฟเค | +| `deterministic` | `True` | เคจเคฟเคฏเค‚เคคเฅเคฐเคฟเคค เคฎเคพเคงเฅเคฏเคฎ เค•เฅ‹ เคธเค•เฅเคทเคฎ เค•เคฐเฅ‡เค‚ | +| `single_cls` | `False` | เคนเคฟเคฒ เคตเคฟเคถเฅ‡เคทเคœเฅเคžเคคเคพ เคกเฅ‡เคŸเคพ เคธเคฟเค‚เค—เคฒ-เค•เค•เฅเคทเคพ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ | +| `rect` | `False` | เคจเฅเคฏเฅ‚เคจเคคเคฎ เคชเฅˆเคกเคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคคเคฟ เคฌเฅˆเคš เคฐเฅ‹ เคŸเฅˆเคฌเฅเคฐเฅ€ เค•เฅ‡ เคธเคพเคฅ เค†เคฏเคคเคพเคคเฅเคฎเค• เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ | +| 
`cos_lr` | `False` | เคธเคพเค‡เคจ เค•เฅ‡ เคธเคพเค‡เคจ เคถเคฟเค•เฅเคทเคฃ เคฆเคฐ เคจเคฟเคฏเฅ‹เคœเค• เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `close_mosaic` | `10` | เค…เค‚เคคเคฟเคฎ เค…เคตเคงเคฟ เค•เฅ‡ เคฒเคฟเค เคฎเฅ‰เคœเคผเฅ‡เค• เคคๆ–ญเคถเฅเคฐเคพเคตเค• เคฎเฅ‡เค‚ เคฎเคพเคงเฅเคฏเคฎ เคตเฅƒเค•เฅเคทเฅ‹เค‚ เค•เฅ€ เคธเค•เฅเคทเคฎเคคเคพ (0 เค•เฅ‹ เค…เค•เฅเคทเคฎ เค•เคฐเฅ‡เค‚) | +| `resume` | `False` | เค†เค–เคฟเคฐเฅ€ เคจเคฟเคฐเฅเคตเคพเคšเคฟเคค เคšเฅ‡เค•เคชเฅเคตเคพเค‡เค‚เคŸ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฌเคนเคพเคฒ เค•เคฐเฅ‡เค‚ | +| `amp` | `True` | เค‘เคŸเฅ‹เคฎเฅ‡เคŸเคฟเค• เคฎเคฟเค•เฅเคธเฅเคก เคชเฅเคฐเฅ‡เคธเคฟเคœเคจ (AMP) เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ, เคšเคฏเคจ=[True, False] | +| `fraction` | `1.0` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค†เค‚เคถเคฟเค• (เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ 1.0, เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡เคŸ เคฎเฅ‡เค‚ เคธเคญเฅ€ เค›เคตเคฟเคฏเคพเค‚) | +| `profile` | `False` | เคฒเฅ‰เค—เคฐเฅเคธ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ ONNX เค”เคฐ TensorRT เค•เฅ€ เคธเฅเคชเฅ€เคก เคชเฅเคฐเฅ‹เคซเคผเคพเค‡เคฒ | +| `freeze` | `None` | เคถเฅเคฐเฅ‹เคฃเคฟ เค•เฅ€ เคชเคนเคฒเฅ‡ n เคชเคฐเคคเฅ‡เค‚, เคฏเคพ เคถเฅเคฐเฅ‹เคฃเคฟ เคธเฅ‚เคšเฅ€ เคฒเฅ‡เคฏเคฐ เคธเฅ‚เคšเฅ€ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคฒเฅ‰เค• เค•เคฐเฅ‡เค‚ | +| `lr0` | `0.01` | เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เคธเฅ€เค–เคจเฅ‡ เคฆเคฐ (เค‰เคฆเคพ. SGD=1E-2, Adam=1E-3) | +| `lrf` | `0.01` | เคชเคฐเคฟเคฃเคพเคฎเค•เคพเคฐเฅ€ เคธเฅ€เค–เคจเฅ‡ เคฆเคฐ (lr0 * lrf) | +| `momentum` | `0.937` | SGD เคฎเฅ‹เคฎเฅ‡เค‚เคŸเคฎ/Adam เคฌเฅ€เคŸเคพ1 | +| `weight_decay` | `0.0005` | เคถเคตเฅเคฏ เคตเคœเคจ เคฆเคฃเฅเคก 5e-4 | +| `warmup_epochs` | `3.0` | เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เค…เคตเคงเคฟ (เค…เค‚เคถเฅ‹เค‚ เคฎเฅ‡เค‚ เค เค‚เคกเคพ) | +| `warmup_momentum` | `0.8` | เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เค…เคตเคงเคฟ เคฎเฅ‡ เคชเฅเคฐเคพเคฐเคฎเฅเคญเคฟเค• เค…เคตเคงเคฟ | +| `warmup_bias_lr` | `0.1` | เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เคœเฅเค•เคพเคจ เคเคฒเค†เคฐ | +| `box` | `7.5` | เคฌเฅ‰เค•เฅเคธ เคนเคพเคจเคฟ เคชเฅเคฐเคพเคชเฅเคคเคฟ | +| `cls` | `0.5` | เคตเคฐเฅเค— เคนเคพเคจเคฟ เคชเฅเคฐเคพเคชเฅเคคเคฟ (เคชเคฟเค•เฅเคธเฅ‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคธเฅเคฅเคพเคชเคจเคพ เค•เคฐเฅ‡เค‚) | +| `dfl` | `1.5` | เค–เฅ€เค‚เคšเฅ€ เคนเคพเคจเคฟ เคชเฅเคฐเคพเคชเฅเคคเคฟ | +| `pose` | `12.0` | เคฎเคพเคฅเคพเคชเฅเคฐเคตเคฟเคทเฅเคŸเคฟ เคนเคพเคจเคฟ เคชเฅเคฐเคพเคชเฅเคคเคฟ (เค•เฅ‡เคตเคฒ เค เค‚เคกเคพ) | +| `kobj` | `2.0` | เค•เฅ€เคชเฅ‰เค‡เค‚เคŸ obj เคนเคพเคจเคฟ เคชเฅเคฐเคพเคชเฅเคคเคฟ (เค•เฅ‡เคตเคฒ เค เค‚เคกเคพ) | +| `label_smoothing` | `0.0` | เคฒเฅ‡เคฌเคฒ เคธเฅเคฎเฅ‚เคฆเคฟเค‚เค— (เค…เค‚เคถ) | +| `nbs` | `64` | เคจเคพเคฎเฅ‹เคœเคผเคฏเคฒ เคฌเฅˆเคš เค•เคพ เค†เค•เคพเคฐ | +| `overlap_mask` | `True` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคฎเคพเคธเฅเค• เค“เคตเคฐเคฒเฅˆเคช เคนเฅ‹เคจเฅ‡ เคšเคพเคนเคฟเค (เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคŸเฅเคฐเฅ‡เคจ เค•เฅ‡เคตเคฒ) | +| `mask_ratio` | `4` | เคธเฅเคฅเคพเคจเค•เคŸเฅ‚ เค”เคฐเคคเคพ (เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸ เคŸเฅเคฐเฅ‡เคจ เค•เฅ‡เคตเคฒ) | +| `dropout` | `0.0` | เคจเคฟเคฐเฅเคฆเฅเคฏเคฎเคคเคพ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ (เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เฅ‡เคตเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ) | +| `val` | `True` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคœเคพเคเคš/เคชเคฐเฅ€เค•เฅเคทเคฃ | + +## เคฒเฅ‰เค—เคฟเค‚เค— + +YOLO เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฎเฅ‡เค‚ เค†เคชเค•เฅ‹ เคธเคฎเคฏ-เคธเคฎเคฏ เคชเคฐ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคพ เคชเคคเคพ เคฐเค–เคจเคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅ‹ เคธเค•เคคเคพ 
เคนเฅˆเฅค เคฏเคนเคพเค‚ เคฒเฅ‰เค—เคฟเค‚เค— เค•เฅ€ เคเค• เคตเฅˆเคฐเคพเค‚เค—เคฃเคฟเค•เคคเคพ, เคฏเคพเคจเฅ€ เค•เฅ€เคฎเฅ‡เคŸ, เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ เค”เคฐ เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก เค•เคพ เคธเคฎเคฐเฅเคฅเคจ เคนเฅˆเฅค + +เคฒเฅ‰เค—เคฐ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคŠเคชเคฐเฅ€ เค•เฅ‹เคก เคธเฅเคจเคฟเคชเฅ‡เคŸ เค•เฅ‡ เค เฅ‹เค•เคตเคพเคฒเคพ เคฎเฅ‡เคจเฅ‚ เคธเฅ‡ เค‡เคธเฅ‡ เคšเคฏเคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เค‡เคธเฅ‡ เคšเคฒเคพเคเค‚เฅค เคšเคฏเคจเคฟเคค เคฒเฅ‰เค—เคฐ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคเค—เคพ เค”เคฐ เค‡เคจเคฟเคถเคฒเคพเค‡เคœเคผ เค•เคฟเคฏเคพ เคœเคพเคเค—เคพเฅค + +### เค•เฅ€เคฎเฅ‡เคŸ + +[เค•เฅ€เคฎเฅ‡เคŸ](../../../integrations/comet.md) เคเค• เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เคนเฅˆ เคœเฅ‹ เคกเฅ‡เคŸเคพ เคตเฅˆเคœเฅเคžเคพเคจเคฟเค•เฅ‹เค‚ เค”เคฐ เคกเฅ‡เคตเคฒเคชเคฐเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคฏเฅ‹เค— เค”เคฐ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ€ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฎเฅ‡เค‚ เคคเฅเคฒเคจเคพเคคเฅเคฎเค•, เคตเฅเคฏเคพเค–เฅเคฏเคพเคจ เค•เคฐเคจเฅ‡ เค”เคฐ เค…เค—เฅเคฐเคฟเคฎ เคจเคฟเคฐเฅเคงเคพเคฐเคฃ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฎเคฆเคฆ เค•เคฐเคคเคพ เคนเฅˆเฅค เค‡เคธเค•เฅ€ เคธเฅเคตเคฟเคงเคพเคเค‚ เคตเคพเคธเฅเคคเคตเคฟเค• เคธเคฎเคฏ เคฎเคพเคชเค•, เค•เฅ‹เคก เค…เค‚เคคเคฐ เค”เคฐ เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เคœเฅˆเคธเฅ€ เคตเคฟเคญเคฟเคจเฅเคจเคคเคพเคเค‚ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ€ เคนเฅˆเค‚เฅค + +เค•เฅ€เคฎเฅ‡เคŸ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค: + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +เค•เฅƒเคชเคฏเคพ เค•เฅ€เคฎเฅ‡เคŸ เคตเฅ‡เคฌเคธเคพเค‡เคŸ เคชเคฐ เค…เคชเคจเฅ‡ เค•เฅ€เคฎเฅ‡เคŸ เค–เคพเคคเฅ‡ เคฎเฅ‡เค‚ เคธเคพเค‡เคจ เค‡เคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เค…เคชเคจเฅ€ เคเคชเฅ€เค†เคˆ เค•เฅเค‚เคœเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค เค†เคชเค•เฅ‹ เค…เคชเคจเฅ‡ เคตเคพเคคเคพเคตเคฐเคฃ เคชเฅเคฐเคคเคฟเคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เคฏเคพ เค…เคชเคจเฅ‡ เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เคฎเฅ‡เค‚ เค‡เคธเฅ‡ เคœเฅ‹เคกเคผเคจเฅ‡ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคนเฅ‹เค—เฅ€ เคคเคพเค•เคฟ เค†เคช เค…เคชเคจเฅ‡ เคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เค•เฅ‹ เคฒเฅ‰เค— เค•เคฐ เคธเค•เฅ‡เค‚เฅค + +### เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ + +[เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ](https://www.clear.ml/) เคเค• เค“เคชเคจ-เคธเฅ‹เคฐเฅเคธ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เคนเฅˆ เคœเฅ‹ เคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เค•เฅ‡ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‹ เคธเฅเคตเคคเค‚เคคเฅเคฐ เค”เคฐ เคชเฅเคฐเคญเคพเคตเฅ€ เคธเค‚เคธเคพเคงเคฟเคค เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฎเคฆเคฆ เค•เคฐเคคเคพ เคนเฅˆเฅค เคฏเคน เคŸเฅ€เคฎ เค•เฅ‹ เค‰เคจเค•เฅ‡ เคเคฎเคเคฒ เค•เคพ เค•เคพเคฐเฅเคฏ เคชเฅเคฐเคฌเค‚เคงเคจ, เค•เฅเคฐเคฟเคฏเคพเค•เคฒเคพเคชเฅ‹เค‚ เค•เฅ‹ เค•เฅเคฐเคฟเคฏเคพเคจเฅเคตเคฏเคจ เค•เคฐเคจเฅ‡ เค”เคฐ เค‰เคจเค•เฅ€ เคชเฅเคจเคƒเคธเฅƒเคœเคจ เค•เฅ€ เคธเค‚เคตเฅ‡เคฆเคจเคถเฅ€เคฒเคคเคพ เคธเฅ‡ เคธเคนเคพเคฏเคคเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เคฆเฅ‹เคฌเคพเคฐเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคตเคฟเค•เคธเคฟเคค เค•เคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค + +เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค: + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +เค‡เคธ เคธเฅเค•เฅเคฐเคฟเคชเฅเคŸ เค•เฅ‹ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ, เค•เฅƒเคชเคฏเคพ เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ เคตเฅ‡เคฌเคธเคพเค‡เคŸ เคชเคฐ เค…เคชเคจเฅ‡ เค•เฅเคฒเคฟเคฏเคฐเคเคฎเคเคฒ เค–เคพเคคเฅ‡ เคฎเฅ‡เค‚ เคธเคพเค‡เคจ เค‡เคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เค…เคชเคจเฅ‡ เคฌเฅเคฐเคพเค‰เคœเคผเคฐ เคธเคคเฅเคฐ เค•เฅ€ เคชเฅเคฐเคฎเคพเคฃเคฟเค•เคคเคพ เคธเฅเคตเฅ€เค•เคพเคฐ เค•เคฐเฅ‡เค‚เฅค + +### เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก + +[เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก](https://www.tensorflow.org/tensorboard) เคเค• เคŸเฅ‡เคจเฅเคธเคฐเคซเคผเฅเคฒเฅ‹ เคตเฅ€เคœเคผเฅเค…เคฒเคพเค‡เคœเคผเฅ‡เคถเคจ เคŸเฅ‚เคฒเค•เคฟเคŸ เคนเฅˆเฅค เคฏเคน เค†เคชเค•เฅ‹ เค…เคชเคจเฅ‡ เคŸเฅ‡เคจเฅเคธเคฐเคซเคผเฅเคฒเฅ‹ เค—เฅเคฐเคพเคซ เค•เฅ‹ เคฆเฅƒเคทเฅเคŸเคฟเค—เคคเคฟเค• เคŸเฅเค•เคกเคผเฅ‹เค‚ เคฎเฅ‡เค‚ เคตเฅ‡เคŸเคตเฅ‡เคฆเฅเคฏ เค•เคฐเคจเฅ‡, เค†เคชเคพเคคเค•เคพเคฒเฅ€เคจ เค…เคตเค•เคฒเคจเฅ‹เค‚ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคฎเคฟเคคเคฟเคฏเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฒเฅ‰เคŸ เค•เคฐเคจเฅ‡ เค”เคฐ เค‡เคธเค•เฅ‡ เคฎเคงเฅเคฏ เคธเฅ‡ เคœเคพเคจเฅ‡ เค•เฅ€ เค•เคฒเฅเคชเคจเคพ เคธเฅ‡ เคฌเคฆเคฒเคจเฅ‡ เคœเฅˆเคธเฅ‡ เค…เคคเคฟเคฐเคฟเค•เฅเคค เคกเฅ‡เคŸเคพ เคฆเคฟเค–เคพเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค + +[Google Colab เคฎเฅ‡เค‚](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb) เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค: + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "CLI" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ 'เคงเคพเคตเค•' เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เค•เฅ‡ เคธเคพเคฅ เคฌเคฆเคฒเฅ‡เค‚ + ``` + +เคธเฅเคฅเคพเคจเฅ€เคฏ เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคจเฅ€เคšเฅ‡ เคฆเคฟเค เค—เค เค•เคฎเคพเค‚เคก เค•เฅ‹ เคšเคฒเคพเคเค‚ เค”เคฐ เคชเคฐเคฟเคฃเคพเคฎเฅ‹เค‚ เค•เฅ‹ http://localhost:6006/ เคชเคฐ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "CLI" + ```bash + tensorboard --logdir ultralytics/runs # เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ 'เคงเคพเคตเค•' เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เค•เฅ‡ เคธเคพเคฅ เคฌเคฆเคฒเฅ‡เค‚ + ``` + +เค‡เคธเคธเฅ‡ เคŸเฅ‡เค‚เคธเคฐเคฌเฅ‹เคฐเฅเคก เคฒเฅ‹เคก เคนเฅ‹เค—เคพ เค”เคฐ เคฏเคน เค†เคชเค•เฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฒเฅ‰เค—เฅ‹เค‚ เค•เฅ€ เคธเคนเฅ‡เคœเฅ€ เคนเฅเคˆ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เค•เฅ€ เค“เคฐ เคฆเคฟเคถเคพเคจเคฟเคฐเฅเคฆเฅ‡เคถ เค•เคฐเฅ‡เค—เคพเฅค + +เคฒเฅ‰เค—เคฐ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ, เค†เคช เค…เคชเคจเฅ‡ เคšเคฏเคจเคฟเคค เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เคฎเฅ‡เค‚ เคธเฅเคตเคšเคพเคฒเคฟเคค เคฐเฅ‚เคช เคธเฅ‡ เคฐเฅ‚เคชเคพเค‚เคคเคฐเคฃ เคฎเคพเคคเฅเคฐเคพเค“เค‚ เค•เฅ‹ เค…เคฆเฅเคฏเคคเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃเฅ€เคฏ เค•เฅ‹เคก เคœเคพเคฐเฅ€ เคฐเค– เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เค”เคฐ เค†เคชเค•เฅ‹ เค‡เคจ เคฒเฅ‰เค—เฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เคšเคพเคนเฅ‡ เคฏเคน เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‡ เคธเคฎเคฏ, เคตเคฟเคญเคฟเคจเฅเคจ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เคพ เคคเฅเคฒเคจเคพเคคเฅเคฎเค• เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ, เค”เคฐ เคธเฅเคงเคพเคฐ เค•เคฐเคจเฅ‡ เค•เคพ เคชเคนเคšเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเคเฅค diff --git a/ultralytics/docs/hi/modes/train.md:Zone.Identifier b/ultralytics/docs/hi/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/modes/val.md b/ultralytics/docs/hi/modes/val.md new file mode 100755 index 0000000..8880f82 --- /dev/null +++ b/ultralytics/docs/hi/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: YOLOv8 เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ€ เคฎเคพเคจเฅเคฏเคคเคพ เคธเคคเฅเคฏเคพเคชเคจ เค•เฅ‡ เคฒเคฟเค เค—เคพเค‡เคกเฅค เคฏเคนเคพเค เคœเคพเคจเฅ‡เค‚ เค•เคฟ เค•เฅˆเคธเฅ‡ เคชเคพเคฏเคฅเคจ เค”เคฐ CLI เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคชเคฐเฅ€เค•เฅเคทเคฃ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค”เคฐ เคฎเคพเคชเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ‡ YOLO เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚เฅค +keywords: Ultralytics, YOLO เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผ, YOLOv8, เคฎเคพเคจเฅเคฏเคคเคพ, เคฎเฅ‰เคกเคฒ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ, เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ, เคธเคŸเฅ€เค•เคคเคพ, เคฎเคพเคช, เคชเคพเคฏเคฅเคจ, เคธเฅ€เคเคฒเค†เคˆ +--- + +# Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเฅ‰เคกเคฒ เคฎเคพเคจเฅเคฏเคคเคพ + +Ultralytics YOLO เคชเคพเคฐเคฟเคธเฅเคฅเคฟเคคเคฟเค•เฅ€ เค”เคฐ เคเค•เฅ€เค•เคฐเคฃ + +## เคชเคฐเคฟเคšเคฏ + +เคฎเคพเคจเฅเคฏเคคเคพ เคฎเคถเฅ€เคจ เคฒเคฐเฅเคจเคฟเค‚เค— เคชเคพเค‡เคชเคฒเคพเค‡เคจ เคฎเฅ‡เค‚ เคเค• เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคšเคฐเคฃ เคนเฅˆ, เคœเฅ‹ เค†เคชเค•เฅ‹ เค…เคชเคจเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ€ เค—เฅเคฃเคตเคคเฅเคคเคพ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค Ultralytics YOLOv8 เคฎเฅ‡เค‚ Val เคฎเฅ‹เคก เคฌเคนเฅเคค เคธเคพเคฐเฅ‡ เคŸเฅ‚เคฒเฅเคธ เค”เคฐ เคฎเคพเคชเฅ‹เค‚ เค•เคพ เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค†เคชเค•เฅ‡ เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒเฅ‹เค‚ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคนเฅˆเฅค เคฏเคน เค—เคพเค‡เคก เคฏเฅ‹เค—เฅเคฏเคคเคพ เค”เคฐ เคตเคฟเคถเฅเคตเคธเคจเฅ€เคฏเคคเคพ เคฆเฅ‹เคจเฅ‹เค‚ 
เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค Val เคฎเฅ‹เคก เค•เคพ เคธเคตเคฟเคธเฅเคคเคฐ เคธเค‚เคธเคพเคงเคจ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เค•เคพเคฎ เค†เคคเคพ เคนเฅˆเฅค + +## Ultralytics YOLO เค•เฅ‡ เคธเคพเคฅ เคฎเคพเคจเฅเคฏเคคเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคซเคพเคฏเคฆเฅ‡ + +เคฏเคนเคพเค เคฏเฅ‹เคฒเฅ‹เคตเฅ€8 เค•เฅ‡ Val เคฎเฅ‹เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคซเคพเคฏเคฆเฅ‡ เคนเฅˆเค‚: + +- **เคธเคŸเฅ€เค•เคคเคพ:** เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅ‚เคฐเฅ€ เคคเคฐเคน เคธเฅ‡ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค mAP50, mAP75, เค”เคฐ mAP50-95 เคœเฅˆเคธเฅ‡ เคŸเคฟเค•เคพเคŠ เคฎเคพเคชเฅ‹เค‚ เค•เฅ‹ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +- **เคธเฅเคตเคฟเคงเคพ:** เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคธเคฐเคฒ เคฌเคจเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคฏเคพเคฆ เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ€ เค‡เคจเคฌเคฟเคฒเฅเคŸ เคธเฅเคตเคฟเคงเคพ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค +- **เคฒเคšเฅ€เคฒเคพเคชเคจ:** เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคเค• เคนเฅ€ เคฏเคพ เค…เคฒเค— เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค”เคฐ เค›เคตเคฟ เค†เค•เคพเคฐ เค•เฅ‡ เคธเคพเคฅ เคฎเคพเคจเฅเคฏเคคเคพ เคฆเฅ‡เค‚เฅค +- **เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เคŸเฅเคฏเฅ‚เคจเคฟเค‚เค—:** เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เคฎเคพเคชเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฌเฅ‡เคนเคคเคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เฅ‡ เคฒเคฟเค เคธเคฎเคพเคฏเฅ‹เคœเคฟเคค เค•เคฐเฅ‡เค‚เฅค + +### Val เคฎเฅ‹เคก เค•เฅ€ เคฎเฅเค–เฅเคฏ เคตเคฟเคถเฅ‡เคทเคคเคพเคเค‚ + +เคฏเฅ‡ เคนเฅˆเค‚ YOLOv8 เค•เฅ‡ Val เคฎเฅ‹เคก เคฆเฅเคตเคพเคฐเคพ เคชเฅเคฐเคฆเคพเคจ เค•เฅ€ เคœเคพเคจเฅ‡ เคตเคพเคฒเฅ€ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เค•เคพเคฐเฅเคฏเค•เฅเคทเคฎเคคเคพเคเค‚: + +- **เคธเฅเคตเคค: เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ:** เคฎเฅ‰เคกเคฒ เคฏเฅ‹เค—เฅเคฏเคคเคพ เค•เฅ‡ เคฒเคฟเค เค…เคชเคจเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเคฎเคพเคฏเฅ‹เคœเคจเฅ‹เค‚ เค•เฅ‹ เคธเฅเคตเคคเคƒ เคฏเคพเคฆ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚เฅค +- **เคฌเคนเฅเคฎเคพเคจเฅเคฏเคคเคพ เคธเคฎเคฐเฅเคฅเคจ:** เคตเคฟเคญเคฟเคจเฅเคจ เคธเคŸเฅ€เค•เคคเคพ เคฎเคพเคชเฅ‹เค‚ เค•เฅ‡ เค†เคงเคพเคฐ เคชเคฐ เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚เฅค +- **CLI เค”เคฐ เคชเคพเคฏเคฅเคจ เคเคชเฅ€เค†เคˆ:** เคฎเคพเคจเฅเคฏเคคเคพ เค•เฅ‡ เคฒเคฟเค CLI เคฏเคพ เคชเคพเคฏเคฅเคจ เคเคชเฅ€เค†เคˆ เคฎเฅ‡เค‚ เคธเฅ‡ เคเค• เค•เคพ เคšเคฏเคจ เค•เคฐเฅ‡เค‚เฅค +- **เคกเฅ‡เคŸเคพ เคธเคฎเฅเคชเคฐเฅเค•เคคเคพ:** เค•เฅ‹เค•เฅ‹เคตเคฟเคตเค• เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคšเคฐเคฃ เคฎเฅ‡เค‚ เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เคœเคพเคจเฅ‡ เคตเคพเคฒเฅ€ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‡ เคธเคพเคฅ เคธเคนเคœเคคเคพ เคธเฅ‡ เค•เคพเคฎ เค•เคฐเคคเคพ เคนเฅˆเฅค + +!!! 
Tip "เคŸเคฟเคช" + + * YOLOv8 เคฎเฅ‰เคกเคฒ เค…เคชเคจเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเฅเคตเคคเคƒ เคฏเคพเคฆ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚, เค‡เคธเคฒเคฟเค เค†เคช เค•เฅ‡เคตเคฒ `yolo val model=yolov8n.pt` เคฏเคพ `model('yolov8n.pt').val()` เคฆเฅเคตเคพเคฐเคพ เคธเคฐเคฒเคคเคพเคชเฅ‚เคฐเฅเคตเค• เคเค• เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคธเคฎเคพเคจ เค›เคตเคฟ เค†เค•เคพเคฐ เค•เฅ‡ เคธเคพเคฅ เค”เคฐ เคฎเฅ‚เคฒ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคฎเคพเคจเฅเคฏเคคเคพ เคฆเฅ‡ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +## เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เค‰เคฆเคพเคนเคฐเคฃ + +COCO128 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ€ เคธเคŸเฅ€เค•เคคเคพ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚เฅค `model` เค•เฅ‹ เคตเคฟเคฆเฅเคฏเคฎเคพเคจ เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— `data` เค”เคฐ เคคเคฐเฅเค• เคฌเคจเฅ‡ เคฐเคนเคคเฅ‡ เคนเฅˆเค‚, เค‡เคธเคฒเคฟเค เค•เฅ‹เคˆ เคคเคฐเฅเค• เคชเคพเคธ เค•เคฐเคพเคจเฅ‡ เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคจเคนเฅ€เค‚ เคนเฅˆเฅค เคชเฅ‚เคฐเฅ€ เคธเฅ‚เคšเฅ€ เคจเคฟเคฐเฅเคฏเคพเคค เคคเคฐเฅเค•เฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคจเฅ€เคšเฅ‡ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคชเคพเคฏเคฅเคจ" + + ```python + from ultralytics import YOLO + + # เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/best.pt') # เคเค• เค•เคธเฅเคŸเคฎ เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + metrics = model.val() # เค•เฅ‹เคˆ เคคเคฐเฅเค• เค†เคตเคถเฅเคฏเค• เคจเคนเฅ€เค‚ เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚, เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค”เคฐ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฏเคพเคฆ เคฐเค–เฅ‡ เคœเคพเคคเฅ‡ เคนเฅˆเค‚ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # เคนเคฐ เคถเฅเคฐเฅ‡เคฃเฅ€ เค•เฅ‡ map50-95 เคธเฅ‡ เคฌเคจเคพ เคเค• เคธเฅ‚เคšเฅ€ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + yolo detect val model=path/to/best.pt # เค•เคธเฅเคŸเคฎ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + ``` + +## เคคเคฐเฅเค• + +YOLO เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคฒเคฟเค เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚: เคนเคพเค‡เคชเคฐเคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ เค”เคฐ เคตเคฟเคจเฅเคฏเคพเคธ เคœเฅˆเคธเฅ‡, เคœเฅ‹ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคฎเคพเคจเฅเคฏเคคเคพ เค•เฅ‹ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚เฅค เคฏเฅ‡ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคชเฅเคฐเคฆเคฐเฅเคถเคจ, เค—เคคเคฟ, เค”เคฐ เคธเคŸเฅ€เค•เคคเคพ เคชเคฐ เคชเฅเคฐเคญเคพเคต เคกเคพเคฒ เคธเค•เคคเฅ€ เคนเฅˆเค‚เฅค เค•เฅเค› เค†เคฎ YOLO เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฎเฅ‡เค‚ เคฆเคพเคฒ-เคฆเคพเคฒเคค, เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เคฌ เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ เค”เคฐ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฟเค เคœเคพเคจเฅ‡ เคตเคพเคฒเฅ‡ เคฎเคพเคช เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพ เค•เฅ‹ เคชเฅเคฐเคญเคพเคตเคฟเคค เค•เคฐ เคธเค•เคจเฅ‡ เคตเคพเคฒเฅ‡ เค…เคจเฅเคฏ เค•เคพเคฐเค•เฅ‹เค‚ เคฎเฅ‡เค‚ เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เคพ เค†เค•เคพเคฐ เค”เคฐ เคธเค‚เคฐเคšเคจเคพ เค”เคฐ เคฎเฅ‰เคกเคฒ เค•เคพ เคตเคฟเคถเฅ‡เคท เค•เคพเคฐเฅเคฏ เคถเคพเคฎเคฟเคฒ เคนเฅˆเค‚เฅค เค“เคตเคฐเคซเคฟเคŸเคฟเค‚เค— เค•เคพ เคชเคคเคพ เคฒเค—เคพเคจเฅ‡ เค”เคฐ เคฐเฅ‹เค•เคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‡เคจ 
เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเคพเคตเคงเคพเคจเฅ€เคชเฅ‚เคฐเฅเคตเค• เคธเคฎเคพเคฏเฅ‹เคœเคฟเคค เค”เคฐ เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเคจเคพ เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆเฅค + +| เค•เฅเค‚เคœเฅ€ | เคฎเคพเคจ | เคตเคฟเคตเคฐเคฃ | +|---------------|---------|------------------------------------------------------------------------------------| +| `data` | `None` | เคกเฅ‡เคŸเคพ เคซเคผเคพเค‡เคฒ เค•เคพ เคชเคฅ, เคœเฅˆเคธเฅ‡ เค•เฅ€ coco128.yaml | +| `imgsz` | `640` | เคชเฅเคฐเคพเคฐเฅ‚เคชเคฟเค• เค›เคตเคฟ เค•เคพ เค†เค•เคพเคฐ เคเค• เคชเฅ‚เคฐเฅเคฃเคพเค‚เค• เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ | +| `batch` | `16` | เคชเฅเคฐเคคเคฟ เคฌเฅˆเคš เค›เคตเคฟ เค•เฅ€ เคธเค‚เค–เฅเคฏเคพ (-1 for AutoBatch) | +| `save_json` | `False` | เคชเคฐเคฟเคฃเคพเคฎ JSON เคซเคผเคพเค‡เคฒ เคฎเฅ‡เค‚ เคธเคนเฅ‡เคœเฅ‡เค‚ | +| `save_hybrid` | `False` | เคชเฅเคฐเค•เคพเคฐเฅ‹เค‚ เค•เฅ‡ เคนเคพเค‡เคฌเฅเคฐเคฟเคก เคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‹ เคธเคนเฅ‡เคœเฅ‡เค‚ (เคฒเฅ‡เคฌเคฒ + เค…เคคเคฟเคฐเคฟเค•เฅเคค เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ) | +| `conf` | `0.001` | เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เฅ‡ เคฒเคฟเค เคตเคธเฅเคคเฅ เค•เคพ เคตเคฟเคถเฅเคตเคธเคจเฅ€เคฏเคคเคพ เคฅเฅเคฐเฅ‡เคถเคนเฅ‹เคฒเฅเคก | +| `iou` | `0.6` | เคธเค‚เคฏเฅ‹เค—/เคธเค‚เคงเคฟ (IoU) เค•เฅ‡ เคฒเคฟเค เคฅเฅเคฐเฅ‡เคถเคนเฅ‹เคฒเฅเคก เคกเคพเค•เค˜เคฐ | +| `max_det` | `300` | เคชเฅเคฐเคคเคฟ เค›เคตเคฟ เค•เฅ‡ เคฒเคฟเค เค…เคงเคฟเค•เคคเคฎ เคจเคฟเค•เคพเคธเฅ€ เคธเค‚เค–เฅเคฏเคพ | +| `half` | `True` | เค…เคฐเฅเคงเคธเคฐเคฒเคคเคพ (FP16) เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `device` | `None` | เคšเคฒเคพเคเค‚ เค•เฅ‡ เคฒเคฟเค เคฏเฅเค•เฅเคคเคฟ, เค‰เคฆเคพเคนเคฐเคฃ เค•เฅ‡ เคฒเคฟเค cuda device=0/1/2/3 เคฏเคพ device=cpu | +| `dnn` | `False` | เค“เคเคจเคเคจเคเค•เฅเคธ เคธเค‚เคœเฅเคžเคพเคจเคพเคคเฅเคฎเค• เค•เฅ‡ เคฒเคฟเค เค“เคชเฅ‡เค‚เคธเฅ€เคตเฅ€ เคกเฅ€เคเคจเคเคจ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `plots` | `False` | เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฆเฅŒเคฐเคพเคจ เคšเคฟเคคเฅเคฐเคฟเคคเคฟเคฏเคพเค เคฆเคฟเค–เคพเคเค‚ | +| `rect` | `False` | เคจเฅเคฏเฅ‚เคจเคคเคฎ เคชเฅˆเคกเคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เคนเคฐ เคฌเฅˆเคš เค•เฅ‹ เคธเค‚เค•เคฒเคฟเคค เค†เคฏเคคเคพเค•เคพเคฐเค• เคตเคฟเคฎเคพเคจ เค•เคฐเฅ‡เค‚ | +| `split` | `val` | เคฎเคพเคจเฅเคฏเคพเค‚เค•เคจ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เฅ€ เคœเคพเคจเฅ‡ เคตเคพเคฒเฅ€ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคธเฅเคชเฅเคฒเคฟเคŸ, เคœเฅˆเคธเฅ‡ 'val', 'test' เคฏเคพ 'train' | +| diff --git a/ultralytics/docs/hi/modes/val.md:Zone.Identifier b/ultralytics/docs/hi/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/quickstart.md b/ultralytics/docs/hi/quickstart.md new file mode 100755 index 0000000..1db79eb --- /dev/null +++ b/ultralytics/docs/hi/quickstart.md @@ -0,0 +1,327 @@ +--- +comments: true +description: Ultralytics เค•เฅ‹ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคคเคฐเฅ€เค•เฅ‹เค‚ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเฅ‡เค‚เฅค Ultralytics เค•เฅ‹ pip, conda, git เค”เคฐ Docker เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚เฅค Ultralytics เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเฅ‡เคธ เคฏเคพ เค…เคชเคจเฅ€ Python เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เค•เฅ‡ เคญเฅ€เคคเคฐ เค•เคฐเคจเคพ เคธเฅ€เค–เฅ‡เค‚เฅค +keywords: Ultralytics เคธเฅเคฅเคพเคชเคจเคพ, pip install Ultralytics, Docker install Ultralytics, Ultralytics เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเฅ‡เคธ, Ultralytics Python เค‡เค‚เคŸเคฐเคซเฅ‡เคธ +--- + +## Ultralytics 
เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ + +Ultralytics เคจเฅ‡ pip, conda เค”เคฐ Docker เคธเคนเคฟเคค เค•เคˆ เคธเฅเคฅเคพเคชเคจเคพ เคตเคฟเคงเคฟเคฏเคพเค เคชเฅเคฐเคฆเคพเคจ เค•เฅ€ เคนเฅˆเค‚เฅค เคจเคตเฅ€เคจเคคเคฎ เคธเฅเคฅเคฟเคฐ เคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค `ultralytics` pip เคชเฅˆเค•เฅ‡เคœ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ YOLOv8 เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ เคฏเคพ เคธเคฌเคธเฅ‡ เค…เคฆเฅเคฏเคคเคฟเคค เคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics) เค•เฅเคฒเฅ‹เคจ เค•เคฐเฅ‡เค‚เฅค Docker เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡, เคธเฅเคฅเคพเคจเฅ€เคฏ เคธเฅเคฅเคพเคชเคจเคพ เคธเฅ‡ เคฌเคš เค•เคฐ, เคเค• เค›เฅ‹เคŸเฅ‡ เคœเค—เคน เคฎเฅ‡เค‚ เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เคจเค เคธเค‚เคธเฅเค•เคฐเคฃ เค•เคพ เคจเคฟเคทเฅเคชเคพเคฆเคจ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +!!! Note "เคจเฅ‹เคŸ" + + ๐Ÿšง เคนเคฎเคพเคฐเฅ‡ เคฌเคนเฅเคญเคพเคทเฅ€เคฏ เคฆเคธเฅเคคเคพเคตเฅ‡เคœเคผเฅ€เค•เคฐเคฃ เค•เฅ€ เคตเคฐเฅเคคเคฎเคพเคจ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฎเคพเคฃเคพเคงเฅ€เคจ เคนเฅˆ เค”เคฐ เคนเคฎ เค‰เคธเฅ‡ เคธเฅเคงเคพเคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เค เคฟเคจเคคเคพเค“เค‚ เคชเคฐ เค•เคพเคฎ เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚เฅค เค†เคชเค•เฅ‡ เคงเฅˆเคฐเฅเคฏ เค•เฅ‡ เคฒเคฟเค เคงเคจเฅเคฏเคตเคพเคฆ! ๐Ÿ™ + +!!! Example "เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚" + + === "Pip เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ (เค…เคจเฅเคถเค‚เคธเคฟเคค)" + เคฏเคฆเคฟ เค†เคชเค•เฅ‡ เคชเคพเคธ เคชเคฟเค›เคฒเฅ‡ เคธเค‚เคธเฅเค•เคฐเคฃ เค•เคพ เคธเฅเคฅเคพเคชเคจเคพ เคนเฅˆ, เคคเฅ‹ เคชเคฟเคช เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ `ultralytics` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‹ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค `pip install -U ultralytics` เค•เคฎเคพเค‚เคก เคšเคฒเคพเคเค‚เฅค `ultralytics` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เค…เคงเคฟเค• เคตเคฟเคตเคฐเคฃ เค•เฅ‡ เคฒเคฟเค [Python Package Index (PyPI)](https://pypi.org/project/ultralytics/) เคชเคฐ เคœเคพเคเค‚เฅค + + [![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # PyPI เคธเฅ‡ ultralytics เคชเฅˆเค•เฅ‡เคœ เค•เคพ เคธเฅเคฅเคพเคชเคจเคพ เค•เคฐเฅ‡เค‚ + pip install ultralytics + ``` + + เค†เคช เค‡เคธเฅ‡ เคธเฅ€เคงเฅ‡ [GitHub repository](https://github.com/ultralytics/ultralytics) เคธเฅ‡ เคญเฅ€ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เคฏเคน เค…เคฆเฅเคฏเคคเคจ เคธเค‚เคธเฅเค•เคฐเคฃ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเคจเคพ เคšเคพเคนเคคเฅ‡ เคนเฅˆเค‚ เคคเฅ‹ เคฏเคน เคธเคฐเฅเคตเฅ‹เคคเฅเคคเคฎ เคนเฅ‹ เคธเค•เคคเคพ เคนเฅˆเฅค เค‡เคธเค•เฅ‡ เคฒเคฟเค เค…เคชเคจเฅ‡ เคธเคฟเคธเฅเคŸเคฎ เคชเคฐ เค—เคฟเคŸ เค•เคฎเคพเค‚เคก-เคฒเคพเค‡เคจ เคŸเฅ‚เคฒ เคธเฅเคฅเคพเคชเคฟเคค เคนเฅ‹เคจเคพ เคšเคพเคนเคฟเคเฅค `@main` เค…เคชเคฆเฅ‡เคถ เค•เฅ€ `main` เคถเคพเค–เคพ เค•เฅ‹ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคคเคพ เคนเฅˆ เค”เคฐ เค‡เคธเฅ‡ เคฆเฅ‚เคธเคฐเฅ€ เคถเคพเค–เคพ, เค‰เคฆเคพ. 
`@my-branch`, เคฎเฅ‡เค‚ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ, เคฏเคพ เคชเฅ‚เคฐเฅเคฃเคคเคƒ เคนเคŸเคพ เคฆเคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ, เคคเคพเค•เคฟ เคฏเคน เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฐเฅ‚เคช เคธเฅ‡ `main` เคถเคพเค–เคพ เค•เฅ‹ เคฒเฅ‡ เคœเคพเคเฅค + + ```bash + # GitHub เคธเฅ‡ ultralytics เคชเฅˆเค•เฅ‡เคœ เค•เคพ เคธเฅเคฅเคพเคชเคจเคพ เค•เคฐเฅ‡เค‚ + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Conda เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚" + เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคฒเคฟเค pip เค•เฅ‡ เคฌเคฆเคฒเฅ‡ Conda เคเค• เคตเฅˆเค•เคฒเฅเคชเคฟเค• เคชเฅˆเค•เฅ‡เคœ เคชเฅเคฐเคฌเค‚เคงเค• เคนเฅˆ เคœเคฟเคธเฅ‡ เค†เคช เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅ‹เค— เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค•เคฟเคธเฅ€ เคญเฅ€ เคœเคพเคจเค•เคพเคฐเฅ€ เค•เฅ‡ เคฒเคฟเค [Anaconda เค•เฅ€ เคฎเฅเค–เฅเคฏ เคธเคพเค‡เคŸ](https://anaconda.org/conda-forge/ultralytics) เคชเคฐ เคœเคพเคเค‚เฅค เค•เค‚เคกเคพ เคชเฅˆเค•เฅ‡เคœ เค•เฅ€ เค…เคฆเฅเคฏเคคเคจ เค”เคฐ เคธเค‚เคธเคพเคงเคจ เคฐเคฟเคชเฅ‹ เค•เฅ‡ เคฒเคฟเค [เคฏเคนเคพเค‚](https://github.com/conda-forge/ultralytics-feedstock/) เคฆเฅ‡เค–เฅ‡เค‚เฅค + + + [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # conda เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ ultralytics เคชเฅˆเค•เฅ‡เคœ เค•เคพ เคธเฅเคฅเคพเคชเคจเคพ เค•เคฐเฅ‡เค‚ + conda install -c conda-forge ultralytics + ``` + + !!! 
Note "เคจเฅ‹เคŸ" + + เคฏเคฆเคฟ เค†เคช CUDA เคชเคฐเคฟเคตเฅ‡เคถ เคฎเฅ‡เค‚ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐ เคฐเคนเฅ‡ เคนเฅˆเค‚ เคคเฅ‹ เคธเคฐเฅเคตเฅ‹เคคเฅเคคเคฎ เค…เคจเฅเคถเค‚เคธเคพ เคนเฅˆ เค•เคฟ เค†เคช เค•เคฎเคพเค‚เคก-เคฒเคพเค‡เคจ เคชเคฐ `pytorch` เค”เคฐ `pytorch-cuda` เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฎเคพเค‚เคก เคเค• เคธเคพเคฅ เค‡เค‚เคธเฅเคŸเฅ‰เคฒ เค•เคฐเฅ‡เค‚ เคคเคพเค•เคฟ เค•เฅ‹เคฃเฅเคกเคพ เคชเฅˆเค•เฅ‡เคœ เคชเฅเคฐเคฌเค‚เคงเค• เค•เฅ‹ เค•เฅ‹เคˆ เคญเฅ€ เคŸเค•เคฐเคพเคต เคธเฅเคฒเคเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเคฎเคคเคฟ เคฎเคฟเคฒเฅ‡, เคฏเคพ เคซเคฟเคฐ เคœเคฐเฅ‚เคฐเคค เคชเคกเคผเคจเฅ‡ เคชเคฐ CPU-เคตเคฟเคถเคฟเคทเฅเคŸ `pytorch` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‹ CPU-เคตเคฟเคถเคฟเคทเฅเคŸ เคนเฅ‹เคจเฅ‡ เคตเคพเคฒเฅ‡ `pytorch-cuda` เคชเฅˆเค•เฅ‡เคœ เค•เฅ‹ เค…เคงเคฟเคฐเฅ‹เคนเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เค‚เฅค + ```bash + # conda เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคธเคญเฅ€ เคชเฅˆเค•เฅ‡เคœเฅ‹เค‚ เค•เฅ‹ เคเค• เคธเคพเคฅ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Conda Docker เค‡เคฎเฅ‡เคœ + + Ultralytics Conda Docker เค‡เคฎเฅ‡เคœ [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics) เคธเฅ‡ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเค‚เฅค เคฏเฅ‡ เค‡เคฎเฅ‡เคœเฅ‡เคœ [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) เคชเคฐ เค†เคงเคพเคฐเคฟเคค เคนเฅˆเค‚ เค”เคฐ `ultralytics` เค•เคพ เค‰เคชเคฏเฅ‹เค— Conda เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เคฎเฅ‡เค‚ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เคธเคฐเคฒ เคคเคฐเฅ€เค•เคพ เคนเฅˆเฅค + + ```bash + # เคฐเฅ‚เคชเคฐเฅ‡เค–เคพ เคจเคพเคฎ เค•เฅ‹ เคเค• เคšเคฐ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + t=ultralytics/ultralytics:latest-conda + + # Docker Hub เคธเฅ‡ เคจเคตเฅ€เคจเคคเคฎ ultralytics เค‡เคฎเฅ‡เคœ เค•เฅ‹ เคชเฅเคฒ เค•เคฐเฅ‡เค‚ + sudo docker pull $t + + # เคœเฅ€เคชเฅ€เคฏเฅ‚ เคธเคฎเคฐเฅเคฅเคจ เคตเคพเคฒเฅ‡ เค•เค‚เคŸเฅ‡เคจเคฐ เคฎเฅ‡เค‚ ultralytics เค‡เคฎเฅ‡เคœ เคšเคฒเคพเคเค‚ + sudo docker run -it --ipc=host --gpus all $t # เคธเคญเฅ€ เคœเฅ€เคชเฅ€เคฏเฅ‚ + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # เคœเฅ€เคชเฅ€เคฏเฅ‚ เคฆเฅเคตเคพเคฐเคพ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเฅ‡เค‚ + ``` + + === "Git เค•เฅเคฒเฅ‹เคจ" + เคฏเคฆเคฟ เค†เคช เคตเคฟเค•เคพเคธ เคฎเฅ‡เค‚ เคฏเฅ‹เค—เคฆเคพเคจ เค•เคฐเคจเฅ‡ เคฎเฅ‡เค‚ เคฐเฅเคšเคฟ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚ เคฏเคพ เคจเคตเฅ€เคจเคคเคฎ เคธเฅเคฐเฅ‹เคค เค•เฅ‹เคก เค•เฅ‡ เคธเคพเคฅ เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ€ เค‡เคšเฅเค›เคพ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ `ultralytics` เคฐเคฟเคชเฅ‰เคœเคฟเคŸเคฐเฅ€ เค•เฅเคฒเฅ‹เคจ เค•เคฐเฅ‡เค‚เฅค เค•เฅเคฒเฅ‹เคจเคฟเค‚เค— เค•เฅ‡ เคฌเคพเคฆ, เค‰เคธ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸเคฟเคค เคธเค‚เคฆเคฐเฅเคญ เคฎเฅ‡เค‚ เคจเฅ‡เคตเคฟเค—เฅ‡เคŸ เค•เคฐเฅ‡เค‚ เค”เคฐ เคชเฅˆเค•เฅ‡เคœ เค•เฅ‹ เคชเคนเคšเคพเคจเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค pip เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเฅ‡ เคนเฅเค เคธเค‚เค—เค เคจเคพเคคเฅเคฎเค• เคฎเฅ‹เคก `-e` เค•เฅ‡ เคธเคพเคฅ เคชเฅˆเค•เฅ‡เคœ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚เฅค + ```bash + # ultralytics เคฐเคฟเคชเฅ‰เคœเคฟเคŸเคฐเฅ€ เค•เฅเคฒเฅ‹เคจ เค•เคฐเฅ‡เค‚ + git clone https://github.com/ultralytics/ultralytics + + # เค•เฅเคฒเฅ‹เคจ เค•เฅ€ เค—เคˆ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เคฎเฅ‡เค‚ เคจเฅ‡เคตเคฟเค—เฅ‡เคŸ เค•เคฐเฅ‡เค‚ + cd ultralytics + + # เคตเคฟเค•เคพเคธ เค•เฅ‡ เคฒเคฟเค เคธเค‚เค—เค เคจเคพเคคเฅเคฎเค• เคฎเฅ‹เคก เคฎเฅ‡เค‚ เคชเฅˆเค•เฅ‡เคœ เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเฅ‡เค‚ + pip install -e . 
+ ``` + + === "Docker" + + Docker เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ `ultralytics` เคชเฅˆเค•เฅ‡เคœ เค•เคพ เค†เคธเคพเคจเฅ€ เคธเฅ‡ เคจเคฟเคทเฅเคชเคพเคฆเคจ เค•เคฐเฅ‡เค‚ เค”เคฐ เค‡เคธเฅ‡ เคฐเค–เคฐเค–เคพเคต เคฎเฅ‡เค‚ เคฌเฅ‡เคนเคฆ เคธเฅเค—เคฎ เคฌเคจเคพเคเค‚, เค‡เคธ เคชเฅˆเค•เฅ‡เคœ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚, เคตเคฟเคญเคฟเคจเฅเคจ เคชเคฐเฅเคฏเคพเคตเคฐเคฃเฅ‹เค‚ เคชเคฐ เคธเคคเคค เค”เคฐ เคธเฅเค—เคฎ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคธเฅเคจเคฟเคถเฅเคšเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเคเฅค [Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics) เคธเฅ‡ เคธเคคเฅเคฏเคพเคชเคฟเคค เค•เคพเคฐเฅเคฏเค•เคพเคฐเฅ€ เคตเคพเคคเคพเคตเคฐเคฃ เคคเค• เคชเคนเฅเค‚เคš เค•เฅ‡ เคฒเคฟเค Ultralytics 5 เคฎเฅเค–เฅเคฏ เคธเคฎเคฐเฅเคฅเคฟเคค Docker เค‡เคฎเฅ‡เคœ เค‰เคชเคฒเคฌเฅเคง เคนเฅˆเค‚, เคœเฅ‹ เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เค”เคฐ เค‰เคชเคฏเฅ‹เค— เคฎเคพเคฎเคฒเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค‰เคšเฅเคš เคธเค‚เค—เคคเคคเคพ เค”เคฐ เคชเฅเคฐเคฆเคพเคฐเฅเคฅเคถเฅ€เคฒเคคเคพ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคกเคฟเคœเคผเคพเค‡เคจ เค•เคฟเค เค—เค เคนเฅˆเค‚: + + เคกเฅ‰เค•เคฐ เคชเฅเคฒเฅเคฒเฅเคธ + + - **Dockerfile:** เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เค…เคฆเฅเคฏเคคเคจ เคธเค‚เคธเฅเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเคถเค‚เคธเคฟเคค GPU เคšเคฟเคคเฅเคฐเฅค + - **Dockerfile-arm64:** ARM64 เคตเคพเคฃเคฟเคœเฅเคฏเคฟเค•เคฐเคฃ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเค•เฅ‚เคฒเคฟเคค, Raspberry Pi เค”เคฐ เค…เคจเฅเคฏ ARM64 เค†เคงเคพเคฐเคฟเคค เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎ เคชเคฐ เคฏเคพเคคเคพเคฏเคพเคค เค•เฅ€ เค…เคจเฅเคฎเคคเคฟ เคฆเฅ‡เคคเคพ เคนเฅˆเฅค + - **Dockerfile-cpu:** GPU เคฐเคนเคฟเคค เคชเคคเคฒเคพ เคฎเฅ‰เคกเคฒ, เค‰เคฌเค‚เคŸเฅ‚ เค†เคงเคพเคฐเคฟเคค เคฏเฅ‹เค—เฅเคฏเคคเคพ เคคเค• เคชเฅเคจเคฐเฅเคจเคฟเคฐเฅเคฎเคพเคฃ เค•เฅ‡ เคฒเคฟเค เค‰เคชเคฏเฅเค•เฅเคค เคนเฅˆเฅค + - **Dockerfile-jetson:** NVIDIA Jetson เค‰เคชเค•เคฐเคฃเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค†เคฆเคฐเฅเคถเฅ‹เค‚ เค•เฅ‡ เค†เคงเคพเคฐ เคชเคฐ เค—เฅ€เคฏเฅ‚ เคธเคฎเคฐเฅเคฅเคจ เคฎเคฟเคฒเคพเคจ, เค‡เคจ เคชเฅเคฒเฅ‡เคŸเคซเคผเฅ‰เคฐเฅเคฎเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเค•เฅ‚เคฒ เคฏเฅ‚เคชเฅ€เคฏเฅ‚ เคธเคฎเคฐเฅเคฅเคจ เคธเคฎเฅ‡เค•เคฟเคค เค•เคฐเคคเคพ เคนเฅˆเฅค + - **Dockerfile-python:** เค•เฅ‡เคตเคฒ Python เค”เคฐ เค†เคตเคถเฅเคฏเค•เคคเคพ เคชเฅเคฐเคคเคฟเคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ‡ เคจเฅเคฏเฅ‚เคจเคคเคฎ เค›เคตเคฟ, เคนเคฒเฅเค•เฅ‡ เคเคชเฅเคธ เค”เคฐ เคตเคฟเค•เคพเคธ เค•เฅ‡ เคฒเคฟเค เค†เคฆเคฐเฅเคถ เค›เคตเคฟเฅค + - **Dockerfile-conda:** Miniconda3 เคชเคฐ เค†เคงเคพเคฐเคฟเคค, Ultralytics เคชเฅˆเค•เฅ‡เคœ เค•เฅ‡ เค•เฅ‹เคฃเฅเคกเคพ เคธเฅเคฅเคพเคชเคจเคพ เค•เฅ‡ เคธเคพเคฅเฅค + + เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เค•เคฎเคพเค‚เคกเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคจเคตเฅ€เคจเคคเคฎ เค›เคตเคฟ เคฒเคพเคเค เค”เคฐ เค‰เคธเฅ‡ เคจเคฟเคทเฅเคชเคพเคฆเคฟเคค เค•เคฐเฅ‡เค‚: + + ```bash + # เค›เคตเคฟ เคจเคพเคฎ เค•เฅ‹ เคเค• เคšเคฐ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + t=ultralytics/ultralytics:latest + + # Docker Hub เคธเฅ‡ เคจเคตเฅ€เคจเคคเคฎ ultralytics เค›เคตเคฟ เคชเฅเคฒ เค•เคฐเฅ‡เค‚ + sudo docker pull $t + + # เคœเฅ€เคชเฅ€เคฏเฅ‚ เคธเคฎเคฐเฅเคฅเคจ เคตเคพเคฒเฅ‡ เค•เค‚เคŸเฅ‡เคจเคฐ เคฎเฅ‡เค‚ ultralytics เค›เคตเคฟ เคšเคฒเคพเคเค‚ + sudo docker run -it --ipc=host --gpus all $t # เคธเคญเฅ€ เคœเฅ€เคชเฅ€เคฏเฅ‚ + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # เคœเฅ€เคชเฅ€เคฏเฅ‚ เคฆเฅเคตเคพเคฐเคพ เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เค•เคฐเฅ‡เค‚ + ``` + + เค‰เคชเคฐเฅ‹เค•เฅเคค เค•เคฎเคพเค‚เคก เคจเฅ‡ เคเค• Docker เค•เค‚เคŸเฅ‡เคจเคฐ เค•เฅ‹ เคเค•เฅเคธเฅ‡เคธ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‰เคคเฅเคฅเคพเคจ เค•เคฟเคฏเคพ เคนเฅˆเฅค `-it` 
เคเค‚เคเคŸเฅ€ เคเค• เคชเฅเคฐเคคเฅ€เค• TTY เค•เฅ‹ เคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เค•เคฐเคคเฅ€ เคนเฅˆ เค”เคฐ stdin เค–เฅเคฒเฅ€ เคฐเค–เคคเฅ€ เคนเฅˆ, เคœเคฟเคธเคธเฅ‡ เค†เคช เค•เค‚เคŸเฅ‡เคจเคฐ เค•เฅ‡ เคธเคพเคฅ เค‡เค‚เคŸเคฐเฅˆเค•เฅเคŸ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค `--ipc=host` เคเค‚เคเคŸเฅ€ IPC (Inter-Process Communication) เคจเฅ‡เคฎเคธเฅเคชเฅ‡เคธ เค•เฅ‹ เคนเฅ‹เคธเฅเคŸ เคชเคฐ เคธเฅ‡เคŸ เค•เคฐเคคเคพ เคนเฅˆ, เคœเฅ‹ เคชเฅเคฐเค•เฅเคฐเคฟเคฏเคพเค“เค‚ เค•เฅ‡ เคฌเฅ€เคš เคฎเฅ‡เคฎเฅ‹เคฐเฅ€ เคธเคพเคเคพ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค†เคตเคถเฅเคฏเค• เคนเฅ‹เคคเคพ เคนเฅˆเฅค `--gpus all` เคจเคฟเคฐเฅเคฆเคฟเคทเฅเคŸ เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เค‚เคŸเฅ‡เคจเคฐ เค•เฅ‡ เคฌเฅ€เคคเคฐ เคธเคญเฅ€ เค‰เคชเคฒเคฌเฅเคง เคœเฅ€เคชเฅ€เคฏเฅ‚ เค•เฅ‡ เคฒเคฟเค เคชเคนเฅเค‚เคš เคธเค•เฅเคทเคฎ เค•เคฐเคคเคพ เคนเฅˆ, เคœเฅ‹ เคœเฅ€เคชเฅ€เคฏเฅ‚ เคนเคธเฅเคคเค•เฅเคทเฅ‡เคช เค†เคตเคถเฅเคฏเค•เคคเคพ เคตเคพเคฒเฅ‡ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคฎเคนเคคเฅเคตเคชเฅ‚เคฐเฅเคฃ เคนเฅˆเฅค + + เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚: เค•เค‚เคŸเฅ‡เคจเคฐ เคฎเฅ‡เค‚ เคธเฅเคฅเคฟเคคเคฟ เคฎเฅ‡เค‚ เค…เคชเคจเฅ€ เคธเฅเคฅเคพเคจเฅ€เคฏ เคฎเคถเฅ€เคจ เคชเคฐ เคซเคผเคพเค‡เคฒเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เค•เคพเคฎ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค Docker เคตเฅ‰เคฒเฅเคฏเฅ‚เคฎ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚: + + ```bash + # เคธเฅเคฅเคพเคจเฅ€เคฏ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เค•เฅ‹ เค•เค‚เคŸเฅ‡เคจเคฐ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เคฎเฅ‡เค‚ เคฎเคพเค‰เค‚เคŸ เค•เคฐเฅ‡เค‚ + sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t + ``` + + `/path/on/host` เค•เฅ‹ เค…เคชเคจเฅ€ เคธเฅเคฅเคพเคจเฅ€เคฏ เคฎเคถเฅ€เคจ เคชเคฐ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ เคชเคฅ เค•เฅ‡ เคธเคพเคฅ เคฌเคฆเคฒเฅ‡เค‚ เค”เคฐ `/path/in/container` เค•เฅ‹ เค•เค‚เคŸเฅ‡เคจเคฐ เคฎเฅ‡เค‚ เคฏเฅ‹เค—เฅเคฏเคคเคพ เคคเค• เคชเคฅ เคฌเคฆเคฒเฅ‡เค‚ เคœเคฟเคธเคธเฅ‡ เคชเคนเฅเค‚เคš เคฎเคฟเคฒ เคธเค•เฅ‡เฅค + + เคชเฅ‚เคฐเฅเคฃ Docker เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค, เค†เคช [Ultralytics Docker เคฎเคพเคฐเฅเค—เคฆเคฐเฅเคถเคฟเค•เคพ](https://docs.ultralytics.com/guides/docker-quickstart/) เค•เฅ‡ เค…เคจเฅเคตเฅ‡เคทเคฃ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค + +`ultralytics` เค•เฅ‡ เคฒเคฟเค เคธเคญเฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ€ เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค `ultralytics` [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) เคซเคผเคพเค‡เคฒ เคฆเฅ‡เค–เฅ‡เค‚เฅค เคงเฅเคฏเคพเคจ เคฆเฅ‡เค‚ เค•เคฟ เค‰เคชเคฐเฅ‹เค•เฅเคค เคธเคญเฅ€ เค‰เคฆเคพเคนเคฐเคฃเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเคญเฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพเคเค‚ เคธเฅเคฅเคพเคชเคฟเคค เคนเฅ‹เคคเฅ€ เคนเฅˆเค‚เฅค + +

+
+ +
+ เคฆเฅ‡เค–เฅ‡เค‚: Ultralytics YOLO Quick Start Guide +

+ +!!! Tip "เคธเฅเคเคพเคต" + + เค‘เคชเคฐเฅ‡เคŸเคฟเค‚เค— เคธเคฟเคธเฅเคŸเคฎ เค”เคฐ CUDA เค†เคตเคถเฅเคฏเค•เคคเคพเค“เค‚ เค•เฅ‡ เค…เคจเฅเคธเคพเคฐ PyTorch เค†เคตเคถเฅเคฏเค•เคคเคพเคเค‚ เค…เคฒเค—-เค…เคฒเค— เคนเฅ‹ เคธเค•เคคเฅ€ เคนเฅˆเค‚, เค‡เคธเคฒเคฟเค เค…เคจเฅเคถเค‚เคธเคพ เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆ เค•เคฟ เคชเคนเคฒเฅ‡ PyTorch เคธเฅเคฅเคพเคชเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค‡เค‚เคธเฅเคŸเฅเคฐเค•เฅเคถเค‚เคธ เคชเคฐ เคœเคพเคเค‚เฅค [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally) เคชเคฐ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เค…เคงเคฟเค• เคœเคพเคจเค•เคพเคฐเฅ€ เค•เฅ‡ เคฒเคฟเคเฅค + + + PyTorch Installation Instructions + + +## CLI เค•เฅ‡ เคธเคพเคฅ Ultralytics เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ + +Ultralytics เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธ (CLI) เค†เคธเคพเคจ เคเค•เคฒ-เคชเค‚เค•เฅเคคเคฟ เค•เคฎเคพเค‚เคก เค•เฅ‡ เคฒเคฟเค เคธเค‚เค•เฅเคทเฅ‡เคช เคฎเฅ‡เค‚ เคนเฅ‹เคธเคฒเคพ เค…เคฆเฅเคฏเคคเคฟเคค เค•เคฐเคคเคพ เคนเฅˆ, เคชเคพเคฏเคฅเคจ เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เค•เฅ€ เคœเคผเคฐเฅ‚เคฐเคค เค•เฅ‡ เคฌเคฟเคจเคพเฅค CLI เค•เฅ‹เคˆ เค…เคจเฅเค•เฅ‚เคฒเคจ เคฏเคพ เคชเคพเคฏเคฅเคจ เค•เฅ‹เคก เค•เฅ€ เค†เคตเคถเฅเคฏเค•เคคเคพ เคจเคนเฅ€เค‚ เคนเฅ‹เคคเฅ€ เคนเฅˆเฅค เค†เคช เค•เฅ‡เคตเคฒ `yolo` เค•เคฎเคพเค‚เคก เค•เฅ‡ เคธเคพเคฅ เคŸเคฐเฅเคฎเคฟเคจเคฒ เคธเฅ‡ เคธเคญเฅ€ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‹ เคšเคฒเคพ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค CLI เคธเฅ‡ YOLOv8 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เค”เคฐ เค…เคงเคฟเค• เคœเคพเคจเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค [CLI Guide](/../usage/cli.md) เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "เคธเค‚เคฏเฅ‹เคœเคจ" + Ultralytics `yolo` เค•เคฎเคพเค‚เคก เค•เคพ เค‰เคชเคฏเฅ‹เค— เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคชเฅเคฐเคพเคฐเฅ‚เคช เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคคเคพ เคนเฅˆ: + ```bash + yolo เคŸเคพเคธเฅเค• เคฎเฅ‹เคก ARGS + ``` + + - `เคŸเคพเคธเฅเค•` (เคตเฅˆเค•เคฒเฅเคชเคฟเค•) เค‡เคจเคฎเฅ‡เค‚ เคธเฅ‡ เคเค• เคนเฅˆ ([เค–เฅ‹เคœเฅ‹](tasks/detect.md), [เค–เค‚เคก](tasks/segment.md), [เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เคฐเฅ‹](tasks/classify.md), [เคธเฅเคฅเคฟเคคเคฟ](tasks/pose.md)) + - `เคฎเฅ‹เคก` (เค†เคตเคถเฅเคฏเค•) เค‡เคจเคฎเฅ‡เค‚ เคธเฅ‡ เคเค• เคนเฅˆ ([เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ](modes/train.md), [เคฎเคพเคจเฅเคฏ](modes/val.md), [เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ](modes/predict.md), [เคจเคฟเคฐเฅเคฏเคพเคค](modes/export.md), [เคŸเฅเคฐเฅˆเค•](modes/track.md)) + - `ARGS` (เคตเฅˆเค•เคฒเฅเคชเคฟเค•) `imgsz=640` เคœเฅˆเคธเฅ‡ `arg=เคฎเคพเคจ` เคœเฅ‹ เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เค•เฅ‹ เค“เคตเคฐเคฐเคพเค‡เคก เค•เคฐเคคเฅ‡ เคนเฅˆเค‚เฅค + + เคธเคญเฅ€ `ARGS` เค•เฅ‹ เคชเฅ‚เคฐเฅเคฃ [Configuration Guide](/../usage/cfg.md) เคฏเคพ `yolo cfg` CLI เค•เคฎเคพเค‚เคก เค•เฅ‡ เคธเคพเคฅ เคฆเฅ‡เค–เฅ‡เค‚เฅค + + === "เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ" + เคชเฅเคฐเคพเคฐเค‚เคญเคฟเค• เคถเคฟเค•เฅเคทเคฃ เค”เคฐ language เค•เฅ‡ เคธเคพเคฅ 10 เคเคชเฅ‹เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค เคเค• เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚, เคœเคนเคพเค‚ + เค‡เค‚เค—เคฟเคคเฅ€ เคถเคฟเค•เฅเคทเคฃ เคฆเคฐ 0.01 เคนเฅˆ + ```bash + yolo เคŸเฅเคฐเฅ‡เคจ data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ" + เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคธเฅ‡เค—เคฎเฅ‡เค‚เคŸเฅ‡เคถเคจ เคฎเฅ‰เคกเคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ YouTube เคตเฅ€เคกเคฟเคฏเฅ‹ เค•เฅ€ เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€ เค•เคฐเฅ‡เค‚ + เค›เคตเคฟ เค†เค•เคพเคฐ 320: + ```bash + yolo เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ model=yolov8n-seg.pt เคธเฅเคฐเฅ‹เคค='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === 
"เคฎเคพเคจเฅเคฏ เค•เคฐเฅ‹" + เคเค• เคชเฅ‚เคฐเฅเคต-เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคฎเฅ‰เคกเคฒ เค•เฅ€ เคฎเคพเคจเฅเคฏเคคเคพ เคตเคพเคฒเฅ€ เคชเฅเคฐเคฎเคพเคฃเคฟเคค เค•เคฐเฅ‡เค‚ เค”เคฐ เค‡เคฎเฅ‡เคœ เค•เคพ เค†เค•เคพเคฐ 640 เค•เฅ‡ เคฌเฅˆเคš-เค†เค•เคพเคฐ 1 เค•เฅ‡ เคธเคพเคฅ เคฆเฅ‡เค–เฅ‡เค‚: + ```bash + yolo เคฎเคพเคจเฅเคฏ model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚" + เคเค• YOLOv8n เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เคฎเฅ‰เคกเคฒ เค•เฅ‹ ONNX เคชเฅเคฐเคพเคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚, 224x224 เค•เฅ‡ เค†เค•เคพเคฐ เคชเคฐ เค›เคตเคฟ (เค•เฅ‹เคˆ เคŸเคพเคธเฅเค• เค†เคตเคถเฅเคฏเค• เคจเคนเฅ€เค‚ เคนเฅˆ) + ```bash + yolo เคจเคฟเคฐเฅเคฏเคพเคค model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "เคตเคฟเคถเฅ‡เคท" + เคธเค‚เคธเฅเคชเฅ‡เคทเฅเคŸ เค•เคฎเคพเค‚เคกเฅ‹เค‚ เค•เฅ‹ เคšเคฒเคพเคเค‚ เคคเคพเค•เคฟ เคธเค‚เคธเฅเค•เคฐเคฃ, เคธเฅ‡เคŸเคฟเค‚เค— เคฆเฅ‡เค–เฅ‡เค‚, เคšเฅ‡เค• เค•เคฐเฅ‡เค‚ เค”เคฐ เค…เคงเคฟเค• เคฆเฅ‡เค–เฅ‡เค‚: + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "เคšเฅ‡เคคเคพเคตเคจเฅ€" + + เคคเคพเค•เคฟ เคฆเฅเคตเคฟเคงเคพ เคจ เคนเฅ‹, เคคเคœเคผเฅเคœเคผเคพ เคธเฅ‡เคŸเคฟเค‚เค— เค•เฅ‹ `arg=val` เคœเฅ‹เคกเคผเฅ‡ เค•เฅ‡ เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคชเคพเคฐ เค•เคฐเคจเคพ เคนเฅ‹เค—เคพ, เคœเคฟเคจเฅเคนเฅ‡เค‚ `=` เคฐเฅ‡เค–เคพ เคฆเฅเคตเคพเคฐเคพ เคตเคฟเคญเคพเคœเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ เค”เคฐ เคœเฅ‹เคกเคผเฅ‹เค‚ เค•เฅ‡ เคฌเฅ€เคš เค…เค‚เคคเคฐเคฟเคค เคนเฅ‹เคคเคพ เคนเฅˆเฅค `--` เคคเคฐเฅเค•-เคชเฅ‚เคฐเฅเคตเค• เค…เค‚เคŸเฅ€เคฐเฅ‡ เคถเคฌเฅเคฆ เคฏเคพ `,` เค…เค‚เคคเคฐเคพเคฒ เคฆเฅเคตเคพเคฐเคพ เคคเคฐเฅเค•เฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เคจ เค•เคฐเฅ‡เค‚เฅค + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25` โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25` โŒ (เค…เคญเคพเคต `=`) + - `yolo predict model=yolov8n.pt, imgsz=640, conf=0.25` โŒ (`,` เค‰เคชเคฏเฅ‹เค— เคจ เค•เคฐเฅ‡เค‚) + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25` โŒ (`--` เค‰เคชเคฏเฅ‹เค— เคจ เค•เคฐเฅ‡เค‚) + +เคเค•เฅ‡เคจเฅเคฆเฅเคฐเฅ€เคฏ เคฏเฅ‹เค—เฅเคฏเคคเคพเคเค [Configuration Guide](/../usage/cfg.md) เคฏเคพ `yolo cfg` CLI เค•เคฎเคพเค‚เคก เค•เฅ‡ เคธเคพเคฅ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +## Python เค•เฅ‡ เคธเคพเคฅ Ultralytics เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ + +YOLOv8 เค•เคพ Python เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธ เค†เคชเค•เฅ€ Python เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ เค…เค‚เค•เคฟเคค เคฎเคฟเคฒเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคเค• เค†เคธเคพเคจ เคคเค•เคจเฅ€เค• เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฟเคธเฅ‡ เคนเคฎเคพเคฐเฅ‡ เคชเคพเคธ เคถเคพเคฎเคฟเคฒ เค•เคฐเคจเคพ เค†เคธเคพเคจ เคนเฅ‹ เคœเคพเคคเคพ เคนเฅˆเฅค เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‹ เค‰เคจเค•เฅ‡ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ เค†เคชเคพเคคเค•เคพเคฒเฅ€เคจ เคชเคนเฅเค‚เคš, เคšเคฒเคพเคจเฅ‡ เค”เคฐ เคฎเฅ‰เคกเคฒ เค•เฅ‡ เค†เค‰เคŸเคชเฅเคŸ เค•เฅ€ เคชเฅเคฐเคธเค‚เคธเฅเค•เคฐเคฃ เค•เคฐเคจเฅ‡ เค•เฅ€ เค†เคธเคพเคจเฅ€ เค•เฅ‡ เคธเคพเคฅ เคชเฅเคฐเคถเฅเคจเฅ‹เคคเฅเคคเคฐเฅ€, เค–เค‚เคก, เค”เคฐ เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เค•เฅ‡ เคฒเคฟเค เคธเฅเคตเคฟเคงเคพเคœเคจเค• เคฎเฅ‚เคฒเฅเคฏ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆเฅค เค‡เคธ เคคเค•เคจเฅ€เค• เค•เฅ‡ เคธเคพเคฅ, เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพเค“เค‚ เค•เฅ‡ เคฒเคฟเค เคฏเคน เค…เคฆเฅเคตเคฟเคคเฅ€เคฏ เคธเคพเคงเคจ เคนเฅˆ เคœเฅ‹ เค…เคชเคจเฅ€ Python เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ เค‡เคจ เค—เฅเคฃเฅ‹เค‚ เค•เฅ‹ เคถเคพเคฎเคฟเคฒ เค•เคฐเคจเฅ‡ เค•เฅ€ เค‡เคšเฅเค›เคพ เคฐเค–เคคเฅ‡ เคนเฅˆเค‚เฅค + +เค‰เคฆเคพเคนเคฐเคฃ เค•เฅ‡ เคฒเคฟเค, 
เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เคธเค‚เค–เฅเคฏเคพ เค—เคฟเคจเคคเฅ€ เค•เฅ‡ เคฒเคฟเค เค•เฅเค›-เค•เฅเค› เคคเคพเคฐเคฃเฅ€ เค•เฅ€ เคฏเฅ‹เคœเคจเคพ เคฎเฅ‡เค‚ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคฒเฅ‹เคก เค•เคฐเค•เฅ‡ เค‰เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เค‡เคธเค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เคธเคฎเคพเคชเฅเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚ เค”เคฐ เคฏเคฆเคฟ เค†เคตเคถเฅเคฏเค• เคนเฅ‹, เค‰เคธเฅ‡ ONNX เคชเฅเคฐเคพเคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค…เคชเคจเฅ€ Python เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฎเฅ‡เค‚ YOLOv8 เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เค”เคฐ เค…เคงเคฟเค• เคœเคพเคจเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค [Python Guide](/../usage/python.md) เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + ```python + from ultralytics import YOLO + + # เคชเฅ‚เคฐเฅ€ เคจเคˆ YOLO เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค + model = YOLO('yolov8n.yaml') + + # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLO เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ (เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เค…เคจเฅเคถเค‚เคธเคฟเคค เคนเฅˆ) + model = YOLO('yolov8n.pt') + + # 3 เคเคชเฅ‹เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค "coco128.yaml" เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + results = model.train(data='coco128.yaml', epochs=3) + + # เคฎเฅ‰เคกเคฒ เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคฎเคพเคจเฅเคฏเคคเคพ เคตเคพเคฒเฅ‡ เคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เค•เคฐเฅ‡เค‚ + results = model.val() + + # เคฎเฅ‰เคกเคฒ เค•เฅ‹ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค›เคตเคฟ เคชเคฐ เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เค•เคฐเฅ‡เค‚ + results = model('https://ultralytics.com/images/bus.jpg') + + # เคฎเฅ‰เคกเคฒ เค•เฅ‹ ONNX เคชเฅเคฐเคพเคฐเฅ‚เคช เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚ + success = model.export(format='onnx') + ``` + +[Python Guide](/../usage/python.md){.md-button .md-button--primary} + +## Ultralytics เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ + +Ultralytics เคฒเคพเค‡เคฌเฅเคฐเฅ‡เคฐเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคชเฅเคฐเคฌเค‚เคงเคจ เคชเฅเคฐเคฃเคพเคฒเฅ€ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเฅ€ เคนเฅˆ เคคเคพเค•เคฟ เค†เคช เค…เคชเคจเฅ‡ เคชเฅเคฐเคฏเฅ‹เค—เฅ‹เค‚ เคชเคฐ เคซเคพเค‡เคจ-เค—เฅเคฐเฅ‡เคจเฅเคก เคจเคฟเคฏเค‚เคคเฅเคฐเคฃ เคฌเคจเคพเค เคฐเค– เคธเค•เฅ‡เค‚เฅค `ultralytics.utils` เคฎเฅ‡เค‚ เคธเฅเคฅเคฟเคค `SettingsManager` เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ เค‰เคชเคฏเฅ‹เค—เค•เคฐเฅเคคเคพ เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคคเค• เคชเคนเฅเค‚เคš เค•เคฐเค•เฅ‡ เค‰เคจเฅเคนเฅ‡เค‚ เคชเคขเคผ เค”เคฐ เคฌเคฆเคฒ เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค เค‡เคจเฅเคนเฅ‡เค‚ เคชเคพเคฏเคฅเคจ เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เค•เฅ‡ เคญเฅ€เคคเคฐ เคธเฅ€เคงเฅ‡ เคฆเฅ‡เค–เคจเฅ‡ เค”เคฐ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค, เคฏเคพ CLI (เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธ) เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค + +### เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เคพ เค—เคฃเคจเคพ + +เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‡ เคฎเฅŒเคœเฅ‚เคฆเคพ เคตเคฟเคจเฅเคฏเคพเคธ เค•เฅ€ เค“เคฐเคฆเคพเคฐเฅ€ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค†เคช เค‰เคจเฅเคนเฅ‡เค‚ เคธเฅ€เคงเฅ‡ เคฆเฅ‡เค– เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + +!!! 
Example "เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฆเฅ‡เค–เฅ‡เค‚" + + === "เคชเคพเคฏเคฅเคจ" + เค†เคช PyTorch เคธเฅ‡ `ultralytics` เคฎเฅ‰เคกเฅเคฏเฅ‚เคฒ เคฎเฅ‡เค‚ `เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ` เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เค•เฅ‹ เค†เคฏเคพเคค เค•เคฐเค•เฅ‡ เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฆเฅ‡เค– เคธเค•เคคเฅ‡ เคนเฅˆเค‚เฅค `settings` เค‘เคฌเฅเคœเฅ‡เค•เฅเคŸ เคชเคฐ เคชเฅเคฐเคฟเค‚เคŸ เค”เคฐ เคฐเคฟเคŸเคฐเฅเคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‡ เคฒเคฟเค เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เค•เคฎเคพเค‚เคกเฅ‹เค‚ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚: + ```python + from ultralytics import settings + + # เคธเคญเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฆเฅ‡เค–เฅ‡เค‚ + print(settings) + + # เคเค• เคตเคฟเคถเฅ‡เคท เคธเฅ‡เคŸเคฟเค‚เค— เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚ + value = settings['runs_dir'] + ``` + + === "CLI" + เคฏเคฆเคฟ เค†เคช เคชเฅเคฐเคพเคฅเคฎเคฟเค•เคคเคพเคเค เคฒเฅ‡เคคเฅ‡ เคนเฅˆเค‚ CLI เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเคจเคพ เคชเคธเค‚เคฆ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เค•เคฎเคพเค‚เคก เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ€ เคœเคพเค‚เคš เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + ```bash + yolo settings + ``` + +### เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเคจเคพ + +Ultralytics เค•เฅ‡ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐเคจเคพ เค†เคธเคพเคจ เคนเฅˆเฅค เคฌเคฆเคฒเคพเคตเฅ‹เค‚ เค•เฅ‹ เคจเคฟเคฎเฅเคจ เคคเคฐเฅ€เค•เฅ‹เค‚ เคธเฅ‡ เค•เคฟเคฏเคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆ: + +!!! Example "เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค…เคชเคกเฅ‡เคŸ เค•เคฐเฅ‡เค‚" + + === "เคชเคพเคฏเคฅเคจ" + เคชเคพเคฏเคฅเคจ เคชเคฐเฅเคฏเคพเคตเคฐเคฃ เค•เฅ‡ เคญเฅ€เคคเคฐ, เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคชเคฐ `เค…เคชเคกเฅ‡เคŸ` เคตเคฟเคงเคฟ เค•เฅ‹ เคฌเฅเคฒเคพเค•เคฐ เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคฌเคฆเคฒ เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + ```python + from ultralytics import settings + + # เคเค• เคธเฅ‡เคŸเคฟเค‚เค— เค…เคชเคกเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + settings.update({'runs_dir': '/path/to/runs'}) + + # เคเค•เคพเคงเคฟเค• เคธเฅ‡เคŸเคฟเค‚เค— เค…เคชเคกเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + settings.update({'runs_dir': '/path/to/runs', 'tensorboard': False}) + + # เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฎเคพเคจ เคฎเฅ‡เค‚ เคธเฅ‡เคŸเคฟเค‚เค— เคฐเฅ€เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + settings.reset() + ``` + + === "CLI" + เคฏเคฆเคฟ เค†เคช เค•เคฎเคพเค‚เคก เคฒเคพเค‡เคจ เค‡เค‚เคŸเคฐเคซเคผเฅ‡เคธ เคชเคฐ เคงเฅเคฏเคพเคจ เคฆเฅ‡เคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เค•เคฎเคพเค‚เคก เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เค…เคชเคจเฅ€ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเค‚เคถเฅ‹เคงเคฟเคค เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚: + ```bash + # เคเค• เคธเฅ‡เคŸเคฟเค‚เค— เค…เคชเคกเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + yolo settings runs_dir='/path/to/runs' + + # เคเค•เคพเคงเคฟเค• เคธเฅ‡เคŸเคฟเค‚เค— เค…เคชเคกเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + yolo settings runs_dir='/path/to/runs' tensorboard=False + + # เคกเคฟเคซเคผเฅ‰เคฒเฅเคŸ เคฎเคพเคจ เคฎเฅ‡เค‚ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคฌเคฐเคพเคฌเคฐเฅ€ เคฎเฅ‡เค‚ เคฐเฅ€เคธเฅ‡เคŸ เค•เคฐเฅ‡เค‚ + yolo settings reset + ``` + +### เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‹ เคธเคฎเคเคจเคพ + +เคจเคฟเคฎเฅเคจเคฒเคฟเค–เคฟเคค เคŸเฅ‡เคฌเคฒ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เคพ เค…เคตเคฒเฅ‹เค•เคจ เคชเฅเคฐเคฆเคพเคจ เค•เคฐเคคเคพ เคนเฅˆ, เคœเคฌเค•เคฟ เคชเฅเคฐเคคเคฟ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‡ เคฒเคฟเค เค‰เคฆเคพเคนเคฐเคฃ เคฎเคพเคจ, เคกเฅ‡เคŸเคพ เคชเฅเคฐเค•เคพเคฐ เค”เคฐ เคธเค‚เค•เฅเคทเฅ‡เคช เคฎเฅ‡เค‚ เคตเคฟเคตเคฐเคฃ เคฆเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค + +| เคจเคพเคฎ | เค‰เคฆเคพเคนเคฐเคฃ เคฎเคพเคจ | เคกเฅ‡เคŸเคพ เคชเฅเคฐเค•เคพเคฐ | เคตเคฟเคตเคฐเคฃ | 
+|--------------------|-----------------------|-------------|----------------------------------------------------------------------------------------------------------------------------| +| `settings_version` | `'0.0.4'` | `str` | Ultralytics _settings_ เคธเค‚เคธเฅเค•เคฐเคฃ (Ultralytics [pip](https://pypi.org/project/ultralytics/) เคธเค‚เคธเฅเค•เคฐเคฃ เคธเฅ‡ เค…เคฒเค— เคนเฅ‹เคคเคพ เคนเฅˆ) | +| `datasets_dir` | `'/path/to/datasets'` | `str` | เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เคธเค‚เค—เฅเคฐเคนเฅ€เคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ€ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ | | +| `weights_dir` | `'/path/to/weights'` | `str` | เคฎเฅ‰เคกเคฒ เคตเฅ‡เคŸ เค•เฅ‹ เคธเค‚เค—เฅเคฐเคนเฅ€เคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ€ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ | +| `runs_dir` | `'/path/to/runs'` | `str` | เคชเฅเคฐเคฏเฅ‹เค— เคฆเฅŒเคกเคผ เค•เฅ‹ เคธเค‚เค—เฅเคฐเคนเฅ€เคค เค•เคฐเคจเฅ‡ เคตเคพเคฒเฅ€ เคจเคฟเคฐเฅเคฆเฅ‡เคถเคฟเค•เคพ | +| `uuid` | `'a1b2c3d4'` | `str` | เคฎเฅŒเคœเฅ‚เคฆเคพ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เค•เฅ‡ เคฒเคฟเค เค…เคฆเฅเคตเคฟเคคเฅ€เคฏ เคชเคนเคšเคพเคจเค•เคฐเฅเคคเคพ | +| `sync` | `True` | `bool` | Ultralytics เค”เคฐ เคฆเฅเคตเคฟเคงเคพ เค•เฅ‹ HUB เคฎเฅ‡เค‚ เคธเคฎเค•เคพเคฒเฅ€เคจ เคฐเค–เฅ‡เค‚ | +| `api_key` | `''` | `str` | Ultralytics HUB [API Key](https://hub.ultralytics.com/settings?tab=api+keys) | +| `clearml` | `True` | `bool` | ClearML เคฒเฅ‰เค—เคฟเค‚เค— เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `comet` | `True` | `bool` | เคฏเคฆเคฟ [Comet ML](https://bit.ly/yolov8-readme-comet) เคชเฅเคฐเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ เคฏเคพ เคจเคนเฅ€เค‚ experiment เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค”เคฐ visualization | +| `dvc` | `True` | `bool` | เคถเฅ‹เคง เค”เคฐ เคธเค‚เคธเฅเค•เคฐเคฃ เคจเคฟเคฏเค‚เคคเฅเคฐเคฃ เค•เฅ‡ เคฒเคฟเค [DVC for experiment tracking](https://dvc.org/doc/dvclive/ml-frameworks/yolo) เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `hub` | `True` | `bool` | [Ultralytics HUB](https://hub.ultralytics.com) เคเค•เฅ€เค•เคฐเคฃ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `mlflow` | `True` | `bool` | เคเค•เฅเคธเคชเฅ‡เคฐเคฟเคฎเฅ‡เค‚เคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค MLFlow เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `neptune` | `True` | `bool` | เคเค•เฅเคธเคชเฅ‡เคฐเคฟเคฎเฅ‡เค‚เคŸ เคŸเฅเคฐเฅˆเค•เคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค Neptune เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `raytune` | `True` | `bool` | hyperparameter tuning เค•เฅ‡ เคฒเคฟเค Ray Tune เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `tensorboard` | `True` | `bool` | เคตเคฟเคœเคผเฅเค…เคฒเคพเค‡เคœเคผเฅ‡เคถเคจ เค•เฅ‡ เคฒเคฟเค TensorBoard เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | +| `wandb` | `True` | `bool` | Weights & Biases logging เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚ | + +เคœเคฌ เค†เคช เค…เคชเคจเฅ‡ เคชเคฐเคฟเคฏเฅ‹เคœเคจเคพเค“เค‚ เคฏเคพ เค…เคจเฅเคญเคพเค—เฅ‹เค‚ เค•เฅ‡ เคฎเคพเคงเฅเคฏเคฎ เคธเฅ‡ เคšเคฒเคคเฅ‡ เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคฏเคพเคคเฅเคฐเคพ เค•เคฐเคคเฅ‡ เคนเฅˆเค‚, เคคเฅ‹ เค‡เคจ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคชเคฐ เคธเฅเคงเคพเคฐ เค•เคฐเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เคฒเฅŒเคŸเฅ‡เค‚เฅค diff --git a/ultralytics/docs/hi/quickstart.md:Zone.Identifier b/ultralytics/docs/hi/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/tasks/classify.md b/ultralytics/docs/hi/tasks/classify.md new file mode 100755 index 0000000..951ec42 --- /dev/null +++ b/ultralytics/docs/hi/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: YOLOv8 Classify เคฎเฅ‰เคกเคฒเฅเคธ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคœเคพเคจเฅ‡เค‚ 
เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ เค•เฅ‡ เคฒเคฟเคเฅค เคชเฅเคฐเฅ€เคŸเฅเคฐเฅ‡เคจเฅเคก เคฎเคพเฅ…เคกเฅ‡เคฒเฅเคธ เค•เฅ€ เคธเฅ‚เคšเฅ€ เค”เคฐ เคŸเฅเคฐเฅ‡เคจ, เคตเฅ‡เคฒเคฟเคกเฅ‡เคŸ, เคชเฅเคฐเฅ‡เคกเคฟเค•เฅเคŸ เค”เคฐ เคเค•เฅเคธเคชเฅ‹เคฐเฅเคŸ เคฎเคพเฅ…เคกเฅ‡เคฒเฅเคธ เค•เฅ‡ เคฌเคพเคฐเฅ‡ เคฎเฅ‡เค‚ เคตเคฟเคธเฅเคคเฅƒเคค เคœเคพเคจเค•เคพเคฐเฅ€ เคชเฅเคฐเคพเคชเฅเคค เค•เคฐเฅ‡เค‚เฅค +keywords: Ultralytics, YOLOv8, เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ, เคชเฅเคฐเฅ€เคŸเฅเคฐเฅ‡เคจเฅเคก เคฎเคพเฅ…เคกเฅ‡เคฒเฅเคธ, YOLOv8n-cls, เคŸเฅเคฐเฅ‡เคจ, เคตเฅ‡เคฒเคฟเคกเฅ‡เคŸ, เคชเฅเคฐเฅ‡เคกเคฟเค•เฅเคŸ, เคฎเคพเฅ…เคกเฅ‡เคฒ เคเค•เฅเคธเคชเฅ‹เคฐเฅเคŸ +--- + +# เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ + +เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ เค‰เคฆเคพเคนเคฐเคฃ + +เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ เคคเฅ€เคจ เค•เคพเคฐเฅเคฏเฅ‹เค‚ เคฎเฅ‡เค‚ เคธเฅ‡ เคธเคฌเคธเฅ‡ เคธเคฐเคฒ เคนเฅˆ เค”เคฐ เคชเฅ‚เคฐเฅ€ เคคเคธเฅเคตเฅ€เคฐ เค•เฅ‹ เคเค• เคชเฅ‚เคฐเฅเคตเคจเคฟเคฐเฅเคงเคพเคฐเคฟเคค เค•เค•เฅเคทเคพ เคฎเฅ‡เค‚ เคตเคฐเฅเค—เฅ€เค•เฅƒเคค เค•เคฐเคจเคพ เคถเคพเคฎเคฟเคฒ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคพเคฏเคฐ เค•เคพ เค†เค‰เคŸเคชเฅเคŸ เคเค• เคเค•เคฒ เค•เฅเคฒเคพเคธ เคฒเฅ‡เคฌเคฒ เค”เคฐ เคเค• เคตเคฟเคถเฅเคตเคพเคธ เคชเฅเคฐเคพเคฎเคพเคฃเคฟเค•เคคเคพ เคธเฅเค•เฅ‹เคฐ เคนเฅ‹เคคเคพ เคนเฅˆเฅค เค‡เคฎเฅ‡เคœ เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ เค‰เคชเคฏเฅ‹เค—เฅ€ เคนเฅ‹เคคเคพ เคนเฅˆ เคœเคฌ เค†เคชเค•เฅ‹ เค•เฅ‡เคตเคฒ เค‡เคธเฅ‡ เคœเคพเคจเคจเฅ‡ เค•เฅ€ เคœเคฐเฅ‚เคฐเคค เคนเฅ‹เคคเฅ€ เคนเฅˆ เค•เคฟ เคเค• เค‡เคฎเฅ‡เคœ เค•เคฟเคธ เค•เค•เฅเคทเคพ เคฎเฅ‡เค‚ เคธเคฎเฅเคฎเคฟเคฒเคฟเคค เคนเฅˆ เค”เคฐ เค†เคชเค•เฅ‹ เคจเคนเฅ€เค‚ เคชเคคเคพ เคนเฅ‹เคจเคพ เคšเคพเคนเคฟเค เค•เคฟ เค‰เคธ เค•เค•เฅเคทเคพ เค•เฅ‡ เคตเคธเฅเคคเฅเคฐเคพเคฃเฅ เค•เคฟเคธ เคธเฅเคฅเคพเคจ เคชเคฐ เคธเฅเคฅเคฟเคค เคนเฅˆเค‚ เคฏเคพ เค‰เคจเค•เฅ€ เคธเคŸเฅ€เค• เค†เค•เฅƒเคคเคฟ เค•เฅเคฏเคพ เคนเฅˆเฅค + +!!! Tip "เคŸเคฟเคช" + + YOLOv8 Classify เคฎเฅ‰เคกเฅ‡เคฒเฅเคธ เคฎเฅ‡เค‚ `-cls` เคธเค‚เค•เฅ‡เคคเค• เคชเฅเคฐเคฏเฅ‹เค— เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ, เคœเฅˆเคธเฅ‡ `yolov8n-cls.pt` เค”เคฐ เค‡เคจเฅเคนเฅ‡เค‚ เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) เคชเคฐเฅค + +## [เคฎเฅ‰เคกเฅ‡เคฒ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +เคฏเคนเคพเค‚ YOLOv8 เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค Classify เคฎเฅ‰เคกเฅ‡เคฒ เคฆเคฟเค–เคพเค เค—เค เคนเฅˆเค‚เฅค Detect, Segment, เค”เคฐ Pose เคฎเฅ‰เคกเฅ‡เคฒเฅเคธ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚, เคœเคฌเค•เคฟ Classify เคฎเฅ‰เคกเฅ‡เคฒ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคนเฅ‹เคคเฅ‡ เคนเฅˆเค‚เฅค + +[เคฎเฅ‰เคกเฅ‡เคฒ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) เคกเคพเค‰เคจเคฒเฅ‹เคก เคชเคนเคฒเฅ€ เคฌเคพเคฐ เค‰เคชเคฏเฅ‹เค— เคชเคฐ เคคเคพเคœเค—เฅ€ Ultralytics [เคชเฅเคฐเค•เคพเคถเคจ](https://github.com/ultralytics/assets/releases) เคธเฅ‡ เคธเฅเคตเคคเคƒ เคนเฅ‹เคคเคพ เคนเฅˆเฅค + +| เคฎเฅ‰เคกเฅ‡เคฒ | เค†เค•เคพเคฐ
(เคชเคฟเค•เฅเคธเฅ‡เคฒ) | เคคเคพเคฒเคฟเค•เคพ
เคถเฅ€เคฐเฅเคท 1 | เคคเคพเคฒเคฟเค•เคพ
เคถเฅ€เคฐเฅเคท 5 | เคธเฅเคชเฅ€เคก
เคธเฅ€เคชเฅ€เคฏเฅ‚ ONNX
(เคฎเคฟ. เคธเฅ‡เค•เค‚เคก) | เคธเฅเคชเฅ€เคก
A100 TensorRT
(เคฎเคฟ. เคธเฅ‡เค•เค‚เคก) | เคชเฅˆเคฐเคพเคฎเฅ€เคŸเคฐ
(M) | FLOPs
(B) at 640 | +|----------------------------------------------------------------------------------------------|------------------------|------------------------|------------------------|------------------------------------------|--------------------------------------------|----------------------|--------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- **เคคเคพเคฒเคฟเค•เคพ** เคฎเฅ‰เคกเฅ‡เคฒเฅ‹เค‚ เค•เฅ€ ImageNet เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคฎเคพเคจเฅเคฏเฅ€เค•เคฐเคฃ เคธเฅ‡เคŸ เคชเคฐ เคธเคŸเฅ€เค•เคคเคพ เคนเฅˆเฅค +
`yolo val classify data=path/to/ImageNet device=0` เคฆเฅเคตเคพเคฐเคพ เคชเฅเคจเคƒ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเฅ‡เค‚ +- **เคธเฅเคชเฅ€เคก** เคเค• [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ ImageNet เค•เฅ‡ เคตเฅˆเคฒ เค›เคตเคฟเคฏเฅ‹เค‚ เคชเคฐ เค”เคธเคค เคœเฅ‹เคกเคผเฅ€ เค—เคˆ เคนเฅˆเฅค +
`yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` เคฆเฅเคตเคพเคฐเคพ เคชเฅเคจเคƒ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเฅ‡เค‚ + +## เคŸเฅเคฐเฅ‡เคจ + +100 เคเคชเฅ‰เค•เฅเคธ เค•เฅ‡ เคฒเคฟเค MNIST160 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ YOLOv8n-cls เค•เฅ‹ 64 เค‡เคฎเฅ‡เคœ เค†เค•เคพเคฐ เคชเคฐ เคฐเคฟเค•เฅเคคเคฟเคฏเฅ‹เค‚ เค•เฅ‡ เคธเคพเคฅ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเฅ‡เค‚เฅค เค‰เคชเคฒเคฌเฅเคง เคตเคฟเค•เคฒเฅเคชเฅ‹เค‚ เค•เฅ€ เคชเฅ‚เคฐเฅ€ เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค [Configuration](/../usage/cfg.md) เคชเฅ‡เคœ เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-cls.yaml') # YAML เคธเฅ‡ เคเค• เคจเคฏเคพ เคฎเฅ‰เคกเฅ‡เคฒ เคฌเคจเคพเคเค‚ + model = YOLO('yolov8n-cls.pt') # เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ (เคŸเฅเคฐเฅ‡เคจเคฟเค‚เค— เค•เฅ‡ เคฒเคฟเค เคธเคฟเคซเคพเคฐเคฟเคถ เค•เฅ€ เคœเคพเคคเฅ€ เคนเฅˆ) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # YAML เคธเฅ‡ เคฌเคจเคพเคเค เค”เคฐ เคญเคพเคฐ เคŸเฅเคฐเคพเค‚เคธเคซเคฐ เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเฅ‡เคฒ เคŸเฅเคฐเฅ‡เคจ เค•เคฐเฅ‡เค‚ + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # YAML เคธเฅ‡ เคจเคฏเคพ เคฎเฅ‰เคกเฅ‡เคฒ เคฌเคจเคพเคเค‚ เค”เคฐ เค…เคšเฅเค›เฅ‡ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค *.pt เคฎเฅ‰เคกเฅ‡เคฒ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # YAML เคธเฅ‡ เคจเคฏเคพ เคฎเฅ‰เคกเฅ‡เคฒ เคฌเคจเคพเคเค, เค‰เคธเคฎเฅ‡เค‚ เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคญเคพเคฐ เคญเฅ€ เคธเฅเคฅเคพเคจเคพเค‚เคคเคฐเคฟเคค เค•เคฐเฅ‡เค‚ เค”เคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเฅเคฐเคพเคฐเฅ‚เคช + +YOLO เค•เฅเคฒเคพเคธเคฟเคซเคฟเค•เฅ‡เคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเฅเคฐเคพเคฐเฅ‚เคช [Dataset Guide](../../../datasets/classify/index.md) เคฎเฅ‡เค‚ เคตเคฟเคธเฅเคคเฅƒเคค เคฐเฅ‚เคช เคฎเฅ‡เค‚ เคฆเคฟเคฏเคพ เค—เคฏเคพ เคนเฅˆเฅค + +## เคตเฅ‡เคฒเคฟเคกเฅ‡เคŸ + +MNIST160 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n-cls เคฎเฅ‰เคกเฅ‡เคฒ เค•เฅ€ เคธเคŸเฅ€เค•เคคเคพ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚เฅค เค•เฅ‹เคˆ เค†เคฐเฅเค—เฅเคฎเฅ‡เค‚เคŸ เคšเค•เฅเคฐเคตเคพเคค เคจเคนเฅ€เค‚ เค•เคฐเคจเคพ เคšเคพเคนเคฟเค เค•เฅเคฏเฅ‹เค‚เค•เคฟ `เคฎเฅ‰เคกเฅ‡เคฒ` เค…เคชเคจเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคฏเคฅเคพเคฐเฅเคฅ เคกเฅ‡เคŸเคพ เค”เคฐ เค†เคฐเฅเค—เฅเคฎเฅ‡เค‚เคŸเฅเคธ เค•เฅ‹ เคธเฅเคฎเคฐเคฃ เคฐเค–เคคเคพ เคนเฅˆเฅค + +!!! 
Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคเค• เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-cls.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/best.pt') # เคเค• เคธเฅเคตเคšเคพเคฒเคฟเคค เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเฅ‡เคฒ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + metrics = model.val() # เค•เฅ‹เคˆ เค†เคฐเฅเค—เฅเคฎเฅ‡เค‚เคŸ เค†เคตเคถเฅเคฏเค• เคจเคนเฅ€เค‚ เคนเฅˆเค‚, เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค”เคฐ เคธเฅ‡เคŸเคฟเค‚เค—เฅเคธ เคฏเคพเคฆ เคฐเค–เฅ‡ เคœเคพเคคเฅ‡ เคนเฅˆเค‚ + metrics.top1 # เคถเฅ€เคฐเฅเคท1 เคธเคŸเฅ€เค•เคคเคพ + metrics.top5 # เคถเฅ€เคฐเฅเคท5 เคธเคŸเฅ€เค•เคคเคพ + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเฅ‡เคฒ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + yolo classify val model=path/to/best.pt # เค•เคธเฅเคŸเคฎ เคฎเฅ‰เคกเฅ‡เคฒ เค•เคพ เคฎเฅ‚เคฒเฅเคฏเคพเค‚เค•เคจ เค•เคฐเฅ‡เค‚ + ``` + +## เคชเฅเคฐเฅ‡เคกเคฟเค•เฅเคŸ + +เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n-cls เคฎเฅ‰เคกเฅ‡เคฒ เค•เคพ เค‰เคชเคฏเฅ‹เค— เคคเคธเฅเคตเฅ€เคฐเฅ‹เค‚ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคšเคฒเคพเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค เค•เคฐเฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n-cls.pt') # เคเค• เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('path/to/best.pt') # เคเค• เคธเฅเคตเคšเคพเคฒเคฟเคค เคฎเฅ‰เคกเฅ‡เคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเฅ‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเฅ‡เค‚ + results = model('https://ultralytics.com/images/bus.jpg') # เคเค• เค‡เคฎเฅ‡เคœ เคชเคฐ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเฅ‡เค‚ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # เค†เคงเคฟเค•เคพเคฐเคฟเค• เคฎเฅ‰เคกเฅ‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเฅ‡เค‚ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # เค•เคธเฅเคŸเคฎ เคฎเฅ‰เคกเฅ‡เคฒ เค•เฅ‡ เคธเคพเคฅ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เค•เคฐเฅ‡เค‚ + ``` + +เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจ เคชเฅ‚เคฐเคพ เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ เคจเคฟเคฐเฅเคฏเคพเคค เค•เฅ‹ เคธเฅ€เคงเฅ‡ เคชเฅ‚เคฐเฅเคตเคพเคจเฅเคฎเคพเคจเคฟเคค เคฎเฅ‰เคกเฅ‡เคฒ เคชเคฐ เคฒเคพเค—เฅ‚ เค•เคฐ เคธเค•เคคเฅ‡ เคนเฅˆเค‚, เคœเฅˆเคธเฅ‡ `yolo predict model=yolov8n-cls.onnx`เฅค เคเค•เฅเคธเคชเฅ‹เคฐเฅเคŸ เคชเฅ‚เคฐเฅเคฃ เคนเฅ‹เคจเฅ‡ เค•เฅ‡ เคฌเคพเคฆ, เค…เคชเคจเฅ‡ เคฎเฅ‰เคกเฅ‡เคฒ เค•เฅ‡ เค‰เคชเคฏเฅ‹เค— เค•เฅ‡ เคฒเคฟเค เค†เคชเค•เฅ‹ เค‰เคชเคฏเฅ‹เค— เค‰เคฆเคพเคนเคฐเคฃ เคฆเคฟเค–เคพเค เค—เค เคนเฅˆเค‚เฅค + +## เคเค•เฅเคธเคชเฅ‹เคฐเฅเคŸ + +YOLOv8n-cls เคฎเฅ‰เคกเคฒ เค•เฅ‹ ONNX, CoreML เค†เคฆเคฟ เคœเฅˆเคธเฅ‡ เคตเคฟเคญเคฟเคจเฅเคจ เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ เคฎเฅ‡เค‚ เคจเคฟเคฐเฅเคฏเคพเคค เค•เคฐเฅ‡เค‚เฅค + +!!! 
+ +## Export + +Export a YOLOv8n-cls model to a different format like ONNX, CoreML, etc. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-cls.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-cls export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-cls.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------|-------------------|-------|----------|-----------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | ✅ | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | ✅ | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | ✅ | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | ✅ | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | ❌ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | ✅ | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | ✅ | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | ✅ | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | ✅ | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | ✅ | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/hi/tasks/classify.md:Zone.Identifier b/ultralytics/docs/hi/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git
a/ultralytics/docs/hi/tasks/detect.md b/ultralytics/docs/hi/tasks/detect.md new file mode 100755 index 0000000..6a55e73 --- /dev/null +++ b/ultralytics/docs/hi/tasks/detect.md @@ -0,0 +1,186 @@ +--- +comments: true +description: Official documentation for YOLOv8 by Ultralytics. Learn how to train, validate, predict and export models in various formats, including detailed performance stats. +keywords: YOLOv8, Ultralytics, object detection, pretrained models, training, validation, prediction, model export, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# Object Detection + +Object detection examples + +Object detection is a task that involves identifying the location and class of objects in an image or video stream. + +The output of an object detector is a set of bounding boxes that enclose the objects in the image, along with class labels and confidence scores for each box. Object detection is a good choice when you need to identify objects of interest in a scene, but don't need to know exactly where the object is or its exact shape. + +
+ Watch: Object Detection with Pre-trained Ultralytics YOLOv8 Model. +
+ + +!!! Tip "Tip" + + YOLOv8 Detect models are the default YOLOv8 models, i.e. `yolov8n.pt`, and are pretrained on [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Detect models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size<br>(pixels) | mAP<sup>val<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) | +|-------|------------------|----------------------|---------------------------|--------------------------------|---------------|--------------| +| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | +| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | +| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | +| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | +| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + +- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset. +
`yolo` เคฆเฅเคตเคพเคฐเคพ เคชเฅเคจเคƒ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเฅ‡เค‚ `เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคตเคฟเคจเฅเคฏเคพเคธ เค•เคฐเฅ‡เค‚ yolo val data=coco.yaml device=0` +- **Speed** [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + เค‡เค‚เคธเฅเคŸเฅ‡เค‚เคธ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเค•เฅ‡ COCO val เค›เคตเคฟเคฏเฅ‹เค‚ เคชเคฐ เค”เคธเคค เคฒเคฟเคฏเคพ เคœเคพเคคเคพ เคนเฅˆเฅค +
`yolo` เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคชเฅเคจเคƒ เค‰เคคเฅเคชเคจเฅเคจ เค•เคฐเฅ‡เค‚ `เค•เฅ‡ เคฆเฅเคตเคพเคฐเคพ เคตเคฟเคจเฅเคฏเคพเคธ เค•เคฐเฅ‡เค‚ yolo val data=coco128.yaml batch=1 device=0|cpu` + +## เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ + +100 เคฏเฅเค—เฅ‹เค‚ เคฎเฅ‡เค‚ 640 เค†เค•เฅƒเคคเคฟ เคตเคพเคฒเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฏเฅ‹เคฒเฅ‹เคตเฅ€8 เคเคจ เค•เฅ‹ COCO128 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚เฅค เค‰เคชเคฒเคฌเฅเคง เคคเคพเคฐเฅเค•เคฟเค• เคคเคฐเฅเค•เฅ‹เค‚ เค•เฅ€ เคชเฅ‚เคฐเฅ€ เคธเฅ‚เคšเฅ€ เค•เฅ‡ เคฒเคฟเค [เค•เฅ‰เคจเฅเคซเคผเคฟเค—เคฐเฅ‡เคถเคจ](/../usage/cfg.md) เคชเฅƒเคทเฅเค  เคฆเฅ‡เค–เฅ‡เค‚เฅค + +!!! Example "เค‰เคฆเคพเคนเคฐเคฃ" + + === "Python" + + ```python + from ultralytics import YOLO + + # เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.yaml') # YAML เคธเฅ‡ เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค + model = YOLO('yolov8n.pt') # เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เค•เฅ‡ เคฒเคฟเค เคธเคฟเคซเคพเคฐเคฟเคถ เค•เคฟเค เค—เค เคชเฅ‚เคฐเฅเคตเคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เคฎเฅ‰เคกเคฒ เคฒเฅ‹เคก เค•เคฐเฅ‡เค‚ + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # YAML เคธเฅ‡ เคฌเคจเคพเคเค‚ เค”เคฐ เคญเคพเคฐ เคŸเฅเคฐเคพเค‚เคธเคซเคฐ เค•เคฐเฅ‡เค‚ เค”เคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + + # เคฎเฅ‰เคกเคฒ เค•เฅ‹ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค เค•เคฐเฅ‡เค‚ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAML เคธเฅ‡ เคเค• เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเค•เคฐ เค–เคพเคฒเฅ€ เคธเฅ‡ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # เคชเฅ‚เคฐเฅเคต เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค *.pt เคฎเฅ‰เคกเคฒ เคธเฅ‡ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # เคฏเฅˆเคคเคพเคฏเคคเฅเคฎเคฟเค• เคฐเฅ‚เคช เคธเฅ‡ เคญเคพเคฐ เคŸเฅเคฐเคพเค‚เคธเคซเคฐ เค•เคฐเค•เฅ‡ เคจเคฏเคพ เคฎเฅ‰เคกเคฒ เคฌเคจเคพเคเค เค”เคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ เคถเฅเคฐเฅ‚ เค•เคฐเฅ‡เค‚ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเฅเคฐเคพเคฐเฅ‚เคช + +YOLO เคกเคฟเคŸเฅ‡เค•เฅเคถเคจ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเฅเคฐเคพเคฐเฅ‚เคช เค•เฅ‹ [เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค—เคพเค‡เคก](../../../datasets/detect/index.md) เคฎเฅ‡เค‚ เคตเคฟเคธเฅเคคเคพเคฐ เคธเฅ‡ เคฆเฅ‡เค–เคพ เคœเคพ เคธเค•เคคเคพ เคนเฅˆเฅค เค•เฅƒเคชเคฏเคพ เค…เคชเคจเฅ‡ เคฎเฅŒเคœเฅ‚เคฆเคพ เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เค•เฅ‹ เค…เคจเฅเคฏ เคชเฅเคฐเคพเคฐเฅ‚เคชเฅ‹เค‚ (เคœเฅˆเคธเฅ‡ COCO เค†เคฆเคฟ) เคธเฅ‡ YOLO เคชเฅเคฐเคพเคฐเฅ‚เคช เคฎเฅ‡เค‚ เคฌเคฆเคฒเคจเฅ‡ เค•เฅ‡ เคฒเคฟเค [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) เค‰เคชเค•เคฐเคฃ เค•เคพ เค‰เคชเคฏเฅ‹เค— เค•เคฐเฅ‡เค‚เฅค + +## เคฎเคพเคจเฅเคฏเคคเคพ + +COCO128 เคกเฅ‡เคŸเคพเคธเฅ‡เคŸ เคชเคฐ เคชเฅเคฐเคถเคฟเค•เฅเคทเคฟเคค YOLOv8n เคฎเฅ‰เคกเคฒ เค•เฅ€ เคธเคŸเฅ€เค•เคคเคพ เค•เฅ‹ เคฎเคพเคจเฅเคฏเคคเคพ เคฆเฅ‡เค‚เฅค เคฎเฅ‰เคกเคฒ เคชเฅเคฐเคฆเคฐเฅเคถเคจ เคธเฅ‡ เคœเฅเคกเคผเฅ€ เค•เฅ‹เคˆ เคตเคฟเคงเคฟ เคจเคนเฅ€เค‚ เคนเฅ‹เคจเฅ€ เคšเคพเคนเคฟเคเฅค + +!!! 
+ +## Val + +Validate trained YOLOv8n model accuracy on the COCO128 dataset. No arguments need to be passed, as the `model` retains its training `data` and settings as model attributes. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # a list containing map50-95 of each category + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # val official model + yolo detect val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n model to run predictions on images. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page.
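+ +The returned `results` can also be inspected programmatically. A minimal sketch of reading the predicted boxes, assuming a single input image (`boxes`, `names` and the `xyxy`/`conf`/`cls` attributes follow the Ultralytics `Results` API): + +```python +from ultralytics import YOLO + +model = YOLO('yolov8n.pt') +results = model('https://ultralytics.com/images/bus.jpg') + +# Each box carries xyxy pixel coordinates, a confidence score and a class id +for box in results[0].boxes: + x1, y1, x2, y2 = box.xyxy[0].tolist() + print(results[0].names[int(box.cls)], float(box.conf), (x1, y1, x2, y2)) +```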
+ +## Export + +Export a YOLOv8n model to a different format like ONNX, CoreML, etc. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8 export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------|-------------------|-------|----------|-----------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | ✅ | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | ✅ | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | ✅ | `imgsz`, `half`, `int8` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | ✅ | `imgsz`, `keras`, `int8` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | ❌ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | ✅ | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | ✅ | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | ✅ | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | ✅ | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | ✅ | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/hi/tasks/detect.md:Zone.Identifier b/ultralytics/docs/hi/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/tasks/index.md
b/ultralytics/docs/hi/tasks/index.md new file mode 100755 index 0000000..7aaf33b --- /dev/null +++ b/ultralytics/docs/hi/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: Learn about the core computer vision tasks YOLOv8 can perform, including detection, segmentation, classification and pose estimation, and how to use them in your AI projects. +keywords: Ultralytics, YOLOv8, detection, segmentation, classification, pose estimation, AI framework, computer vision tasks +--- + +# Ultralytics YOLOv8 Tasks + +
+Ultralytics YOLO supported tasks + +YOLOv8 is an AI framework that supports multiple computer vision **tasks**. The framework can be used to perform [detection](detect.md), [segmentation](segment.md), [classification](classify.md) and [pose](pose.md) estimation. Each of these tasks has a different objective and use case. + +!!! Note "Note" + + 🚧 Our multi-language documentation is currently under construction, and we are working hard to improve it. Thank you for your patience! 🙏 + +
+ Watch: Explore Ultralytics YOLO Tasks: Object Detection, Segmentation, Tracking and Pose Estimation. +
+ +## [Detection](detect.md) + +Detection is the primary task supported by YOLOv8. It involves detecting objects in an image or video frame and drawing bounding boxes around them. The detected objects are classified into different categories based on their features. YOLOv8 can detect multiple objects in a single image or video frame with high accuracy and speed. + +[Detection Examples](detect.md){ .md-button } + +## [Segmentation](segment.md) + +Segmentation is a task that involves segmenting an image into different regions based on the content of the image. Each region is assigned a label based on its content. This task is useful in applications such as image segmentation and medical imaging. YOLOv8 uses a variant of the U-Net architecture to perform segmentation. + +[Segmentation Examples](segment.md){ .md-button } + +## [Classification](classify.md) + +Classification is a task that involves classifying an image into different categories. YOLOv8 can be used to classify images based on their content. It uses a variant of the EfficientNet architecture to perform classification. + +[Classification Examples](classify.md){ .md-button } + +## [Pose](pose.md) + +Pose/keypoint detection is a task that involves detecting specific points in an image or video frame. These points are referred to as keypoints and are used to track movement or estimate pose. YOLOv8 can detect keypoints in an image or video frame with high accuracy and speed. + 
+[Pose Examples](pose.md){ .md-button } + +## Conclusion + +YOLOv8 supports multiple tasks, including detection, segmentation, classification and keypoint detection. Each of these tasks has a different objective and use case. By understanding the differences between these tasks, you can choose the appropriate task for your computer vision application. diff --git a/ultralytics/docs/hi/tasks/index.md:Zone.Identifier b/ultralytics/docs/hi/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/tasks/pose.md b/ultralytics/docs/hi/tasks/pose.md new file mode 100755 index 0000000..1a7a7a9 --- /dev/null +++ b/ultralytics/docs/hi/tasks/pose.md @@ -0,0 +1,183 @@ +--- +comments: true +description: Learn how to use Ultralytics YOLOv8 for pose estimation tasks. Find pretrained models, and learn how to train, validate, predict and export your own. +keywords: Ultralytics, YOLO, YOLOv8, pose estimation, keypoints detection, object detection, pre-trained models, machine learning, artificial intelligence +--- + +# Pose Estimation + +Pose estimation examples + +Pose estimation is a task that involves identifying the location of specific points in an image, usually referred to as keypoints. The keypoints can represent various parts of the object such as joints, landmarks or other distinctive features. The locations of the keypoints are usually represented as a set of 2D `[x, y]` or 3D `[x, y, visible]` coordinates. + +The output of a pose estimation model is a set of points that represent the keypoints on an object in the image, usually along with the confidence scores for each point. Pose estimation is a good choice when you need to identify specific parts of an object in a scene, and their location in relation to each other. + +
+ Watch: Pose Estimation with Ultralytics YOLOv8. +
+ +!!! Tip "Tip" + + YOLOv8 _pose_ models use the `-pose` suffix, i.e. `yolov8n-pose.pt`. These models are trained on the [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) dataset and are suitable for a variety of pose estimation tasks. + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Pose models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size<br>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) | +|-------|------------------|-----------------------|--------------------|---------------------------|--------------------------------|---------------|--------------| +| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | +| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | +| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | +| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | +| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | +| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](http://cocodataset.org) dataset. +
Reproduce by `yolo val pose data=coco-pose.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. +
 Reproduce by `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## Train + +Train a YOLOv8-pose model on the COCO128-pose dataset. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.yaml') # build a new model from YAML + model = YOLO('yolov8n-pose.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Start training from a pretrained *.pt model + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Dataset format + +YOLO pose dataset format can be found in detail in the [Dataset Guide](../../../datasets/pose/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool. A sketch of one label line follows.
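+ +As a rough sketch (the numbers are invented for illustration, and this assumes the 3-values-per-keypoint variant with a visibility flag), a pose label line stores a class id, a normalized box, then `x y visibility` triplets for each keypoint: + +```python +# Parse one hypothetical YOLO pose label line +line = '0 0.50 0.55 0.30 0.80 0.51 0.30 2 0.47 0.33 2' +values = [float(v) for v in line.split()] +cls, box, flat = int(values[0]), values[1:5], values[5:] +keypoints = [tuple(flat[i:i + 3]) for i in range(0, len(flat), 3)] # (x, y, visibility) +print(cls, box, keypoints) +```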
+ +## Val + +Validate trained YOLOv8n-pose model accuracy on the COCO128-pose dataset. No arguments need to be passed, as the `model` retains its training `data` and settings as model attributes. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # a list containing map50-95 of each category + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # val official model + yolo pose val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n-pose model to run predictions on images. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ```
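+ +The predicted keypoints can be read off the results. A minimal sketch, assuming a single input image (`keypoints` with its `xy`/`conf` attributes follows the Ultralytics `Results` API): + +```python +from ultralytics import YOLO + +model = YOLO('yolov8n-pose.pt') +results = model('https://ultralytics.com/images/bus.jpg') + +# keypoints.xy has shape (num_persons, num_keypoints, 2) in pixel coordinates +kpts = results[0].keypoints +print(kpts.xy.shape) +print(kpts.conf) # per-keypoint confidence, if the model predicts visibility +```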
+ +## Export + +Export a YOLOv8n-pose model to a different format like ONNX, CoreML, etc. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-pose.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-pose export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-pose.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------|-------------------|-------|----------|-----------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | ✅ | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | ✅ | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | ✅ | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | ✅ | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | ❌ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | ✅ | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | ✅ | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | ✅ | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | ✅ | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | ✅ | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/hi/tasks/pose.md:Zone.Identifier b/ultralytics/docs/hi/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/hi/tasks/segment.md b/ultralytics/docs/hi/tasks/segment.md new file mode 100755 index 0000000..cd111b2 --- /dev/null +++ b/ultralytics/docs/hi/tasks/segment.md @@ -0,0 +1,187 @@ +--- +comments: true +description: Learn how to use instance segmentation models with Ultralytics YOLO. Instructions on training, validation, image prediction and model export. +keywords: yolov8, instance segmentation, Ultralytics, COCO dataset, image segmentation, object detection, model training, model validation, image prediction, model export +--- + +# Instance Segmentation + +Instance segmentation goes a step further than object detection and involves identifying individual objects in an image and segmenting them from the rest of the image. + +The output of an instance segmentation model is a set of masks or contours that outline each object in the image, along with class labels and confidence scores for each object. Instance segmentation is useful when you need to know not only where objects are in an image, but also what their exact shape is. + +
+ Watch: Run Segmentation with Pre-Trained Ultralytics YOLOv8 Model in Python. +
+ +!!! Tip "Tip" + + YOLOv8 Segment models use the `-seg` suffix, i.e. `yolov8n-seg.pt`, and are pretrained on [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +YOLOv8 pretrained Segment models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset. + +[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use. + +| Model | size<br>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) | +|-------|------------------|----------------------|-----------------------|---------------------------|--------------------------------|---------------|--------------| +| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | +| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | +| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | +| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | +| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + +- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset. +
Reproduce by `yolo val segment data=coco.yaml device=0` +- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. +
Reproduce by `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Train + +Train YOLOv8n-seg on the COCO128-seg dataset for 100 epochs at image size 640. For a full list of available arguments see the [Configuration](/../usage/cfg.md) page. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.yaml') # build a new model from YAML + model = YOLO('yolov8n-seg.pt') # load a pretrained model (recommended for training) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # build from YAML and transfer weights + + # Train the model + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Build a new model from YAML and start training from scratch + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Start training from a pretrained *.pt model + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Build a new model from YAML, transfer pretrained weights to it and start training + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Dataset format + +YOLO segmentation dataset format can be found in detail in the [Dataset Guide](../../../datasets/segment/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool. A sketch of one polygon label line follows.
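+ +As a rough sketch (the numbers are invented for illustration), each segmentation label line stores a class id followed by a normalized polygon as alternating x y pairs: + +```python +# Parse one hypothetical YOLO segmentation label line into (x, y) polygon points +line = '0 0.12 0.30 0.46 0.28 0.52 0.71 0.15 0.69' +values = line.split() +cls = int(values[0]) +points = [(float(x), float(y)) for x, y in zip(values[1::2], values[2::2])] +print(cls, points) +```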
+ +## Val + +Validate trained YOLOv8n-seg model accuracy on the COCO128-seg dataset. No arguments need to be passed, as the `model` retains its training `data` and settings as model attributes. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Validate the model + metrics = model.val() # no arguments needed, dataset and settings remembered + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # a list containing map50-95(B) of each category + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # a list containing map50-95(M) of each category + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # val official model + yolo segment val model=path/to/best.pt # val custom model + ``` + +## Predict + +Use a trained YOLOv8n-seg model to run predictions on images. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom model + + # Predict with the model + results = model('https://ultralytics.com/images/bus.jpg') # predict on an image + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # predict with official model + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # predict with custom model + ``` + +See full `predict` mode details in the [Predict](https://docs.ultralytics.com/modes/predict/) page.
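+ +The predicted masks can be read off the results. A minimal sketch, assuming a single input image (`masks` with its `data`/`xy` attributes follows the Ultralytics `Results` API): + +```python +from ultralytics import YOLO + +model = YOLO('yolov8n-seg.pt') +results = model('https://ultralytics.com/images/bus.jpg') + +# masks.data is a (num_objects, H, W) binary mask tensor; masks.xy holds polygon outlines +masks = results[0].masks +if masks is not None: # None when nothing is detected + print(masks.data.shape) + print(len(masks.xy), 'polygon outlines') +```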
+ +## Export + +Export a YOLOv8n-seg model to a different format like ONNX, CoreML, etc. + +!!! Example "Example" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n-seg.pt') # load an official model + model = YOLO('path/to/best.pt') # load a custom trained model + + # Export the model + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # export official model + yolo export model=path/to/best.pt format=onnx # export custom trained model + ``` + +Available YOLOv8-seg export formats are in the table below. You can predict or validate directly on exported models, i.e. `yolo predict model=yolov8n-seg.onnx`. Usage examples are shown for your model after export completes. + +| Format | `format` Argument | Model | Metadata | Arguments | +|--------|-------------------|-------|----------|-----------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | ✅ | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | ✅ | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | ✅ | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | ✅ | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | ❌ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | ✅ | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | ✅ | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | ✅ | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | ✅ | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | ✅ | `imgsz`, `half` | + +See full `export` details in the [Export](https://docs.ultralytics.com/modes/export/) page. diff --git a/ultralytics/docs/hi/tasks/segment.md:Zone.Identifier b/ultralytics/docs/hi/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/hi/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/index.md b/ultralytics/docs/ja/index.md new file mode 100755 index 0000000..c66c3f9 --- /dev/null +++ b/ultralytics/docs/ja/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: A complete guide to Ultralytics YOLOv8, a high-speed, high-accuracy object detection and image segmentation model. Installation, prediction and training tutorials, and more. +keywords: Ultralytics, YOLOv8, object detection, image segmentation, machine learning, deep learning, computer vision, YOLOv8 install, YOLOv8 predict, YOLOv8 train, YOLO history, YOLO license +--- + +
+ Ultralytics YOLO banner + Links: Ultralytics GitHub, LinkedIn, Twitter, YouTube, TikTok, Instagram, Discord + Badges: Ultralytics CI, Ultralytics code coverage, YOLOv8 citation, Docker pulls, Discord + Run on Gradient, Open in Colab, Open in Kaggle +
+ +ๅ…จใๆ–ฐใ—ใ„[Ultralytics](https://ultralytics.com)ใฎ[YOLOv8](https://github.com/ultralytics/ultralytics)ใ‚’็ดนไป‹ใ—ใพใ™ใ€‚ใ“ใ‚Œใฏใ€ๅฎŸๆ™‚้–“ใงๅ‹•ไฝœใ™ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใŠใ‚ˆใณ็”ปๅƒใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒขใƒ‡ใƒซใฎๆœ€ๆ–ฐใƒใƒผใ‚ธใƒงใƒณใงใ™ใ€‚YOLOv8ใฏใ€ใƒ‡ใ‚ฃใƒผใƒ—ใƒฉใƒผใƒ‹ใƒณใ‚ฐใจใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใฎๆœ€ๅ…ˆ็ซฏใฎ้€ฒๆญฉใซๅŸบใฅใ„ใฆใŠใ‚Šใ€้€Ÿๅบฆใจ็ฒพๅบฆใฎ้ขใงๆฏ”้กžใฎใชใ„ๆ€ง่ƒฝใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใใฎๅˆ็†ๅŒ–ใ•ใ‚ŒใŸ่จญ่จˆใซใ‚ˆใ‚Šใ€ใ‚จใƒƒใ‚ธใƒ‡ใƒใ‚คใ‚นใ‹ใ‚‰ใ‚ฏใƒฉใ‚ฆใƒ‰APIใพใงใ€ใ•ใพใ–ใพใชใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใ‚„ใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใƒ—ใƒฉใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒ ใธใฎ้ฉๅฟœใŒๅฎนๆ˜“ใงใ™ใ€‚ + +YOLOv8ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใ‚’ๆŽข็ดขใ—ใ€ใใฎ็‰นๅพดใจ่ƒฝๅŠ›ใ‚’็†่งฃใ—ใ€ๆดป็”จใ™ใ‚‹ใŸใ‚ใฎๅŒ…ๆ‹ฌ็š„ใชใƒชใ‚ฝใƒผใ‚นใ‚’ๆไพ›ใ—ใพใ™ใ€‚ๆฉŸๆขฐๅญฆ็ฟ’ใฎ็ตŒ้จ“่€…ใงใ‚ใ‚Œใ€ๅˆ†้‡Žใฎๆ–ฐๅ…ฅใ‚Šใงใ‚ใ‚Œใ€ใ“ใฎใƒใƒ–ใฏใ‚ใชใŸใฎใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใงYOLOv8ใฎใƒใƒ†ใƒณใ‚ทใƒฃใƒซใ‚’ๆœ€ๅคง้™ใซๅผ•ใๅ‡บใ™ใ“ใจใ‚’็›ฎๆŒ‡ใ—ใฆใ„ใพใ™ใ€‚ + +!!! Note "ใƒŽใƒผใƒˆ" + + ๐Ÿšง ๅคš่จ€่ชžใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใฏ็พๅœจไฝœๆˆไธญใงใ‚ใ‚Šใ€ๆ”นๅ–„ใซๅŠชใ‚ใฆใŠใ‚Šใพใ™ใ€‚ใŠๅพ…ใกใ„ใŸใ ใใ€ใ‚ใ‚ŠใŒใจใ†ใ”ใ–ใ„ใพใ™๏ผ ๐Ÿ™ + +## ใฏใ˜ใ‚ใซ + +- pipใง`ultralytics`ใ‚’**ใ‚คใƒณใ‚นใƒˆใƒผใƒซ**ใ—ใ€ๆ•ฐๅˆ†ใง็จผๅƒ   [:material-clock-fast: ใฏใ˜ใ‚ใซ](quickstart.md){ .md-button } +- YOLOv8ใงๆ–ฐใ—ใ„็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใซ**ไบˆๆธฌ**   [:octicons-image-16: ็”ปๅƒใงไบˆๆธฌ](modes/predict.md){ .md-button } +- ็‹ฌ่‡ชใฎใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงๆ–ฐใ—ใ„YOLOv8ใƒขใƒ‡ใƒซใ‚’**ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ**   [:fontawesome-solid-brain: ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ](modes/train.md){ .md-button } +- ใ‚ปใ‚ฐใƒกใƒณใƒˆใ€ใ‚ฏใƒฉใ‚นๅˆ†ใ‘ใ€ใƒใƒผใ‚บใ€ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใชใฉใฎYOLOv8ใ‚ฟใ‚นใ‚ฏใ‚’**ๆŽขๆฑ‚**   [:material-magnify-expand: ใ‚ฟใ‚นใ‚ฏใ‚’ๆŽขๆฑ‚](tasks/index.md){ .md-button } + +
<div align="center">
  [YouTube video embed]
  Watch: How to train a YOLOv8 model on your custom dataset in Google Colab.
</div>
## YOLO: A Brief History

[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), a popular object detection and image segmentation model, was developed by Joseph Redmon and Ali Farhadi at the University of Washington. Launched in 2015, YOLO quickly gained popularity for its high speed and accuracy.

- [YOLOv2](https://arxiv.org/abs/1612.08242), released in 2016, improved the original model by incorporating batch normalization, anchor boxes, and dimension clusters.
- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), launched in 2018, further enhanced the model's performance using a more efficient backbone network, multiple anchors, and spatial pyramid pooling.
- [YOLOv4](https://arxiv.org/abs/2004.10934) was released in 2020, introducing innovations like Mosaic data augmentation, a new anchor-free detection head, and a new loss function.
- [YOLOv5](https://github.com/ultralytics/yolov5) further improved the model's performance and added new features such as hyperparameter optimization, integrated experiment tracking, and automatic export to popular export formats.
- [YOLOv6](https://github.com/meituan/YOLOv6) was open-sourced by [Meituan](https://about.meituan.com/) in 2022 and is in use in many of the company's autonomous delivery robots.
- [YOLOv7](https://github.com/WongKinYiu/yolov7) added additional tasks such as pose estimation on the COCO keypoints dataset.
- [YOLOv8](https://github.com/ultralytics/ultralytics) is the latest version of YOLO by Ultralytics. As a cutting-edge, state-of-the-art model, YOLOv8 builds on the success of previous versions, introducing new features and improvements for enhanced performance, flexibility, and efficiency. YOLOv8 supports a full range of vision AI tasks, including [detection](tasks/detect.md), [segmentation](tasks/segment.md), [pose estimation](tasks/pose.md), [tracking](modes/track.md), and [classification](tasks/classify.md). This versatility allows users to leverage YOLOv8's capabilities across diverse applications and domains.

## YOLO Licenses: How is Ultralytics' YOLO licensed?

Ultralytics offers two licensing options to accommodate diverse use cases:

- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/licenses/) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for details.
- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).

Our licensing strategy is designed to ensure that any improvements to our open-source projects are returned to the community. We hold the principles of open source close to our hearts, and our mission is to guarantee that our contributions can be used and expanded upon in ways that are beneficial to all. ❤️

diff --git a/ultralytics/docs/ja/models/fast-sam.md b/ultralytics/docs/ja/models/fast-sam.md new file mode 100755 index 0000000..0f148b2 --- /dev/null +++ b/ultralytics/docs/ja/models/fast-sam.md @@ -0,0 +1,193 @@
---
comments: true
description: FastSAM is a CNN-based solution for real-time segmentation of objects in images. It offers user interaction, improved computational efficiency, and adaptability to a variety of vision tasks.
keywords: FastSAM, machine learning, CNN-based solution, object segmentation, real-time solution, Ultralytics, vision tasks, image processing, industrial applications, user interaction
---

# Fast Segment Anything Model (FastSAM)

The Fast Segment Anything Model (FastSAM) is a novel, real-time CNN-based solution for the Segment Anything task. This task is designed to segment any object within an image based on various possible user interaction prompts. FastSAM significantly reduces computational demands while maintaining competitive performance, making it a practical choice for a variety of vision tasks.

![Fast Segment Anything Model (FastSAM) architecture overview](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg)

## Overview

FastSAM is designed to address the limitations of the [Segment Anything Model (SAM)](sam.md), a heavy Transformer model with substantial computational resource requirements. FastSAM decouples the Segment Anything task into two sequential stages: all-instance segmentation and prompt-guided selection. The first stage uses [YOLOv8-seg](../tasks/segment.md) to produce the segmentation masks of all instances in the image. In the second stage, it outputs the region of interest corresponding to the prompt.

## Key Features

1. **Real-time Solution:** By leveraging the computational efficiency of CNNs, FastSAM provides a real-time solution for the Segment Anything task, making it valuable for industrial applications that require quick results.
**ๅŠน็އใจๆ€ง่ƒฝ๏ผš** FastSAMใฏใ€่จˆ็ฎ—ใŠใ‚ˆใณใƒชใ‚ฝใƒผใ‚นใฎ่ฆๆฑ‚ใ‚’ๅคงๅน…ใซๅ‰Šๆธ›ใ—ใชใŒใ‚‰ใ€ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใฎๅ“่ณชใ‚’ๆใชใ†ใ“ใจใชใใ€SAMใจๅŒ็ญ‰ใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’้”ๆˆใ—ใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใŒๅฏ่ƒฝใจใชใ‚Šใพใ™ใ€‚ + +3. **ใƒ—ใƒญใƒณใƒ—ใƒˆใ‚ฌใ‚คใƒ‰ใฎใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ๏ผš** FastSAMใฏใ€ใ•ใพใ–ใพใชใƒฆใƒผใ‚ถใƒผๅฏพ่ฉฑใฎใƒ—ใƒญใƒณใƒ—ใƒˆใซๅŸบใฅใ„ใฆ็”ปๅƒๅ†…ใฎไปปๆ„ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€ๆง˜ใ€…ใชใ‚ทใƒŠใƒชใ‚ชใงใฎๆŸ”่ปŸๆ€งใจ้ฉๅฟœๆ€งใŒๆไพ›ใ•ใ‚Œใพใ™ใ€‚ + +4. **YOLOv8-segใซๅŸบใฅใ๏ผš** FastSAMใฏใ€ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒ–ใƒฉใƒณใƒใ‚’ๅ‚™ใˆใŸใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใงใ‚ใ‚‹[YOLOv8-seg](../tasks/segment.md)ใซๅŸบใฅใ„ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€็”ปๅƒๅ†…ใฎใ™ในใฆใฎใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใฎใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒžใ‚นใ‚ฏใ‚’ๅŠนๆžœ็š„ใซ็”Ÿๆˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +5. **ใƒ™ใƒณใƒใƒžใƒผใ‚ฏใงใฎ็ซถๅˆๅŠ›ใฎใ‚ใ‚‹็ตๆžœ๏ผš** MS COCOใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒ—ใƒญใƒใƒผใ‚ถใƒซใ‚ฟใ‚นใ‚ฏใซใŠใ„ใฆใ€FastSAMใฏๅ˜ไธ€ใฎNVIDIA RTX 3090ไธŠใงใฎSAMใ‚ˆใ‚Šใ‚‚ใฏใ‚‹ใ‹ใซ้ซ˜้€Ÿใซ้ซ˜ๅพ—็‚นใ‚’็ฒๅพ—ใ—ใ€ใใฎๅŠน็އๆ€งใจ่ƒฝๅŠ›ใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ + +6. **ๅฎŸ็”จ็š„ใชๅฟœ็”จ๏ผš** ๆๆกˆใ•ใ‚ŒใŸใ‚ขใƒ—ใƒญใƒผใƒใฏใ€็พๅœจใฎๆ–นๆณ•ใ‚ˆใ‚Šใ‚‚ๆ•ฐๅๅ€ใพใŸใฏๆ•ฐ็™พๅ€ใ‚‚้ซ˜้€Ÿใช้€Ÿๅบฆใงใ€้žๅธธใซ้ซ˜้€Ÿใชvisionใ‚ฟใ‚นใ‚ฏใฎๆ–ฐใ—ใ„ๅฎŸ็”จ็š„ใชใ‚ฝใƒชใƒฅใƒผใ‚ทใƒงใƒณใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +7. **ใƒขใƒ‡ใƒซใฎๅœง็ธฎใฎๅฏ่ƒฝๆ€ง๏ผš** FastSAMใฏใ€ๆง‹้€ ใธใฎไบบๅทฅ็š„ใชไบ‹ๅ‰ๆกไปถใ‚’ๅฐŽๅ…ฅใ™ใ‚‹ใ“ใจใซใ‚ˆใ‚Šใ€่จˆ็ฎ—่ฒ ่ทใ‚’ๅคงๅน…ใซๅ‰Šๆธ›ใ™ใ‚‹ๅฏ่ƒฝใช็ตŒ่ทฏใ‚’็คบใ—ใ€ไธ€่ˆฌ็š„ใชใƒ“ใ‚ธใƒงใƒณใ‚ฟใ‚นใ‚ฏใฎๅคง่ฆๆจกใƒขใƒ‡ใƒซใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎๆ–ฐใŸใชๅฏ่ƒฝๆ€งใ‚’้–‹ใใ“ใจใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ + +## ๅˆฉ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณๅ‹•ไฝœใƒขใƒผใƒ‰ + +ใ“ใฎ่กจใฏใ€ๅˆฉ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใจใใ‚Œใžใ‚Œใฎ็‰นๅฎšใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใ‚ฆใ‚งใ‚คใƒˆใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณInferenceใ€Validationใ€Trainingใ€Exportใชใฉใฎ็•ฐใชใ‚‹ๆ“ไฝœใƒขใƒผใƒ‰ใจใฎไบ’ๆ›ๆ€งใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใƒขใƒผใƒ‰ใฏโœ…ใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใชใ„ใƒขใƒผใƒ‰ใฏโŒใฎ็ตตๆ–‡ๅญ—ใง็คบใ•ใ‚Œใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใฎ็จฎ้กž | ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใ‚ฆใ‚งใ‚คใƒˆ | ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏ | Inference | Validation | Training | Export | +|-----------|----------------|----------------------------------------|-----------|------------|----------|--------| +| FastSAM-s | `FastSAM-s.pt` | [ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ไฝฟ็”จไพ‹ + +FastSAMใƒขใƒ‡ใƒซใฏใ€Pythonใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ็ฐกๅ˜ใซ็ตฑๅˆใงใใพใ™ใ€‚Ultralyticsใฏใ€้–‹็™บใ‚’ๅŠน็އๅŒ–ใ™ใ‚‹ใŸใ‚ใฎใƒฆใƒผใ‚ถใƒผใƒ•ใƒฌใƒณใƒ‰ใƒชใƒผใชPython APIใŠใ‚ˆใณCLIใ‚ณใƒžใƒณใƒ‰ใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ + +### ไบˆๆธฌใฎไฝฟ็”จๆ–นๆณ• + +็”ปๅƒใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚’ๅฎŸ่กŒใ™ใ‚‹ใซใฏใ€ไปฅไธ‹ใฎใ‚ˆใ†ใซ`predict`ใƒกใ‚ฝใƒƒใƒ‰ใ‚’ไฝฟ็”จใ—ใพใ™๏ผš + +!!! 
!!! Example "Example"

    === "Python"
        ```python
        from ultralytics import FastSAM
        from ultralytics.models.fastsam import FastSAMPrompt

        # Define an inference source
        source = 'path/to/bus.jpg'

        # Create a FastSAM model
        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt

        # Run inference on an image
        everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)

        # Prepare a Prompt Process object
        prompt_process = FastSAMPrompt(source, everything_results, device='cpu')

        # Everything prompt
        ann = prompt_process.everything_prompt()

        # Default bbox shape [0,0,0,0] -> [x1,y1,x2,y2]
        ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])

        # Text prompt
        ann = prompt_process.text_prompt(text='a photo of a dog')

        # Point prompt
        # points default [[0,0]] [[x1,y1],[x2,y2]]
        # point_label default [0] [1,0] 0:background, 1:foreground
        ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
        prompt_process.plot(annotations=ann, output='./')
        ```

    === "CLI"
        ```bash
        # Load a FastSAM model and segment everything with it
        yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640
        ```

This snippet demonstrates the simplicity of loading a pre-trained model and running a prediction on an image.

### Val Usage

Validation of the model on a dataset can be done as follows:

!!! Example "Example"

    === "Python"
        ```python
        from ultralytics import FastSAM

        # Create a FastSAM model
        model = FastSAM('FastSAM-s.pt')  # or FastSAM-x.pt

        # Validate the model
        results = model.val(data='coco8-seg.yaml')
        ```

    === "CLI"
        ```bash
        # Load a FastSAM model and validate it on the COCO8 example segmentation dataset at image size 640
        yolo segment val model=FastSAM-s.pt data=coco8-seg.yaml imgsz=640
        ```

Please note that FastSAM only supports detection and segmentation of a single class of object. This means it will recognize and segment all objects as the same class. Therefore, when preparing a dataset, you need to convert all object category IDs to 0.
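The note above implies a one-time relabeling step when reusing a multi-class dataset. A minimal sketch of that conversion for YOLO-format label files; the directory layout is an assumption for illustration, not part of the FastSAM tooling:

```python
from pathlib import Path

# Rewrite every YOLO-format label file so each instance uses class id 0 (single class)
label_dir = Path('path/to/labels')  # hypothetical folder of *.txt label files
for label_file in label_dir.glob('*.txt'):
    lines = [ln for ln in label_file.read_text().splitlines() if ln.strip()]
    fixed = ['0 ' + ln.split(maxsplit=1)[1] for ln in lines]
    label_file.write_text('\n'.join(fixed) + '\n')
```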
[ใƒขใƒ‡ใƒซใฎใƒใ‚งใƒƒใ‚ฏใƒใ‚คใƒณใƒˆ](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing)ใ‚’ใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ—ใพใ™ใ€‚ + +2. FastSAMใ‚’ๆŽจ่ซ–ใซไฝฟ็”จใ—ใพใ™ใ€‚ไปฅไธ‹ใฏๅฎŸ่กŒไพ‹ใงใ™๏ผš + + - ็”ปๅƒๅ†…ใฎใ™ในใฆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ™ใ‚‹๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - ใƒ†ใ‚ญใ‚นใƒˆใƒ—ใƒญใƒณใƒ—ใƒˆใ‚’ไฝฟ็”จใ—ใฆ็‰นๅฎšใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ™ใ‚‹๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog" + ``` + + - ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ™ใ‚‹๏ผˆxywhๅฝขๅผใงใƒœใƒƒใ‚ฏใ‚นๅบงๆจ™ใ‚’ๆŒ‡ๅฎšใ—ใพใ™๏ผ‰๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - ็‰นๅฎšใฎใƒใ‚คใƒณใƒˆใฎ่ฟ‘ใใซใ‚ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ™ใ‚‹๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +ใ•ใ‚‰ใซใ€FastSAMใ‚’[Colabใƒ‡ใƒข](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing)ใ‚„[HuggingFaceใ‚ฆใ‚งใƒ–ใƒ‡ใƒข](https://huggingface.co/spaces/An-619/FastSAM)ใง่ฉฆใ™ใ“ใจใ‚‚ใงใใพใ™ใ€‚ + +## ๅผ•็”จใจ่ฌ่พž + +FastSAMใฎ่‘—่€…ใซใฏใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใฎๅˆ†้‡Žใงใฎ้‡่ฆใช่ฒข็Œฎใ‚’็งฐใˆใŸใ„ใจๆ€ใ„ใพใ™ใ€‚ + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +FastSAMใฎใ‚ชใƒชใ‚ธใƒŠใƒซใฎ่ซ–ๆ–‡ใฏใ€[arXiv](https://arxiv.org/abs/2306.12156)ใงๅ…ฅๆ‰‹ใงใใพใ™ใ€‚่‘—่€…ใฏๅฝผใ‚‰ใฎไฝœๅ“ใ‚’ๅบƒใๅ…ฌ้–‹ใ—ใ€ใ‚ณใƒผใƒ‰ใƒ™ใƒผใ‚นใฏ[GitHub](https://github.com/CASIA-IVA-Lab/FastSAM)ใงใ‚ขใ‚ฏใ‚ปใ‚นใงใใ‚‹ใ‚ˆใ†ใซใ—ใฆใ„ใพใ™ใ€‚็งใŸใกใฏใ€ๅฝผใ‚‰ใŒใƒ•ใ‚ฃใƒผใƒซใƒ‰ใ‚’้€ฒๆญฉใ•ใ›ใ€ใใฎๆˆๆžœใ‚’ๅบƒใ„ใ‚ณใƒŸใƒฅใƒ‹ใƒ†ใ‚ฃใซใ‚ขใ‚ฏใ‚ปใ‚นๅฏ่ƒฝใซใ—ใฆใใ‚ŒใŸๅฝผใ‚‰ใฎๅŠชๅŠ›ใซๆ„Ÿ่ฌใ—ใฆใ„ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/ja/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/index.md b/ultralytics/docs/ja/models/index.md new file mode 100755 index 0000000..490ac76 --- /dev/null +++ b/ultralytics/docs/ja/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: UltralyticsใŒใ‚ตใƒใƒผใƒˆใ™ใ‚‹YOLOใƒ•ใ‚กใƒŸใƒชใƒผใ€SAMใ€MobileSAMใ€FastSAMใ€YOLO-NASใ€RT-DETRใƒขใƒ‡ใƒซใฎๅคšๆง˜ใช็ฏ„ๅ›ฒใ‚’ๆŽข็ดขใ—ใ€CLIใŠใ‚ˆใณPythonใฎไฝฟ็”จไพ‹ใงๅง‹ใ‚ใพใ—ใ‚‡ใ†ใ€‚ +keywords: Ultralytics, ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, ใƒขใƒ‡ใƒซ, ใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃ, Python, CLI +--- + +# UltralyticsใŒใ‚ตใƒใƒผใƒˆใ™ใ‚‹ใƒขใƒ‡ใƒซ + 
Welcome to Ultralytics' model documentation! We offer support for a wide range of models, each specialized for specific tasks like [object detection](../tasks/detect.md), [instance segmentation](../tasks/segment.md), [image classification](../tasks/classify.md), [pose estimation](../tasks/pose.md), and [multi-object tracking](../modes/track.md). If you are interested in contributing your model architecture to Ultralytics, check out our [Contributing Guide](../../help/contributing.md).

!!! Note "Note"

    🚧 Our documentation in various languages is currently under construction, and we are working hard to improve it. Thank you for your patience! 🙏

## Featured Models

Here are some of the key models supported (a loading sketch follows this list):

1. **[YOLOv3](yolov3.md)**: The third iteration of the YOLO model family by Joseph Redmon, known for its efficient real-time object detection capabilities.
2. **[YOLOv4](yolov4.md)**: A darknet-native update to YOLOv3, released by Alexey Bochkovskiy in 2020.
3. **[YOLOv5](yolov5.md)**: An improved version of the YOLO architecture by Ultralytics, offering better performance and speed trade-offs compared to previous versions.
4. **[YOLOv6](yolov6.md)**: Released by [Meituan](https://about.meituan.com/) in 2022 and in use in many of the company's autonomous delivery robots.
5. **[YOLOv7](yolov7.md)**: Updated YOLO models released in 2022 by the authors of YOLOv4.
6. **[YOLOv8](yolov8.md) NEW 🚀**: The latest version of the YOLO family, featuring enhanced capabilities such as instance segmentation, pose/keypoint estimation, and classification.
7. **[Segment Anything Model (SAM)](sam.md)**: Meta's Segment Anything Model (SAM).
8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM for mobile applications, by Kyung Hee University.
9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM by the Image & Video Analysis Group, Institute of Automation, Chinese Academy of Sciences.
10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu's PaddlePaddle Realtime Detection Transformer (RT-DETR) models.
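The featured models above map onto dedicated loader classes in the `ultralytics` package. A minimal sketch; the checkpoint names are the ones used elsewhere in these docs:

```python
from ultralytics import NAS, RTDETR, SAM, YOLO

# Each family has its own entry point; all expose a similar predict interface
detector = YOLO('yolov8n.pt')       # YOLO family checkpoints
segmenter = SAM('sam_b.pt')         # SAM / MobileSAM checkpoints
nas_model = NAS('yolo_nas_s.pt')    # YOLO-NAS checkpoints
detr_model = RTDETR('rtdetr-l.pt')  # RT-DETR checkpoints
```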
<div align="center">
  [YouTube video embed]
  Watch: Run Ultralytics YOLO models in just a few lines of code.
</div>
## Getting Started: Usage Examples

This example provides simple YOLO training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.

Note the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.

!!! Example "Example"

    === "Python"

        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()`, `SAM()`, `NAS()`, and `RTDETR()` classes to create a model instance in Python:

        ```python
        from ultralytics import YOLO

        # Load a COCO-pretrained YOLOv8n model
        model = YOLO('yolov8n.pt')

        # Display model information (optional)
        model.info()

        # Train the model on the COCO8 example dataset for 100 epochs
        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)

        # Run inference with the YOLOv8n model on the 'bus.jpg' image
        results = model('path/to/bus.jpg')
        ```

    === "CLI"

        CLI commands are available to directly run the models:

        ```bash
        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640

        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
        yolo predict model=yolov8n.pt source=path/to/bus.jpg
        ```

## Contributing New Models

Interested in contributing your model to Ultralytics? Great! We're always open to expanding our model portfolio.

1. **Fork the Repository**: Start by forking the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics).

2. **Clone Your Fork**: Clone your fork to your local machine and create a new branch to work on.

3. **Implement Your Model**: Add your model following the coding standards and guidelines provided in our [Contributing Guide](../../help/contributing.md).

4. **Test Thoroughly**: Make sure to test your model rigorously, both in isolation and as part of the pipeline.

5. **Create a Pull Request**: Once you are satisfied with your model, create a pull request to the main repository for review.
**ใ‚ณใƒผใƒ‰ใƒฌใƒ“ใƒฅใƒผ๏ผ†ใƒžใƒผใ‚ธ**๏ผšใƒฌใƒ“ใƒฅใƒผๅพŒใ€ใƒขใƒ‡ใƒซใŒๆˆ‘ใ€…ใฎๅŸบๆบ–ใ‚’ๆบ€ใŸใ—ใฆใ„ใ‚‹ๅ ดๅˆใ€ๆœฌใƒชใƒใ‚ธใƒˆใƒชใซใƒžใƒผใ‚ธใ•ใ‚Œใพใ™ใ€‚ + +่ฉณ็ดฐใชๆ‰‹้ †ใซใคใ„ใฆใฏใ€[่ฒข็Œฎใ‚ฌใ‚คใƒ‰](../../help/contributing.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ diff --git a/ultralytics/docs/ja/models/index.md:Zone.Identifier b/ultralytics/docs/ja/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/mobile-sam.md b/ultralytics/docs/ja/models/mobile-sam.md new file mode 100755 index 0000000..53414f5 --- /dev/null +++ b/ultralytics/docs/ja/models/mobile-sam.md @@ -0,0 +1,116 @@ +--- +comments: true +description: Ultralyticsใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏๅ†…ใงMobileSAMใ‚’ใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ—ใฆใƒ†ใ‚นใƒˆใ™ใ‚‹ๆ–นๆณ•ใ€MobileSAMใฎๅฎŸ่ฃ…ใ€ใ‚ชใƒชใ‚ธใƒŠใƒซใฎSAMใจใฎๆฏ”่ผƒใซใคใ„ใฆ่ฉณใ—ใ็Ÿฅใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ไปŠๆ—ฅใ‹ใ‚‰ใƒขใƒใ‚คใƒซใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใ‚’ๆ”นๅ–„ใ—ใพใ—ใ‚‡ใ†ใ€‚ +keywords: MobileSAM, Ultralytics, SAM, ใƒขใƒใ‚คใƒซใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณ, Arxiv, GPU, API, ็”ปๅƒใ‚จใƒณใ‚ณใƒผใƒ€, ใƒžใ‚นใ‚ฏใƒ‡ใ‚ณใƒผใƒ€, ใƒขใƒ‡ใƒซใฎใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰, ใƒ†ใ‚นใƒˆๆ–นๆณ• +--- + +![MobileSAM ใƒญใ‚ด](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# Mobile Segment Anything๏ผˆMobileSAM๏ผ‰ + +MobileSAM่ซ–ๆ–‡ใŒ[arXiv](https://arxiv.org/pdf/2306.14289.pdf)ใงๅˆฉ็”จๅฏ่ƒฝใซใชใ‚Šใพใ—ใŸใ€‚ + +CPUไธŠใงๅ‹•ไฝœใ™ใ‚‹MobileSAMใฎใƒ‡ใƒขใฏใ€[ใ“ใกใ‚‰ใฎใƒ‡ใƒขใƒชใƒณใ‚ฏ](https://huggingface.co/spaces/dhkim2810/MobileSAM)ใ‹ใ‚‰ใ‚ขใ‚ฏใ‚ปใ‚นใงใใพใ™ใ€‚Mac i5 CPUไธŠใงใฏใ€็ด„3็ง’ใ‹ใ‹ใ‚Šใพใ™ใ€‚Hugging Faceใฎใƒ‡ใƒขใงใฏใ€ใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใจไฝŽๆ€ง่ƒฝใชCPUใŒ้…ใ„ๅฟœ็ญ”ใซๅฏ„ไธŽใ—ใฆใ„ใพใ™ใŒใ€ๅŠนๆžœ็š„ใซๅ‹•ไฝœใ—็ถšใ‘ใพใ™ใ€‚ + +MobileSAMใฏใ€[Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything)ใ€[AnyLabeling](https://github.com/vietanhdev/anylabeling)ใ€ใŠใ‚ˆใณ[Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D)ใชใฉใ€ใ•ใพใ–ใพใชใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใงๅฎŸ่ฃ…ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ + +MobileSAMใฏใ€100kใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ๏ผˆๅ…ƒใฎ็”ปๅƒใฎ1%๏ผ‰ใ‚’ๅ˜ไธ€ใฎGPUใงๅญฆ็ฟ’ใ—ใ€1ๆ—ฅๆœชๆบ€ใง่จ“็ทดใŒๅฎŒไบ†ใ—ใพใ™ใ€‚ใ“ใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฎใ‚ณใƒผใƒ‰ใฏๅฐ†ๆฅๅ…ฌ้–‹ใ•ใ‚Œใ‚‹ไบˆๅฎšใงใ™ใ€‚ + +## ๅˆฉ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณๅ‹•ไฝœใƒขใƒผใƒ‰ + +ใ“ใฎ่กจใฏใ€ๅˆฉ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใจใใ‚Œใžใ‚Œใฎๅ›บๆœ‰ใฎไบ‹ๅ‰ๅญฆ็ฟ’้‡ใฟใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณ[ไบˆๆธฌ](../modes/predict.md)ใ€[ๆคœ่จผ](../modes/val.md)ใ€[่จ“็ทด](../modes/train.md)ใ€ใŠใ‚ˆใณ[ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ](../modes/export.md)ใฎใ‚ˆใ†ใชใ•ใพใ–ใพใชๅ‹•ไฝœใƒขใƒผใƒ‰ใซๅฏพใ™ใ‚‹ไบ’ๆ›ๆ€งใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚`โœ…`ใฏๅฏพๅฟœใ—ใฆใ„ใ‚‹ใƒขใƒผใƒ‰ใ€`โŒ`ใฏๅฏพๅฟœใ—ใฆใ„ใชใ„ใƒขใƒผใƒ‰ใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใ‚ฟใ‚คใƒ— | ไบ‹ๅ‰ๅญฆ็ฟ’้‡ใฟ | ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏ | ไบˆๆธฌ | ๆคœ่จผ | ่จ“็ทด | ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ | +|-----------|-----------------|----------------------------------------|----|----|----|--------| +| MobileSAM | `mobile_sam.pt` | [ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## SAMใ‹ใ‚‰MobileSAMใธใฎ็งป่กŒ + 
Since MobileSAM retains the same pipeline as the original SAM, it incorporates the original's pre-processing, post-processing, and all other interfaces. Consequently, those currently using the original SAM can migrate to MobileSAM with minimal effort.

MobileSAM performs comparably to the original SAM and retains the same pipeline except for a change in the image encoder. Specifically, the original heavyweight ViT-H encoder (632M) is replaced with a much smaller Tiny-ViT (5M). On a single GPU, MobileSAM operates at about 12ms per image: 8ms on the image encoder and 4ms on the mask decoder.

The following table compares the ViT-based image encoders:

| Image Encoder | Original SAM | MobileSAM |
|-----------|-----------|-----------|
| Parameters | 611M | 5M |
| Speed | 452ms | 8ms |

The original SAM and MobileSAM use the same prompt-guided mask decoder:

| Mask Decoder | Original SAM | MobileSAM |
|---------|-----------|-----------|
| Parameters | 3.876M | 3.876M |
| Speed | 4ms | 4ms |

Here is a comparison of the whole pipeline:

| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM |
|----------------------|-----------|-----------|
| Parameters | 615M | 9.66M |
| Speed | 456ms | 12ms |

The performance of MobileSAM and the original SAM is demonstrated below using both a point and a box as prompts.

![Image with Point as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)

![Image with Box as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)

With its superior performance, MobileSAM is approximately 5 times smaller and 7 times faster than the current FastSAM. More details are available on the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM).

## Testing MobileSAM in Ultralytics

Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both Point and Box prompts.

### Model Download

You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt).

### Point Prompt

!!! Example "Example"

    === "Python"
        ```python
        from ultralytics import SAM

        # Load the model
        model = SAM('mobile_sam.pt')

        # Predict a segment based on a point prompt
        model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
        ```

### Box Prompt
!!! Example "Example"

    === "Python"
        ```python
        from ultralytics import SAM

        # Load the model
        model = SAM('mobile_sam.pt')

        # Predict a segment based on a box prompt
        model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
        ```

`MobileSAM` and `SAM` are implemented using the same API. For more usage details, please see the [SAM page](sam.md).
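As a quick illustration of that shared interface, a minimal sketch; both checkpoints and the image path follow the examples on this page:

```python
from ultralytics import SAM

# The same SAM API drives either checkpoint, so swapping models is a one-line change
for weights in ('sam_b.pt', 'mobile_sam.pt'):
    model = SAM(weights)
    model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
```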
ใงใฏใ€ใƒใƒƒใ‚ฏใƒœใƒผใƒณใฎๆœ€ๅพŒใฎ3ใคใฎใ‚นใƒ†ใƒผใ‚ธ{S3ใ€S4ใ€S5}ใŒใ‚จใƒณใ‚ณใƒผใƒ€ใƒผใธใฎๅ…ฅๅŠ›ใจใ—ใฆ่กจ็คบใ•ใ‚Œใพใ™ใ€‚ๅŠน็އ็š„ใชใƒใ‚คใƒ–ใƒชใƒƒใƒ‰ใ‚จใƒณใ‚ณใƒผใƒ€ใƒผใฏใ€ใƒžใƒซใƒใ‚นใ‚ฑใƒผใƒซใฎ็‰นๅพดใ‚’ใ‚คใƒณใƒˆใƒฉใ‚นใ‚ฑใƒผใƒซ็‰นๅพดใฎ็›ธไบ’ไฝœ็”จ๏ผˆAIFI๏ผ‰ใจใ‚ฏใƒญใ‚นใ‚นใ‚ฑใƒผใƒซ็‰นๅพด่žๅˆใƒขใ‚ธใƒฅใƒผใƒซ๏ผˆCCFM๏ผ‰ใ‚’ไป‹ใ—ใฆ็”ปๅƒ็‰นๅพดใฎใ‚ทใƒผใ‚ฑใƒณใ‚นใซๅค‰ๆ›ใ—ใพใ™ใ€‚IoU-awareใ‚ฏใ‚จใƒช้ธๆŠžใฏใ€ใƒ‡ใ‚ณใƒผใƒ€ใƒผใฎๅˆๆœŸใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚ฏใ‚จใƒชใจใ—ใฆๅ›บๅฎšๆ•ฐใฎ็”ปๅƒ็‰นๅพดใ‚’้ธๆŠžใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใพใ™ใ€‚ๆœ€ๅพŒใซใ€ใƒ‡ใ‚ณใƒผใƒ€ใƒผใฏ่ฃœๅŠฉไบˆๆธฌใƒ˜ใƒƒใƒ‰ใจใจใ‚‚ใซใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚ฏใ‚จใƒชใ‚’ๅๅพฉๆœ€้ฉๅŒ–ใ—ใฆใƒœใƒƒใ‚ฏใ‚นใจไฟก้ ผใ‚นใ‚ณใ‚ขใ‚’็”Ÿๆˆใ—ใพใ™๏ผˆ[ๅ‡บๅ…ธ](https://arxiv.org/pdf/2304.08069.pdf)๏ผ‰ใ€‚ + +### ไธปใช็‰นๅพด + +- **ๅŠน็އ็š„ใชใƒใ‚คใƒ–ใƒชใƒƒใƒ‰ใ‚จใƒณใ‚ณใƒผใƒ€ใƒผ๏ผš** BaiduใฎRT-DETRใฏใ€ใƒžใƒซใƒใ‚นใ‚ฑใƒผใƒซใฎ็‰นๅพดใ‚’ใ‚คใƒณใƒˆใƒฉใ‚นใ‚ฑใƒผใƒซใฎ็›ธไบ’ไฝœ็”จใจใ‚ฏใƒญใ‚นใ‚นใ‚ฑใƒผใƒซใฎ่žๅˆใ‚’ๅˆ†้›ขใ™ใ‚‹ใ“ใจใงๅ‡ฆ็†ใ™ใ‚‹ๅŠน็އ็š„ใชใƒใ‚คใƒ–ใƒชใƒƒใƒ‰ใ‚จใƒณใ‚ณใƒผใƒ€ใƒผใ‚’ไฝฟ็”จใ—ใฆใ„ใพใ™ใ€‚ใ“ใฎใƒฆใƒ‹ใƒผใ‚ฏใชVision Transformersใƒ™ใƒผใ‚นใฎ่จญ่จˆใซใ‚ˆใ‚Šใ€่จˆ็ฎ—ใ‚ณใ‚นใƒˆใ‚’ๅ‰Šๆธ›ใ—ใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚’ๅฎŸ็พใ—ใฆใ„ใพใ™ใ€‚ +- **IoU-awareใ‚ฏใ‚จใƒช้ธๆŠž๏ผš** BaiduใฎRT-DETRใฏใ€IoU-awareใ‚ฏใ‚จใƒช้ธๆŠžใ‚’ๆดป็”จใ—ใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚ฏใ‚จใƒชใฎๅˆๆœŸๅŒ–ใ‚’ๆ”นๅ–„ใ—ใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€ใƒขใƒ‡ใƒซใฏใ‚ทใƒผใƒณๅ†…ใฎ้–ข้€ฃๆ€งใฎ้ซ˜ใ„ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใซ็„ฆ็‚นใ‚’ๅฝ“ใฆใฆๆคœๅ‡บใฎ็ฒพๅบฆใ‚’ๅ‘ไธŠใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ +- **้ฉๅฟœๅฏ่ƒฝใชๆŽจ่ซ–้€Ÿๅบฆ๏ผš** BaiduใฎRT-DETRใฏใ€ๅ†ๅญฆ็ฟ’ใ›ใšใซ็•ฐใชใ‚‹ใƒ‡ใ‚ณใƒผใƒ€ใƒผใƒฌใ‚คใƒคใƒผใ‚’ไฝฟ็”จใ—ใฆๆŽจ่ซ–้€Ÿๅบฆใ‚’ๆŸ”่ปŸใซ่ชฟๆ•ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ“ใฎ้ฉๅฟœๆ€งใซใ‚ˆใ‚Šใ€ใ•ใพใ–ใพใชใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ทใƒŠใƒชใ‚ชใงใฎๅฎŸ็”จ็š„ใชๅฟœ็”จใŒๅฎนๆ˜“ใซใชใ‚Šใพใ™ใ€‚ + +## ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซ + +Ultralytics Python APIใฏใ€็•ฐใชใ‚‹ใ‚นใ‚ฑใƒผใƒซใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟPaddlePaddle RT-DETRใƒขใƒ‡ใƒซใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ + +- RT-DETR-L๏ผšCOCO val2017ใง53.0%ใฎAPใ€T4 GPUใง114 FPS +- RT-DETR-X๏ผšCOCO val2017ใง54.8%ใฎAPใ€T4 GPUใง74 FPS + +## ไฝฟ็”จไพ‹ + +ใ“ใฎไพ‹ใงใฏใ€RT-DETRใฎ่จ“็ทดใจๆŽจ่ซ–ใฎ็ฐกๅ˜ใชไพ‹ใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใจไป–ใฎ[ใƒขใƒผใƒ‰](../modes/index.md)ใฎ่ฉณใ—ใ„ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใซใคใ„ใฆใฏใ€[Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md)ใ€ใŠใ‚ˆใณ[Export](../modes/export.md)ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! 
!!! Example "Example"

    === "Python"

        ```python
        from ultralytics import RTDETR

        # Load a COCO-pretrained RT-DETR-L model
        model = RTDETR('rtdetr-l.pt')

        # Display model information (optional)
        model.info()

        # Train the model on the COCO8 example dataset for 100 epochs
        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)

        # Run inference with the RT-DETR-L model on the 'bus.jpg' image
        results = model('path/to/bus.jpg')
        ```

    === "CLI"

        ```bash
        # Load a COCO-pretrained RT-DETR-L model and train it on the COCO8 example dataset for 100 epochs
        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640

        # Load a COCO-pretrained RT-DETR-L model and run inference on the 'bus.jpg' image
        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
        ```

## Supported Tasks and Modes

This table presents each model type, its specific pre-trained weights, the tasks it supports, and the various modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) that are supported, indicated by ✅ emojis. An export sketch follows the table.

| Model Type | Pre-trained Weights | Supported Tasks | Inference | Validation | Training | Export |
|---------------------|---------------|--------------------------------|----|----|----|--------|
| RT-DETR Large | `rtdetr-l.pt` | [Object Detection](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ |
| RT-DETR Extra-Large | `rtdetr-x.pt` | [Object Detection](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ |
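Since the table lists Export as supported for both variants, a minimal export sketch; the choice of ONNX as the target format is an assumption, and any supported format string should work the same way:

```python
from ultralytics import RTDETR

# Export a COCO-pretrained RT-DETR-L model to ONNX
model = RTDETR('rtdetr-l.pt')
model.export(format='onnx')
```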
## Citation and Acknowledgements

If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069):

!!! Quote ""

    === "BibTeX"

        ```bibtex
        @misc{lv2023detrs,
              title={DETRs Beat YOLOs on Real-time Object Detection},
              author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
              year={2023},
              eprint={2304.08069},
              archivePrefix={arXiv},
              primaryClass={cs.CV}
        }
        ```

We would like to thank Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community. Their contribution to the field with the development of the Vision Transformers-based real-time object detector, RT-DETR, is greatly appreciated.

*Keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, real-time object detection, Vision Transformers-based object detection, pre-trained PaddlePaddle RT-DETR models, Baidu's RT-DETR usage, Ultralytics Python API*

diff --git a/ultralytics/docs/ja/models/sam.md b/ultralytics/docs/ja/models/sam.md new file mode 100755 index 0000000..ae63eb1 --- /dev/null +++ b/ultralytics/docs/ja/models/sam.md @@ -0,0 +1,226 @@
---
comments: true
description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time image segmentation. Learn about its promptable segmentation, zero-shot performance, and how to use it.
keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
---

# Segment Anything Model (SAM)

Welcome to the Segment Anything Model, or SAM. This revolutionary model enables promptable image segmentation with real-time performance, achieving groundbreaking results and setting new standards in the field.

## Introduction to SAM: The Segment Anything Model

The Segment Anything Model (SAM) is a cutting-edge image segmentation model that allows for promptable segmentation, providing flexible versatility in image analysis tasks. SAM forms the heart of the groundbreaking Segment Anything initiative, a project that introduced a novel model, task, and dataset for image segmentation.

SAM's advanced design allows it to adapt to new image distributions and tasks without prior knowledge, a capability known as zero-shot transfer. Trained on the expansive [SA-1B dataset](https://ai.facebook.com/datasets/segment-anything/), which contains more than 1 billion masks spread over 11 million carefully curated images, SAM has displayed outstanding zero-shot performance, in many cases surpassing previous fully supervised results.
+![ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚ตใƒณใƒ—ใƒซใ‚คใƒกใƒผใ‚ธ](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +ๆ–ฐใŸใซๅฐŽๅ…ฅใ•ใ‚ŒใŸSA-1Bใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‹ใ‚‰ใ‚ฌใ‚คใƒ‰ใƒžใ‚นใ‚ฏใ‚’้‡็•ณใ—ใŸไพ‹ใฎ็”ปๅƒใงใ™ใ€‚SA-1Bใซใฏใ€ๅคšๆง˜ใช้ซ˜่งฃๅƒๅบฆใฎใƒฉใ‚คใ‚ปใƒณใ‚น็”ปๅƒใจ11ๅ„„ไปถไปฅไธŠใฎ้ซ˜ๅ“่ณชใฎใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒžใ‚นใ‚ฏใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒžใ‚นใ‚ฏใฏใ€SAMใซใ‚ˆใฃใฆๅฎŒๅ…จ่‡ชๅ‹•็š„ใซๆณจ้‡ˆไป˜ใ‘ใ•ใ‚Œใ€ไบบ้–“ใฎ่ฉ•ไพกใจๆ•ฐๅคšใใฎๅฎŸ้จ“ใง้ซ˜ๅ“่ณชใจๅคšๆง˜ๆ€งใŒ็ขบ่ชใ•ใ‚Œใฆใ„ใพใ™ใ€‚็”ปๅƒใฏๅฏ่ฆ–ๅŒ–ใฎใŸใ‚ใซ็”ปๅƒใ‚ใŸใ‚Šใฎใƒžใ‚นใ‚ฏใฎๆ•ฐใงใ‚ฐใƒซใƒผใƒ—ๅŒ–ใ•ใ‚Œใฆใ„ใพใ™๏ผˆๅนณๅ‡ใงใŠใŠใ‚ˆใ100ๅ€‹ใฎใƒžใ‚นใ‚ฏใŒใ‚ใ‚Šใพใ™๏ผ‰ใ€‚ + +## Segment Anything Model (SAM)ใฎไธปใช็‰นๅพด + +- **ใƒ—ใƒญใƒณใƒ—ใƒˆๅฏ่ƒฝใชใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ‚ฟใ‚นใ‚ฏ:** SAMใฏใ€ใƒ—ใƒญใƒณใƒ—ใƒˆ๏ผˆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’็‰นๅฎšใ™ใ‚‹็ฉบ้–“็š„ใชใพใŸใฏใƒ†ใ‚ญใ‚นใƒˆ็š„ใชๆ‰‹ใŒใ‹ใ‚Š๏ผ‰ใ‹ใ‚‰ๆœ‰ๅŠนใชใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒžใ‚นใ‚ฏใ‚’็”Ÿๆˆใ™ใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚ +- **้ซ˜ๅบฆใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃ:** Segment Anything Modelใฏใ€ๅผทๅŠ›ใช็”ปๅƒใ‚จใƒณใ‚ณใƒผใƒ€ใ€ใƒ—ใƒญใƒณใƒ—ใƒˆใ‚จใƒณใ‚ณใƒผใƒ€ใ€่ปฝ้‡ใฎใƒžใ‚นใ‚ฏใƒ‡ใ‚ณใƒผใƒ€ใ‚’ๆŽก็”จใ—ใฆใ„ใพใ™ใ€‚ใ“ใฎใƒฆใƒ‹ใƒผใ‚ฏใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใซใ‚ˆใ‚Šใ€ๆŸ”่ปŸใชใƒ—ใƒญใƒณใƒ—ใƒ†ใ‚ฃใƒณใ‚ฐใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใฎใƒžใ‚นใ‚ฏ่จˆ็ฎ—ใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ‚ฟใ‚นใ‚ฏใฎๆ›–ๆ˜งใ•ใฎ่ช่ญ˜ใŒๅฏ่ƒฝใงใ™ใ€‚ +- **SA-1Bใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ:** Segment Anythingใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ‚ˆใฃใฆๅฐŽๅ…ฅใ•ใ‚ŒใŸSA-1Bใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฏใ€1,100ไธ‡ๆžšไปฅไธŠใฎ็”ปๅƒใซ1,000,000,000ไปถไปฅไธŠใฎใƒžใ‚นใ‚ฏใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใ‚Œใพใงใงๆœ€ใ‚‚ๅคง่ฆๆจกใชใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใ‚ใ‚Šใ€SAMใซๅคšๆง˜ใงๅคง่ฆๆจกใชใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒ‡ใƒผใ‚ฟใ‚ฝใƒผใ‚นใ‚’ๆไพ›ใ—ใพใ™ใ€‚ +- **ใ‚ผใƒญใ‚ทใƒงใƒƒใƒˆใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น:** SAMใฏใ€ใ•ใพใ–ใพใชใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ‚ฟใ‚นใ‚ฏใงๅ„ชใ‚ŒใŸใ‚ผใƒญใ‚ทใƒงใƒƒใƒˆใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’็™บๆฎใ—ใ€ใƒ—ใƒญใƒณใƒ—ใƒˆใ‚จใƒณใ‚ธใƒ‹ใ‚ขใƒชใƒณใ‚ฐใฎๆœ€ๅฐ้™ใฎๅฟ…่ฆๆ€งใงๅคšๆง˜ใชใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซๅณๅบงใซไฝฟ็”จใงใใ‚‹ใƒ„ใƒผใƒซใจใชใ‚Šใพใ™ใ€‚ + +Segment Anything ModelใŠใ‚ˆใณSA-1Bใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎ่ฉณ็ดฐใซใคใ„ใฆใฏใ€[Segment Anything website](https://segment-anything.com)ใ‚’ใ”่ฆงใ„ใŸใ ใใ‹ใ€็ ”็ฉถ่ซ–ๆ–‡[Segment Anything](https://arxiv.org/abs/2304.02643)ใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ + +## ไฝฟ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณๅ‹•ไฝœใƒขใƒผใƒ‰ + +ใ“ใฎใƒ†ใƒผใƒ–ใƒซใงใฏใ€ไฝฟ็”จๅฏ่ƒฝใชใƒขใƒ‡ใƒซใจใใฎ็‰นๅฎšใฎไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟ้‡ใฟใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใŠใ‚ˆใณInferenceใ€Validationใ€Trainingใ€Exportใชใฉใฎใ•ใพใ–ใพใชๆ“ไฝœใƒขใƒผใƒ‰ใซๅฏพใ™ใ‚‹ไบ’ๆ›ๆ€งใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใƒขใƒผใƒ‰ใฏโœ…ใฎ็ตตๆ–‡ๅญ—ใง่กจ็คบใ•ใ‚Œใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใชใ„ใƒขใƒผใƒ‰ใฏโŒใฎ็ตตๆ–‡ๅญ—ใง่กจ็คบใ•ใ‚Œใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใฎ็จฎ้กž | ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ้‡ใฟ | ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏ | Inference | Validation | Training | Export | +|-----------|---------------|---------------------------------------------------------------|-----------|------------|----------|--------| +| SAM base | `sam_b.pt` | [Instance Segmentation๏ผˆใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ๏ผ‰](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| SAM large | `sam_l.pt` | [Instance Segmentation๏ผˆใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ๏ผ‰](../tasks/segment.md) | โœ… | โŒ | โŒ | 
โœ… | + +## SAMใฎไฝฟ็”จๆ–นๆณ•: ็”ปๅƒใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใซใŠใ‘ใ‚‹ๆŸ”่ปŸๆ€งใจใƒ‘ใƒฏใƒผ + +Segment Anything Modelใฏใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒ‡ใƒผใ‚ฟใ‚’่ถ…ใˆใŸๅคšใใฎใƒ€ใ‚ฆใƒณใ‚นใƒˆใƒชใƒผใƒ ใ‚ฟใ‚นใ‚ฏใซไฝฟ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ“ใ‚Œใซใฏใ‚จใƒƒใ‚ธๆคœๅ‡บใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎๆๆกˆ็”Ÿๆˆใ€ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ใŠใ‚ˆใณไบˆๅ‚™็š„ใชใƒ†ใ‚ญใ‚นใƒˆใ‹ใ‚‰ใƒžใ‚นใ‚ฏใธใฎไบˆๆธฌใชใฉใŒๅซใพใ‚Œใพใ™ใ€‚ใƒ—ใƒญใƒณใƒ—ใƒˆใ‚จใƒณใ‚ธใƒ‹ใ‚ขใƒชใƒณใ‚ฐใ‚’ไฝฟ็”จใ™ใ‚‹ใ“ใจใงใ€SAMใฏใ‚ผใƒญใ‚ทใƒงใƒƒใƒˆใฎๆ–นๆณ•ใงๆ–ฐใ—ใ„ใ‚ฟใ‚นใ‚ฏใจใƒ‡ใƒผใ‚ฟๅˆ†ๅธƒใซใ™ใฐใ‚„ใ้ฉๅฟœใ™ใ‚‹ใ“ใจใŒใงใใ€ใ‚ใ‚‰ใ‚†ใ‚‹็”ปๅƒใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใซ้–ขใ™ใ‚‹ๆŸ”่ปŸใงๅผทๅŠ›ใชใƒ„ใƒผใƒซใจใชใ‚Šใพใ™ใ€‚ + +### SAMใฎไบˆๆธฌใฎไพ‹ + +!!! Example "ใƒ—ใƒญใƒณใƒ—ใƒˆใงใ‚ปใ‚ฐใƒกใƒณใƒˆใ™ใ‚‹" + + ไธŽใˆใ‚‰ใ‚ŒใŸใƒ—ใƒญใƒณใƒ—ใƒˆใง็”ปๅƒใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ—ใพใ™ใ€‚ + + === "Python" + + ```python + from ultralytics import SAM + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = SAM('sam_b.pt') + + # ใƒขใƒ‡ใƒซๆƒ…ๅ ฑใ‚’่กจ็คบ๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ + model.info() + + # ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นใฎใƒ—ใƒญใƒณใƒ—ใƒˆใงไบˆๆธฌใ‚’ๅฎŸ่กŒ + model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # ใƒใ‚คใƒณใƒˆใฎใƒ—ใƒญใƒณใƒ—ใƒˆใงไบˆๆธฌใ‚’ๅฎŸ่กŒ + model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "ใ™ในใฆใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒˆใ™ใ‚‹" + + ็”ปๅƒๅ…จไฝ“ใ‚’ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ—ใพใ™ใ€‚ + + === "Python" + + ```python + from ultralytics import SAM + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = SAM('sam_b.pt') + + # ใƒขใƒ‡ใƒซๆƒ…ๅ ฑใ‚’่กจ็คบ๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ + model.info() + + # ไบˆๆธฌใ‚’ๅฎŸ่กŒ + model('path/to/image.jpg') + ``` + + === "CLI" + + ```bash + # SAMใƒขใƒ‡ใƒซใงไบˆๆธฌใ‚’ๅฎŸ่กŒ + yolo predict model=sam_b.pt source=path/to/image.jpg + ``` + +- ใ“ใ“ใงใฏใ€ใƒ—ใƒญใƒณใƒ—ใƒˆ๏ผˆใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚น/ใƒใ‚คใƒณใƒˆ/ใƒžใ‚นใ‚ฏ๏ผ‰ใ‚’ๆŒ‡ๅฎšใ—ใชใ„ๅ ดๅˆใฏใ€็”ปๅƒๅ…จไฝ“ใŒใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ•ใ‚Œใ‚‹ใƒญใ‚ธใƒƒใ‚ฏใงใ™ใ€‚ + +!!! 
!!! Example "SAMPredictor example"

    This way you can set the image once and run prompt inference multiple times without running the image encoder more than once.

    === "Prompt inference"

        ```python
        import cv2

        from ultralytics.models.sam import Predictor as SAMPredictor

        # Create a SAMPredictor
        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
        predictor = SAMPredictor(overrides=overrides)

        # Set the image
        predictor.set_image("ultralytics/assets/zidane.jpg")  # set with an image file
        predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg"))  # set with an np.ndarray
        results = predictor(bboxes=[439, 437, 524, 709])
        results = predictor(points=[900, 370], labels=[1])

        # Reset the image
        predictor.reset_image()
        ```

    Segment everything with additional arguments.

    === "Segment everything"

        ```python
        from ultralytics.models.sam import Predictor as SAMPredictor

        # Create a SAMPredictor
        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
        predictor = SAMPredictor(overrides=overrides)

        # Segment with additional arguments
        results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64)
        ```

- See the [`Predictor/generate` reference](../../../reference/models/sam/predict.md) for more details on the additional arguments for `Segment everything`.

## SAM Comparison vs. YOLOv8

Here we compare Meta's smallest SAM model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):

| Model | Size | Parameters | Speed (CPU) |
|------------------------------------------------|-----------------------|----------------------|-----------------------|
| Meta's SAM-b | 358 MB | 94.7 M | 51096 ms/im |
| [MobileSAM](mobile-sam.md) | 40.7 MB | 10.1 M | 46122 ms/im |
| [FastSAM-s](fast-sam.md) with YOLOv8 backbone | 23.7 MB | 11.8 M | 115 ms/im |
| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x fewer) | **59 ms/im** (866x faster) |

This comparison shows the order-of-magnitude differences in model sizes and speeds. While SAM presents unique capabilities for automatic segmentation, it is not a direct competitor to the YOLOv8 segmentation models, which are smaller, faster, and more efficient.

Tests were run on a 2023 Apple M2 MacBook with 16GB of RAM. To reproduce this test:
!!! Example "Example"

    === "Python"
        ```python
        from ultralytics import FastSAM, SAM, YOLO

        # Profile SAM-b
        model = SAM('sam_b.pt')
        model.info()
        model('ultralytics/assets')

        # Profile MobileSAM
        model = SAM('mobile_sam.pt')
        model.info()
        model('ultralytics/assets')

        # Profile FastSAM-s
        model = FastSAM('FastSAM-s.pt')
        model.info()
        model('ultralytics/assets')

        # Profile YOLOv8n-seg
        model = YOLO('yolov8n-seg.pt')
        model.info()
        model('ultralytics/assets')
        ```

## Auto-Annotation: A Quick Path to Segmentation Datasets

Auto-annotation is a key feature of SAM, allowing users to generate a [segmentation dataset](https://docs.ultralytics.com/datasets/segment) using a pre-trained detection model. This feature enables rapid and accurate annotation of a large number of images, bypassing the need for time-consuming manual labeling.

### Generate a Segmentation Dataset Using a Detection Model

To auto-annotate your dataset with the Ultralytics framework, use the `auto_annotate` function as shown below:

!!! Example "Example"

    === "Python"
        ```python
        from ultralytics.data.annotator import auto_annotate

        auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt')
        ```

| Argument | Type | Description | Default |
|------------|------------------|--------------------------------------------------------------|--------------|
| data | str | Path to a folder containing images to be annotated. | |
| det_model | str, optional | Pre-trained YOLO detection model. Defaults to 'yolov8x.pt'. | 'yolov8x.pt' |
| sam_model | str, optional | Pre-trained SAM segmentation model. Defaults to 'sam_b.pt'. | 'sam_b.pt' |
| device | str, optional | Device to run the models on. Defaults to an empty string (CPU or GPU, if available). | |
| output_dir | str, None, optional | Directory to save the annotated results. Defaults to a 'labels' folder in the same directory as 'data'. | None |

The `auto_annotate` function takes the path to your images, with optional arguments for specifying the pre-trained detection and SAM segmentation models, the device to run the models on, and the output directory for saving the annotated results.

Auto-annotation with pre-trained models can dramatically cut down the time and effort required to create high-quality segmentation datasets. This feature is especially beneficial for researchers and developers dealing with large image collections, as it allows them to focus on model development and evaluation rather than manual annotation.
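The generated labels can then feed straight into segmentation training. A minimal sketch, assuming a hypothetical `my_seg_data.yaml` dataset config that points at the images and the generated 'labels' folder:

```python
from ultralytics import YOLO

# Fine-tune a small segmentation model on the auto-annotated dataset
model = YOLO('yolov8n-seg.pt')
model.train(data='my_seg_data.yaml', epochs=50, imgsz=640)  # my_seg_data.yaml is hypothetical
```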
## Citation and Acknowledgements

If you find SAM useful in your research or development work, please consider citing the paper:

!!! Quote ""

    === "BibTeX"

        ```bibtex
        @misc{kirillov2023segment,
              title={Segment Anything},
              author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick},
              year={2023},
              eprint={2304.02643},
              archivePrefix={arXiv},
              primaryClass={cs.CV}
        }
        ```

We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community.

*Keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot performance, SA-1B dataset, advanced architecture, auto-annotation, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, AI, artificial intelligence, machine learning, data annotation, segmentation masks, detection model, YOLO detection model, bibtex, Meta AI.*

diff --git a/ultralytics/docs/ja/models/yolo-nas.md b/ultralytics/docs/ja/models/yolo-nas.md new file mode 100755 index 0000000..a7d1863 --- /dev/null +++ b/ultralytics/docs/ja/models/yolo-nas.md @@ -0,0 +1,121 @@
---
comments: true
description: YOLO-NAS is a superior object detection model. Learn about its features, pre-trained models, usage with the Ultralytics Python API, and more.
keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
---

# YOLO-NAS

## Overview

Developed by Deci AI, YOLO-NAS is a groundbreaking object detection foundational model. It is the product of advanced Neural Architecture Search technology, designed to address the limitations of previous YOLO models. With major improvements in quantization support and accuracy-latency trade-offs, YOLO-NAS represents a significant leap in object detection.

![Model example image](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png)
**Overview of YOLO-NAS.** YOLO-NAS employs quantization-aware blocks and selective quantization for optimal performance. When converted to INT8, the model experiences a minimal precision drop, a significant improvement over other models. These advancements culminate in a superior architecture with unprecedented object detection capabilities and outstanding performance.

### Key Features
ใจใƒใ‚นใƒˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ้‡ๅญๅŒ–ใ‚’ๆดป็”จใ—ใฆใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๅ‘ไธŠใ•ใ›ใฆใ„ใพใ™ใ€‚ +- **AutoNACๆœ€้ฉๅŒ–ใจไบ‹ๅ‰ๅญฆ็ฟ’:** YOLO-NASใฏAutoNACๆœ€้ฉๅŒ–ใ‚’ๅˆฉ็”จใ—ใ€COCOใ€Objects365ใ€Roboflow 100ใชใฉใฎๆณจ็›ฎใ•ใ‚Œใ‚‹ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰ๅญฆ็ฟ’ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ใ“ใฎไบ‹ๅ‰ๅญฆ็ฟ’ใซใ‚ˆใ‚Šใ€่ฃฝๅ“็’ฐๅขƒใงใฎไธ‹ๆต็‰ฉไฝ“ๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใซ้žๅธธใซ้ฉใ—ใฆใ„ใพใ™ใ€‚ + +## ไบ‹ๅ‰ๅญฆ็ฟ’ใƒขใƒ‡ใƒซ + +UltralyticsใŒๆไพ›ใ™ใ‚‹ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎYOLO-NASใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆใ€ๆฌกไธ–ไปฃใฎ็‰ฉไฝ“ๆคœๅ‡บใฎใƒ‘ใƒฏใƒผใ‚’ไฝ“้จ“ใ—ใฆใใ ใ•ใ„ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฏใ€้€Ÿๅบฆใจ็ฒพๅบฆใฎไธกๆ–นใฎ้ขใงๅ„ชใ‚ŒใŸใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๆไพ›ใ™ใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚็‰นๅฎšใฎใƒ‹ใƒผใ‚บใซๅˆใ‚ใ›ใฆใ•ใพใ–ใพใชใ‚ชใƒ—ใ‚ทใƒงใƒณใ‹ใ‚‰้ธๆŠžใงใใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซ | mAP | ใƒฌใ‚คใƒ†ใƒณใ‚ท (ms) | +|------------------|-------|------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +ๅ„ใƒขใƒ‡ใƒซใฎใƒใƒชใ‚จใƒผใ‚ทใƒงใƒณใฏใ€Mean Average Precision๏ผˆmAP๏ผ‰ใจใƒฌใ‚คใƒ†ใƒณใ‚ทใฎใƒใƒฉใƒณใ‚นใ‚’ๅ–ใ‚Šใ€ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใจใ‚นใƒ”ใƒผใƒ‰ใฎไธกๆ–นใซๆœ€้ฉๅŒ–ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ + +## ไฝฟ็”จไพ‹ + +Ultralyticsใฎ`ultralytics` Pythonใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ไฝฟ็”จใ—ใฆใ€YOLO-NASใƒขใƒ‡ใƒซใ‚’Pythonใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ็ฐกๅ˜ใซ็ตฑๅˆใงใใ‚‹ใ‚ˆใ†ใซใ—ใพใ—ใŸใ€‚ใ“ใฎใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใฏใ€ใƒ—ใƒญใ‚ปใ‚นใ‚’ใ‚นใƒ ใƒผใ‚บใซใ™ใ‚‹ใƒฆใƒผใ‚ถใƒผใƒ•ใƒฌใƒณใƒ‰ใƒชใƒผใชPython APIใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +ๆฌกใฎไพ‹ใงใฏใ€ๆŽจ่ซ–ใจๆคœ่จผใฎใŸใ‚ใซ`ultralytics`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ไฝฟ็”จใ—ใฆYOLO-NASใƒขใƒ‡ใƒซใ‚’ใฉใฎใ‚ˆใ†ใซไฝฟ็”จใ™ใ‚‹ใ‹ใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ + +### ๆŽจ่ซ–ใจๆคœ่จผใฎไพ‹ + +ใ“ใฎไพ‹ใงใฏใ€COCO8ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงYOLO-NAS-sใ‚’ๆคœ่จผใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹" + + ใ“ใฎไพ‹ใงใฏใ€YOLO-NASใฎๆŽจ่ซ–ใจๆคœ่จผใฎใŸใ‚ใฎใ‚ทใƒณใƒ—ใƒซใชใ‚ณใƒผใƒ‰ใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ๆŽจ่ซ–็ตๆžœใฎๅ‡ฆ็†ใซใคใ„ใฆใฏใ€[Predict](../modes/predict.md)ใƒขใƒผใƒ‰ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ไป–ใฎใƒขใƒผใƒ‰ใงYOLO-NASใ‚’ไฝฟ็”จใ™ใ‚‹ๆ–นๆณ•ใซใคใ„ใฆใฏใ€[Val](../modes/val.md)ใŠใ‚ˆใณ[Export](../modes/export.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚`ultralytics`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใฎYOLO-NASใฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ›ใ‚“ใ€‚ + + === "Python" + + Pythonใงใ€PyTorchใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎ`*.pt`ใƒขใƒ‡ใƒซใƒ•ใ‚กใ‚คใƒซใ‚’`NAS()`ใ‚ฏใƒฉใ‚นใซๆธกใ™ใ“ใจใงใ€ใƒขใƒ‡ใƒซใฎใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝœๆˆใงใใพใ™: + + ```python + from ultralytics import NAS + + # COCOไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎYOLO-NAS-sใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = NAS('yolo_nas_s.pt') + + # ใƒขใƒ‡ใƒซๆƒ…ๅ ฑใฎ่กจ็คบ๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ + model.info() + + # COCO8ใฎไพ‹ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + results = model.val(data='coco8.yaml') + + # 'bus.jpg'็”ปๅƒไธŠใงYOLO-NAS-sใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใŸๆŽจ่ซ– + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ใƒขใƒ‡ใƒซใ‚’็›ดๆŽฅๅฎŸ่กŒใ™ใ‚‹ใŸใ‚ใฎCLIใ‚ณใƒžใƒณใƒ‰ใ‚‚ใ‚ใ‚Šใพใ™: + + ```bash + # COCOไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎYOLO-NAS-sใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€COCO8ใฎไพ‹ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๆคœ่จผ + yolo val model=yolo_nas_s.pt data=coco8.yaml + + # COCOไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎYOLO-NAS-sใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€'bus.jpg'็”ปๅƒไธŠใงๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg + ``` + +## ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใฆใ„ใ‚‹ใ‚ฟใ‚นใ‚ฏใจใƒขใƒผใƒ‰ + +YOLO-NASใƒขใƒ‡ใƒซใฏใ€Small๏ผˆs๏ผ‰ใ€Medium๏ผˆm๏ผ‰ใ€Large๏ผˆl๏ผ‰ใฎ3ใคใฎใƒใƒชใ‚จใƒผใ‚ทใƒงใƒณใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ๅ„ใƒใƒชใ‚จใƒผใ‚ทใƒงใƒณใฏใ€็•ฐใชใ‚‹่จˆ็ฎ—ใƒชใ‚ฝใƒผใ‚นใจใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใฎใƒ‹ใƒผใ‚บใซๅฏพๅฟœใ™ใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™: + +- **YOLO-NAS-s:** ่จˆ็ฎ—ใƒชใ‚ฝใƒผใ‚นใŒ้™ใ‚‰ใ‚Œใฆใ„ใ‚‹็’ฐๅขƒใงๅŠน็އใŒ้‡่ฆใชๅ ดๅˆใซๆœ€้ฉๅŒ–ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ +- **YOLO-NAS-m:** ๅน…ๅบƒใ„ไธ€่ˆฌ็š„ใช็‰ฉไฝ“ๆคœๅ‡บใฎใƒ‹ใƒผใ‚บใซ้ฉใ—ใŸใƒใƒฉใƒณใ‚นใฎๅ–ใ‚ŒใŸใ‚ขใƒ—ใƒญใƒผใƒใงใ™ใ€‚ +- **YOLO-NAS-l:** ่จˆ็ฎ—ใƒชใ‚ฝใƒผใ‚นใฎๅˆถ็ด„ใŒๅฐ‘ใชใ„ๆœ€้ซ˜ใฎ็ฒพๅบฆใŒๆฑ‚ใ‚ใ‚‰ใ‚Œใ‚‹ใ‚ทใƒŠใƒชใ‚ชใซๅฏพๅฟœใ—ใฆใ„ใพใ™ใ€‚ + +ไปฅไธ‹ใฏใ€ๅ„ใƒขใƒ‡ใƒซใฎ่ฉณ็ดฐใชๆฆ‚่ฆใงใ‚ใ‚Šใ€ใใ‚Œใ‚‰ใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟ้‡ใฟใธใฎใƒชใƒณใ‚ฏใ€ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใ€ใ•ใพใ–ใพใชๅ‹•ไฝœใƒขใƒผใƒ‰ใจใฎไบ’ๆ›ๆ€งใŒ็คบใ•ใ‚Œใฆใ„ใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใฎ็จฎ้กž | ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎ้‡ใฟ | ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏ | ๆŽจ่ซ– | ๆคœ่จผ | ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ | ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ | +|------------|-----------------------------------------------------------------------------------------------|----------------------------|----|----|--------|--------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [็‰ฉไฝ“ๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [็‰ฉไฝ“ๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [็‰ฉไฝ“ๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## ๅผ•็”จใจ่ฌ่พž + +็ ”็ฉถใ‚„้–‹็™บใฎไธญใงYOLO-NASใ‚’ไฝฟ็”จใ™ใ‚‹ๅ ดๅˆใฏใ€SuperGradientsใ‚’ๅผ•็”จใ—ใฆใใ ใ•ใ„: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +ใ“ใฎใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใ‚ณใƒŸใƒฅใƒ‹ใƒ†ใ‚ฃๅ‘ใ‘ใฎ่ฒด้‡ใชใƒชใ‚ฝใƒผใ‚นใ‚’ไฝœๆˆใŠใ‚ˆใณ็ถญๆŒใ™ใ‚‹ใŸใ‚ใซใ€Deci AIใฎ[SuperGradients](https://github.com/Deci-AI/super-gradients/)ใƒใƒผใƒ ใซๆ„Ÿ่ฌใฎๆ„ใ‚’่กจใ—ใพใ™ใ€‚้ฉๆ–ฐ็š„ใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใจๅ„ชใ‚ŒใŸ็‰ฉไฝ“ๆคœๅ‡บ่ƒฝๅŠ›ใ‚’ๆŒใคYOLO-NASใŒใ€้–‹็™บ่€…ใ‚„็ ”็ฉถ่€…ใฎ้‡่ฆใชใƒ„ใƒผใƒซใซใชใ‚‹ใจไฟกใ˜ใฆใ„ใพใ™ใ€‚ + +*keywords: YOLO-NAS, Deci AI, ็‰ฉไฝ“ๆคœๅ‡บ, ๆทฑๅฑคๅญฆ็ฟ’, ใƒ‹ใƒฅใƒผใƒฉใƒซใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃๆคœ็ดข, Ultralytics Python API, YOLOใƒขใƒ‡ใƒซ, SuperGradients, ไบ‹ๅ‰ๅญฆ็ฟ’ใƒขใƒ‡ใƒซ, ้‡ๅญๅŒ–ใƒ•ใƒฌใƒณใƒ‰ใƒชใƒผใชๅŸบๆœฌใƒ–ใƒญใƒƒใ‚ฏ, ้ซ˜ๅบฆใชใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚นใ‚ญใƒผใƒ , ใƒใ‚นใƒˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ้‡ๅญๅŒ–, AutoNACๆœ€้ฉๅŒ–, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/ja/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/ja/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov3.md b/ultralytics/docs/ja/models/yolov3.md new file mode 100755 index 0000000..19f62af --- /dev/null +++ b/ultralytics/docs/ja/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: YOLOv3ใ€YOLOv3-Ultralyticsใ€ใŠใ‚ˆใณYOLOv3uใฎๆฆ‚่ฆใ‚’ๆŠŠๆกใ—ใฆใใ ใ•ใ„ใ€‚ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใซๅฏพใ™ใ‚‹ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฎไธปใช็‰นๅพดใ€ไฝฟ็”จๆ–นๆณ•ใ€ใŠใ‚ˆใณใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใซใคใ„ใฆๅญฆใณใพใ™ใ€‚ +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ, ๆŽจ่ซ–, ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, Ultralytics +--- + +# YOLOv3ใ€YOLOv3-Ultralyticsใ€ใŠใ‚ˆใณYOLOv3u + +## ๆฆ‚่ฆ + +ใ“ใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใงใฏใ€[YOLOv3](https://pjreddie.com/darknet/yolo/)ใ€[YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3)ใ€ใŠใ‚ˆใณ[YOLOv3u](https://github.com/ultralytics/ultralytics)ใจใ„ใ†3ใคใฎ้–ข้€ฃใ™ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใƒขใƒ‡ใƒซใซใคใ„ใฆๆฆ‚่ชฌใ—ใพใ™ใ€‚ + +1. **YOLOv3:** ใ“ใ‚ŒใฏYou Only Look Once (YOLO) ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใฎ3็•ช็›ฎใฎใƒใƒผใ‚ธใƒงใƒณใงใ™ใ€‚Joseph Redmonใซใ‚ˆใฃใฆๆœ€ๅˆใซ้–‹็™บใ•ใ‚ŒใŸYOLOv3ใฏใ€ใƒžใƒซใƒใ‚นใ‚ฑใƒผใƒซไบˆๆธฌใ‚„3ใคใฎ็•ฐใชใ‚‹ใ‚ตใ‚คใ‚บใฎๆคœๅ‡บใ‚ซใƒผใƒใƒซใชใฉใ€ใ•ใพใ–ใพใชๆฉŸ่ƒฝใ‚’ๅฐŽๅ…ฅใ—ใ€ๅ‰ใƒใƒผใ‚ธใƒงใƒณใ‚ˆใ‚Šใ‚‚ๆ€ง่ƒฝใ‚’ๅ‘ไธŠใ•ใ›ใพใ—ใŸใ€‚ + +2. **YOLOv3-Ultralytics:** ใ“ใ‚ŒใฏUltralyticsใซใ‚ˆใ‚‹YOLOv3ใƒขใƒ‡ใƒซใฎๅฎŸ่ฃ…ใงใ™ใ€‚ใ‚ชใƒชใ‚ธใƒŠใƒซใฎYOLOv3ใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใ‚’ๅ†็พใ—ใ€ใ‚ˆใ‚Šๅคšใใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใฎใ‚ตใƒใƒผใƒˆใ‚„็ฐกๅ˜ใชใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บใ‚ชใƒ—ใ‚ทใƒงใƒณใชใฉใ€่ฟฝๅŠ ใฎๆฉŸ่ƒฝใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +3. 
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three different detection scales, leveraging three detection kernels of different sizes: 13x13, 26x26, and 52x52. This significantly improved detection accuracy for objects of different sizes (the short sketch after this list shows where these grid sizes come from). YOLOv3 also added features such as multi-label predictions for each bounding box and a better feature-extractor network.
+
+- **YOLOv3-Ultralytics:** The Ultralytics implementation of YOLOv3 delivers the same performance as the original model while adding support for more pre-trained models, additional training methods, and easier customization options. This makes it more flexible and practical for real-world applications.
+
+- **YOLOv3u:** This updated model incorporates the anchor-free, objectness-free split head used in YOLOv8. By eliminating the need for predefined anchor boxes and objectness scores, this detection-head design improves the model's ability to detect objects of varying sizes and shapes, making YOLOv3u more robust and accurate for object detection tasks.
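+
+As a quick illustration of the three detection scales mentioned above, the grid sizes follow directly from the network's downsampling strides. The sketch assumes the classic 416x416 YOLOv3 input resolution; strides of 32, 16, and 8 are standard for its three output scales:
+
+```python
+input_size = 416       # classic YOLOv3 input resolution (assumed here)
+strides = (32, 16, 8)  # downsampling factors of the three detection scales
+
+for stride in strides:
+    grid = input_size // stride
+    print(f"stride {stride}: {grid}x{grid} grid")  # 13x13, 26x26, 52x52
+```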
+ใ“ใฎไพ‹ใงใฏใ€YOLOv3ใฎ็ฐกๅ˜ใชใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใŠใ‚ˆใณๆŽจ่ซ–ใฎไพ‹ใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใŠใ‚ˆใณใใฎไป–ใฎ[ใƒขใƒผใƒ‰](../modes/index.md)ใฎๅฎŒๅ…จใชใƒ‰ใ‚ญใƒฅใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใซใคใ„ใฆใฏใ€[Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md)ใ€ใŠใ‚ˆใณ[Export](../modes/export.md)ใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + PyTorchใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟ `*.pt` ใƒขใƒ‡ใƒซใจ่จญๅฎš `*.yaml` ใƒ•ใ‚กใ‚คใƒซใฏใ€`YOLO()` ใ‚ฏใƒฉใ‚นใซๆธกใ—ใฆใƒขใƒ‡ใƒซใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝœๆˆใงใใพใ™ใ€‚ + + ```python + from ultralytics import YOLO + + # COCOใงๅญฆ็ฟ’ๆธˆใฟใฎYOLOv3nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov3n.pt') + + # ใƒขใƒ‡ใƒซๆƒ…ๅ ฑใฎ่กจ็คบ๏ผˆไปปๆ„๏ผ‰ + model.info() + + # COCO8ใฎใ‚ตใƒณใƒ—ใƒซใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒขใƒ‡ใƒซใ‚’100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv3nใƒขใƒ‡ใƒซใง'bus.jpg'็”ปๅƒใซๅฏพใ—ใฆๆŽจ่ซ–ๅฎŸ่กŒ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLIใ‚ณใƒžใƒณใƒ‰ใ‚’ไฝฟ็”จใ—ใฆ็›ดๆŽฅใƒขใƒ‡ใƒซใ‚’ๅฎŸ่กŒใงใใพใ™ใ€‚ + + ```bash + # COCOใงๅญฆ็ฟ’ๆธˆใฟใฎYOLOv3nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€COCO8ใฎใ‚ตใƒณใƒ—ใƒซใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใง100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCOใงๅญฆ็ฟ’ๆธˆใฟใฎYOLOv3nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€'bus.jpg'็”ปๅƒใซๅฏพใ—ใฆๆŽจ่ซ–ๅฎŸ่กŒ + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## ๅผ•็”จใจ่ฌ่พž + +็ ”็ฉถใงYOLOv3ใ‚’ไฝฟ็”จใ™ใ‚‹ๅ ดๅˆใฏใ€ๅ…ƒใฎYOLO่ซ–ๆ–‡ใจUltralyticsใฎYOLOv3ใƒชใƒใ‚ธใƒˆใƒชใ‚’ๅผ•็”จใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Joseph RedmonใจAli Farhadiใซใฏใ€ใ‚ชใƒชใ‚ธใƒŠใƒซใฎYOLOv3ใ‚’้–‹็™บใ—ใฆใ„ใŸใ ใ„ใŸใ“ใจใซๆ„Ÿ่ฌใ—ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov3.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov4.md b/ultralytics/docs/ja/models/yolov4.md new file mode 100755 index 0000000..92627cf --- /dev/null +++ b/ultralytics/docs/ja/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: YOLOv4ใฏใ€2020ๅนดใซAlexey Bochkovskiyใซใ‚ˆใฃใฆใƒชใƒชใƒผใ‚นใ•ใ‚ŒใŸๆœ€ๅ…ˆ็ซฏใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใซ้–ขใ™ใ‚‹่ฉณ็ดฐใชใ‚ฌใ‚คใƒ‰ใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ใใฎใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎใƒใ‚คใƒฉใ‚คใƒˆใ€้ฉๆ–ฐ็š„ใชๆฉŸ่ƒฝใ€ใŠใ‚ˆใณๅฟœ็”จไพ‹ใ‚’็†่งฃใ—ใฆใใ ใ•ใ„ใ€‚ +keywords: ultralyticsใ€YOLOv4ใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ€ใƒ‹ใƒฅใƒผใƒฉใƒซใƒใƒƒใƒˆใƒฏใƒผใ‚ฏใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ๆคœๅ‡บใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใ€ๆฉŸๆขฐๅญฆ็ฟ’ +--- + +# YOLOv4: ้ซ˜้€Ÿใงๆญฃ็ขบใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ + +YOLOv4ใฎUltralyticsใƒ‰ใ‚ญใƒฅใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒšใƒผใ‚ธใธใ‚ˆใ†ใ“ใใ€‚YOLOv4ใฏใ€2020ๅนดใซAlexey Bochkovskiyใซใ‚ˆใฃใฆใƒชใƒชใƒผใ‚นใ•ใ‚ŒใŸๆœ€ๅ…ˆ็ซฏใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใงใ™ใ€‚้€Ÿๅบฆใจ็ฒพๅบฆใฎๆœ€้ฉใชใƒใƒฉใƒณใ‚นใ‚’ๆไพ›ใ™ใ‚‹ใ‚ˆใ†่จญ่จˆใ•ใ‚ŒใฆใŠใ‚Šใ€ใ•ใพใ–ใพใชใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซๅ„ชใ‚ŒใŸ้ธๆŠž่‚ขใงใ™ใ€‚ + 
+
+![YOLOv4 architecture diagram](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png)
+**YOLOv4 architecture diagram.** Showing the intricate network design of YOLOv4, including the backbone, neck, and head components and their interconnected layers for optimal real-time object detection.
+
+## Introduction
+
+YOLOv4 stands for You Only Look Once version 4. It is a real-time object detection model developed to address the limitations of previous YOLO versions such as [YOLOv3](yolov3.md) and other object detection models. Unlike other convolutional neural network (CNN) based object detectors, YOLOv4 is applicable not only to recommendation systems but also to standalone process management and human-input reduction. Its operation on conventional graphics processing units (GPUs) allows for mass usage at an affordable price, and it is designed to need only a single GPU for training.
+
+## Architecture
+
+YOLOv4 makes use of several innovative features that work together to optimize its performance. These include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish activation, Mosaic data augmentation, DropBlock regularization, and CIoU loss. These features are combined to achieve state-of-the-art results.
+
+A typical object detector is composed of several parts: the input, the backbone, the neck, and the head. The YOLOv4 backbone is pre-trained on ImageNet and is used to predict object classes and bounding boxes. The backbone can be chosen from several models such as VGG, ResNet, ResNeXt, or DenseNet. The neck part of the detector is used to collect feature maps from different stages, and usually includes several bottom-up paths and several top-down paths. The head part is used for the final object detections and classifications.
+
+## Bag of Freebies
+
+YOLOv4 also makes use of methods known as "bag of freebies," techniques that improve the accuracy of the model without increasing the cost of inference. Data augmentation is one common bag-of-freebies technique used in object detection: it increases the variability of the input images to improve the robustness of the model. Examples of data augmentation include photometric distortions (adjusting the brightness, contrast, hue, saturation, and noise of an image) and geometric distortions (adding random scaling, cropping, flipping, and rotating). These techniques help the model generalize better to different types of images.
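+
+The sketch below illustrates the photometric and geometric distortions just described, using generic `torchvision` transforms. It is an illustrative stand-in, not YOLOv4's actual Darknet augmentation pipeline; the 608-pixel input size is an assumption, and for detection training the geometric transforms would also have to be applied to the bounding boxes.
+
+```python
+from torchvision import transforms
+
+augment = transforms.Compose([
+    # Photometric distortion: jitter brightness, contrast, saturation, and hue
+    transforms.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1),
+    # Geometric distortion: random scaling and cropping, flipping, rotation
+    transforms.RandomResizedCrop(608, scale=(0.5, 1.0)),
+    transforms.RandomHorizontalFlip(p=0.5),
+    transforms.RandomRotation(degrees=10),
+    transforms.ToTensor(),
+])
+```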
+YOLOv4ใฏใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎๆœ€้ฉใช้€Ÿๅบฆใจ็ฒพๅบฆใ‚’็›ฎๆŒ‡ใ—ใฆ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚YOLOv4ใฎใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใซใฏใ€ใƒใƒƒใ‚ฏใƒœใƒผใƒณใจใ—ใฆCSPDarknet53ใ€ใƒใƒƒใ‚ฏใจใ—ใฆPANetใ€ๆคœๅ‡บใƒ˜ใƒƒใƒ‰ใจใ—ใฆYOLOv3ใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ใ“ใฎ่จญ่จˆใซใ‚ˆใ‚Šใ€YOLOv4ใฏๅฐ่ฑก็š„ใช้€Ÿๅบฆใงใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚’ๅฎŸ่กŒใงใใ‚‹ใŸใ‚ใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉใ—ใฆใ„ใพใ™ใ€‚YOLOv4ใฏ็ฒพๅบฆใงใ‚‚ๅ„ชใ‚ŒใฆใŠใ‚Šใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎใƒ™ใƒณใƒใƒžใƒผใ‚ฏใงๆœ€ๅ…ˆ็ซฏใฎ็ตๆžœใ‚’้”ๆˆใ—ใฆใ„ใพใ™ใ€‚ + +## ไฝฟ็”จไพ‹ + +็พๆ™‚็‚นใงใฏใ€Ultralyticsใฏ็พๅœจใ€YOLOv4ใƒขใƒ‡ใƒซใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ›ใ‚“ใ€‚ใใฎใŸใ‚ใ€YOLOv4ใ‚’ไฝฟ็”จใ—ใŸใ„ใƒฆใƒผใ‚ถใƒผใฏใ€ใ‚คใƒณใ‚นใƒˆใƒผใƒซใŠใ‚ˆใณไฝฟ็”จๆ–นๆณ•ใซ้–ขใ™ใ‚‹ๆƒ…ๅ ฑใฏ็›ดๆŽฅYOLOv4ใฎGitHubใƒชใƒใ‚ธใƒˆใƒชใ‚’ๅ‚็…งใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + +ใ“ใ“ใงใฏใ€YOLOv4ใ‚’ไฝฟ็”จใ™ใ‚‹ใŸใ‚ใฎไธ€่ˆฌ็š„ใชๆ‰‹้ †ใฎๆฆ‚่ฆใ‚’็คบใ—ใพใ™ใ€‚ + +1. YOLOv4ใฎGitHubใƒชใƒใ‚ธใƒˆใƒชใซใ‚ขใ‚ฏใ‚ปใ‚นใ—ใพใ™: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. ใ‚คใƒณใ‚นใƒˆใƒผใƒซใฎใŸใ‚ใซREADMEใƒ•ใ‚กใ‚คใƒซใซ่จ˜่ผ‰ใ•ใ‚Œใฆใ„ใ‚‹ๆŒ‡็คบใซๅพ“ใ„ใพใ™ใ€‚ไธ€่ˆฌ็š„ใซใฏใ€ใƒชใƒใ‚ธใƒˆใƒชใฎใ‚ฏใƒญใƒผใƒณใ€ๅฟ…่ฆใชไพๅญ˜้–ขไฟ‚ใฎใ‚คใƒณใ‚นใƒˆใƒผใƒซใ€ใŠใ‚ˆใณๅฟ…่ฆใช็’ฐๅขƒๅค‰ๆ•ฐใฎใ‚ปใƒƒใƒˆใ‚ขใƒƒใƒ—ใŒๅซใพใ‚Œใพใ™ใ€‚ + +3. ใ‚คใƒณใ‚นใƒˆใƒผใƒซใŒๅฎŒไบ†ใ—ใŸใ‚‰ใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๆบ–ๅ‚™ใ€ใƒขใƒ‡ใƒซใƒ‘ใƒฉใƒกใƒผใ‚ฟใฎ่จญๅฎšใ€ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚’ๅฎŸ่กŒใ™ใ‚‹ใชใฉใ€ใƒชใƒใ‚ธใƒˆใƒชใงๆไพ›ใ•ใ‚Œใฆใ„ใ‚‹ไฝฟ็”จๆ–นๆณ•ใซๅพ“ใฃใฆใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใŠใ‚ˆใณไฝฟ็”จใงใใพใ™ใ€‚ + +็‰นๅฎšใฎๆ‰‹้ †ใฏใ€็‰นๅฎšใฎใƒฆใƒผใ‚นใ‚ฑใƒผใ‚นใจYOLOv4ใƒชใƒใ‚ธใƒˆใƒชใฎ็พๅœจใฎ็Šถๆ…‹ใซใ‚ˆใฃใฆๅค‰ใ‚ใ‚‹ๅ ดๅˆใŒใ‚ใ‚Šใพใ™ใ€‚ใ—ใŸใŒใฃใฆใ€YOLOv4 GitHubใƒชใƒใ‚ธใƒˆใƒชใงๆไพ›ใ•ใ‚Œใฆใ„ใ‚‹ๆŒ‡็คบใซ็›ดๆŽฅๅ‚็…งใ™ใ‚‹ใ“ใจใ‚’ๅผทใใŠๅ‹งใ‚ใ—ใพใ™ใ€‚ + +YOLOv4ใฎใ‚ตใƒใƒผใƒˆใŒๅฎŸ่ฃ…ใ•ใ‚Œๆฌก็ฌฌใ€Ultralyticsใฎไฝฟ็”จไพ‹ใฎใŸใ‚ใซใ“ใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใ‚’ๆ›ดๆ–ฐใ™ใ‚‹ใ“ใจใ‚’ใŠ่ฉซใณ็”ณใ—ไธŠใ’ใพใ™ใ€‚ + +## ็ต่ซ– + +YOLOv4ใฏใ€้€Ÿๅบฆใจ็ฒพๅบฆใฎใƒใƒฉใƒณใ‚นใ‚’ๅ–ใฃใŸๅผทๅŠ›ใงๅŠน็އ็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใƒขใƒ‡ใƒซใงใ™ใ€‚ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไธญใซใƒฆใƒ‹ใƒผใ‚ฏใชๆฉŸ่ƒฝใจใƒใƒƒใ‚ฐใฎใƒ•ใƒชใƒผใƒ“ใƒผใฎๆŠ€่ก“ใ‚’ไฝฟ็”จใ™ใ‚‹ใ“ใจใงใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใงๅ„ชใ‚ŒใŸใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’็™บๆฎใ—ใพใ™ใ€‚้€šๅธธใฎGPUใ‚’ๆŒใค่ชฐใงใ‚‚ใŒใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใจไฝฟ็”จใ‚’่กŒใ†ใ“ใจใŒใงใใ‚‹ใŸใ‚ใ€ๅน…ๅบƒใ„ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซใ‚ขใ‚ฏใ‚ปใ‚นๅฏ่ƒฝใ‹ใคๅฎŸ็”จ็š„ใงใ™ใ€‚ + +## ๅผ•็”จใจ่ฌ่พž + +ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎๅˆ†้‡Žใงใฎ้‡่ฆใช่ฒข็Œฎใซๅฏพใ—ใฆใ€YOLOv4ใฎ่‘—่€…ใซ่ฌๆ„ใ‚’่กจใ—ใพใ™: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ใ‚ชใƒชใ‚ธใƒŠใƒซใฎYOLOv4ใฎ่ซ–ๆ–‡ใฏ[arXiv](https://arxiv.org/abs/2004.10934)ใง่ฆ‹ใคใ‘ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚่‘—่€…ใฏๅฝผใ‚‰ใฎไป•ไบ‹ใ‚’ไธ€่ˆฌใซๅ…ฌ้–‹ใ—ใฆใŠใ‚Šใ€ใ‚ณใƒผใƒ‰ใƒ™ใƒผใ‚นใฏ[GitHub](https://github.com/AlexeyAB/darknet)ใงใ‚ขใ‚ฏใ‚ปใ‚นใงใใพใ™ใ€‚็งใŸใกใฏใ€ๅฝผใ‚‰ใฎๅŠชๅŠ›ใซๆ„Ÿ่ฌใ—ใ€ๅฝผใ‚‰ใฎไป•ไบ‹ใ‚’ๅบƒใ„ใ‚ณใƒŸใƒฅใƒ‹ใƒ†ใ‚ฃใซใ‚ขใ‚ฏใ‚ปใ‚นๅฏ่ƒฝใซใ—ใฆใใ‚ŒใŸใ“ใจใซๆ„Ÿ่ฌใ—ใฆใ„ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov4.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov5.md b/ultralytics/docs/ja/models/yolov5.md new file mode 100755 index 0000000..b5e4e86 --- /dev/null +++ b/ultralytics/docs/ja/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: YOLOv5uใฏใ€ๆ”น่‰ฏใ•ใ‚ŒใŸ็ฒพๅบฆใจ้€Ÿๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใจใ€ใ•ใพใ–ใพใช็‰ฉไฝ“ๆคœๅ‡บใ‚ฟใ‚นใ‚ฏๅ‘ใ‘ใฎๅคšๆ•ฐใฎไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’ๅ‚™ใˆใŸYOLOv5ใƒขใƒ‡ใƒซใฎ้€ฒๅŒ–ใƒใƒผใ‚ธใƒงใƒณใงใ™ใ€‚ +keywords: YOLOv5u, ็‰ฉไฝ“ๆคœๅ‡บ, ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซ, Ultralytics, Inference, Validation, YOLOv5, YOLOv8, ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผ, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒ•ใƒชใƒผ, ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณ, ๆฉŸๆขฐๅญฆ็ฟ’ +--- + +# YOLOv5 + +## ๆฆ‚่ฆ + +YOLOv5uใฏใ€็‰ฉไฝ“ๆคœๅ‡บๆ–นๆณ•่ซ–ใฎ้€ฒๆญฉใ‚’่กจใ—ใฆใ„ใพใ™ใ€‚UltralyticsใŒ้–‹็™บใ—ใŸ[YOLOv5](https://github.com/ultralytics/yolov5)ใƒขใƒ‡ใƒซใฎๅŸบๆœฌใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใ‚’่ตทๆบใจใ™ใ‚‹YOLOv5uใฏใ€ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผใงใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒ•ใƒชใƒผใฎๅˆ†ๅ‰ฒใƒ˜ใƒƒใƒ‰ใ‚’ๆŽก็”จใ—ใฆใŠใ‚Šใ€ไปฅๅ‰ใฎ[YOLOv8](yolov8.md)ใƒขใƒ‡ใƒซใงๅฐŽๅ…ฅใ•ใ‚ŒใŸ็‰นๅพดใงใ™ใ€‚ใ“ใฎ้ฉๅฟœใซใ‚ˆใ‚Šใ€ใƒขใƒ‡ใƒซใฎใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใŒๆด—็ทดใ•ใ‚Œใ€็‰ฉไฝ“ๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใซใŠใ‘ใ‚‹ๆ”นๅ–„ใ•ใ‚ŒใŸ็ฒพๅบฆใจ้€Ÿๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใŒๅฎŸ็พใ•ใ‚Œใพใ™ใ€‚็ตŒ้จ“็š„ใช็ตๆžœใจๆดพ็”Ÿใ—ใŸ็‰นๅพดใ‹ใ‚‰ๆ˜Žใ‚‰ใ‹ใชใจใŠใ‚Šใ€YOLOv5uใฏใ€็ ”็ฉถใจๅฎŸ็”จใฎไธกๆ–นใงๅ …็‰ขใชใ‚ฝใƒชใƒฅใƒผใ‚ทใƒงใƒณใ‚’ๆฑ‚ใ‚ใ‚‹ไบบใ€…ใซใจใฃใฆๅŠน็އ็š„ใช้ธๆŠž่‚ขใงใ™ใ€‚ + +![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## ไธปใช็‰นๅพด + +- **ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผใชๅˆ†ๅ‰ฒUltralyticsใƒ˜ใƒƒใƒ‰๏ผš** ไผ็ตฑ็š„ใช็‰ฉไฝ“ๆคœๅ‡บใƒขใƒ‡ใƒซใฏใ€ไบ‹ๅ‰ใซๅฎš็พฉใ•ใ‚ŒใŸใ‚ขใƒณใ‚ซใƒผใƒœใƒƒใ‚ฏใ‚นใ‚’ไฝฟ็”จใ—ใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎไฝ็ฝฎใ‚’ไบˆๆธฌใ—ใพใ™ใ€‚ใ—ใ‹ใ—ใ€YOLOv5uใฏใ“ใฎใ‚ขใƒ—ใƒญใƒผใƒใ‚’่ฟ‘ไปฃๅŒ–ใ—ใฆใ„ใพใ™ใ€‚ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผใชๅˆ†ๅ‰ฒUltralyticsใƒ˜ใƒƒใƒ‰ใ‚’ๆŽก็”จใ™ใ‚‹ใ“ใจใงใ€ใ‚ˆใ‚ŠๆŸ”่ปŸใ‹ใค้ฉๅฟœๆ€งใฎใ‚ใ‚‹ๆคœๅ‡บใƒกใ‚ซใƒ‹ใ‚บใƒ ใŒ็ขบไฟใ•ใ‚Œใ€ใ•ใพใ–ใพใชใ‚ทใƒŠใƒชใ‚ชใงใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใŒๅ‘ไธŠใ—ใพใ™ใ€‚ + +- **ๆœ€้ฉๅŒ–ใ•ใ‚ŒใŸ็ฒพๅบฆใจ้€Ÿๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•๏ผš** ้€Ÿๅบฆใจ็ฒพๅบฆใฏใ—ใฐใ—ใฐๅๅฏพใฎๆ–นๅ‘ใซๅผ•ใฃๅผตใ‚‰ใ‚Œใพใ™ใ€‚ใ—ใ‹ใ—ใ€YOLOv5uใฏใ“ใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใซๆŒ‘ๆˆฆใ—ใฆใ„ใพใ™ใ€‚ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ 
ใฎๆคœๅ‡บใ‚’็ขบไฟใ—ใชใŒใ‚‰ใ€็ฒพๅบฆใ‚’ๆใชใ†ใ“ใจใชใใ€ใ‚ญใƒฃใƒชใƒ–ใƒฌใƒผใ‚ทใƒงใƒณใ•ใ‚ŒใŸใƒใƒฉใƒณใ‚นใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใฎๆฉŸ่ƒฝใฏใ€่‡ชๅ‹•่ปŠใ€ใƒญใƒœใƒƒใƒˆๅทฅๅญฆใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใƒ“ใƒ‡ใ‚ช่งฃๆžใชใฉใ€่ฟ…้€Ÿใชๅฟœ็ญ”ใ‚’ๅฟ…่ฆใจใ™ใ‚‹ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ็‰นใซๆœ‰็”จใงใ™ใ€‚ + +- **ใ•ใพใ–ใพใชไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซ๏ผš** ็•ฐใชใ‚‹ใ‚ฟใ‚นใ‚ฏใซใฏ็•ฐใชใ‚‹ใƒ„ใƒผใƒซใ‚ปใƒƒใƒˆใŒๅฟ…่ฆใงใ‚ใ‚‹ใ“ใจใ‚’็†่งฃใ—ใฆใ€YOLOv5uใฏใ•ใพใ–ใพใชไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚Inferenceใ€Validationใ€Trainingใซ็„ฆ็‚นใ‚’ๅฝ“ใฆใฆใ„ใฆใ‚‚ใ€ใƒฆใƒผใ‚ถใƒผใซใฏๅพ…ใกๅ—ใ‘ใฆใ„ใ‚‹็‰นๅˆฅใซ่ชฟๆ•ดใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใŒใ‚ใ‚Šใพใ™ใ€‚ใ“ใฎๅคšๆง˜ๆ€งใซใ‚ˆใ‚Šใ€ใƒฏใƒณใ‚ตใ‚คใ‚บใŒใ™ในใฆใฎ่งฃๆฑบ็ญ–ใงใฏใชใใ€ไธ€ๆ„ใฎ่ชฒ้กŒใซ็‰นๅŒ–ใ—ใŸใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +## ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใจใƒขใƒผใƒ‰ + +ๅ„็จฎใฎไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv5uใƒขใƒ‡ใƒซใฏใ€[็‰ฉไฝ“ๆคœๅ‡บ](../tasks/detect.md)ใ‚ฟใ‚นใ‚ฏใงๅ„ชใ‚ŒใŸใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’็™บๆฎใ—ใพใ™ใ€‚[Inference](../modes/predict.md)ใ€[Validation](../modes/val.md)ใ€[Training](../modes/train.md)ใ€ใŠใ‚ˆใณ[Export](../modes/export.md)ใชใฉใฎใ•ใพใ–ใพใชใƒขใƒผใƒ‰ใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใ‚‹ใŸใ‚ใ€้–‹็™บใ‹ใ‚‰ๅฑ•้–‹ใพใงๅน…ๅบƒใ„ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉใ—ใฆใ„ใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใฎ็จฎ้กž | ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ้‡ใฟ | ใ‚ฟใ‚นใ‚ฏ | ๆŽจ่ซ– | ๆฑŽๅŒ– | ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ | ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ | +|---------|-----------------------------------------------------------------------------------------------------------------------------|----------------------------|----|----|--------|--------| +| YOLOv5u | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [็‰ฉไฝ“ๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +ใ“ใฎ่กจใงใฏใ€YOLOv5uใƒขใƒ‡ใƒซใฎใƒใƒชใ‚ขใƒณใƒˆใซใคใ„ใฆ่ฉณ็ดฐใชๆฆ‚่ฆใ‚’ๆไพ›ใ—ใ€็‰ฉไฝ“ๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใงใฎ้ฉ็”จๅฏ่ƒฝๆ€งใจใ€[Inference](../modes/predict.md)ใ€[Validation](../modes/val.md)ใ€[Training](../modes/train.md)ใ€[Export](../modes/export.md)ใชใฉใฎใ•ใพใ–ใพใชๆ“ไฝœใƒขใƒผใƒ‰ใฎใ‚ตใƒใƒผใƒˆใ‚’ๅผท่ชฟใ—ใฆใ„ใพใ™ใ€‚ใ“ใฎๅŒ…ๆ‹ฌ็š„ใชใ‚ตใƒใƒผใƒˆใซใ‚ˆใ‚Šใ€ใƒฆใƒผใ‚ถใƒผใฏๅบƒ็ฏ„ใช็‰ฉไฝ“ๆคœๅ‡บใ‚ทใƒŠใƒชใ‚ชใงYOLOv5uใƒขใƒ‡ใƒซใฎๆฉŸ่ƒฝใ‚’ๅๅˆ†ใซๆดป็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +## ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใƒกใƒˆใƒชใ‚ฏใ‚น + +!!! ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น + + === "ๆคœๅ‡บ" + + [COCO](https://docs.ultralytics.com/datasets/detect/coco/)ใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใŸไฝฟ็”จไพ‹ใซใคใ„ใฆใฏใ€[ๆคœๅ‡บใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ](https://docs.ultralytics.com/tasks/detect/)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใซใฏ80ใฎไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚ฏใƒฉใ‚นใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ + + | ใƒขใƒ‡ใƒซ | YAML | ใ‚ตใ‚คใ‚บ
(pixels) | mAPval
50-95 | ้€Ÿๅบฆ
CPU ONNX
(ms) | ้€Ÿๅบฆ
A100 TensorRT
(ms) | ใƒ‘ใƒฉใƒกใƒผใ‚ฟ
(M) | FLOPS
(B) | + |-------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|-----------------------|----------------------|----------------------------|-------------------------------------|----------------------|-------------------| + | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 | + | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 | + | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 | + | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 | + | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 | + | | | | | | | | | + | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 | + | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 | + | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 | + | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 | + | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 | + +## ไฝฟ็”จไพ‹ + +ใ“ใฎไพ‹ใงใฏใ€ๅ˜็ด”ใชYOLOv5ใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใจๆŽจ่ซ–ใฎไฝฟ็”จไพ‹ใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใจไป–ใฎ[ใƒขใƒผใƒ‰](../modes/index.md)ใฎๅฎŒๅ…จใชใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใซใคใ„ใฆใฏใ€[Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md)ใ€[Export](../modes/export.md)ใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! 
Example "ไพ‹" + + === "Python" + + Pythonใงใƒขใƒ‡ใƒซใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝœๆˆใ™ใ‚‹ใซใฏใ€PyTorchใฎไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ`*.pt`ใƒขใƒ‡ใƒซใŠใ‚ˆใณๆง‹ๆˆ`*.yaml`ใƒ•ใ‚กใ‚คใƒซใ‚’`YOLO()`ใ‚ฏใƒฉใ‚นใซๆธกใ™ใ“ใจใŒใงใใพใ™ใ€‚ + + ```python + from ultralytics import YOLO + + # COCOใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv5nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov5n.pt') + + # ใƒขใƒ‡ใƒซๆƒ…ๅ ฑใ‚’่กจ็คบใ™ใ‚‹๏ผˆไปปๆ„๏ผ‰ + model.info() + + # COCO8ใฎไพ‹ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒขใƒ‡ใƒซใ‚’100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv5nใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ'bus.jpg'็”ปๅƒใงๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ™ใ‚‹ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLIใ‚ณใƒžใƒณใƒ‰ใ‚’ไฝฟ็”จใ—ใฆใƒขใƒ‡ใƒซใ‚’็›ดๆŽฅๅฎŸ่กŒใ™ใ‚‹ใ“ใจใ‚‚ใงใใพใ™ใ€‚ + + ```bash + # COCOใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv5nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€COCO8ใฎไพ‹ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใง100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCOใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv5nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€'bus.jpg'็”ปๅƒใงๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ™ใ‚‹ + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## ๅผ•็”จใŠใ‚ˆใณ่ฌ่พž + +็ ”็ฉถใงYOLOv5ใพใŸใฏYOLOv5uใ‚’ไฝฟ็”จใ™ใ‚‹ๅ ดๅˆใฏใ€ไปฅไธ‹ใฎใ‚ˆใ†ใซUltralytics YOLOv5ใƒชใƒใ‚ธใƒˆใƒชใ‚’ๅผ•็”จใ—ใฆใใ ใ•ใ„๏ผš + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +ใชใŠใ€YOLOv5ใƒขใƒ‡ใƒซใฏ[AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)ใŠใ‚ˆใณ[Enterprise](https://ultralytics.com/license)ใƒฉใ‚คใ‚ปใƒณใ‚นใฎไธ‹ใงๆไพ›ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov5.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov6.md b/ultralytics/docs/ja/models/yolov6.md new file mode 100755 index 0000000..139b0a7 --- /dev/null +++ b/ultralytics/docs/ja/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: ๆœ€ๅ…ˆ็ซฏใฎ้€Ÿๅบฆใจ็ฒพๅบฆใฎใƒใƒฉใƒณใ‚นใ‚’ๅฎŸ็พใ™ใ‚‹ใ€Meituan YOLOv6ใจใ„ใ†ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใƒขใƒ‡ใƒซใ‚’็ดนไป‹ใ—ใพใ™ใ€‚ๆฉŸ่ƒฝใ€ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ€Pythonใฎไฝฟ็”จๆ–นๆณ•ใซใคใ„ใฆๆทฑใๆŽ˜ใ‚Šไธ‹ใ’ใพใ™ใ€‚ +keywords: Meituan YOLOv6ใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ€Ultralyticsใ€YOLOv6ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใ€Bi-directional Concatenationใ€Anchor-Aided Trainingใ€ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณ +--- + +# Meituan YOLOv6 + +## ๆฆ‚่ฆ + +[Meituan](https://about.meituan.com/) YOLOv6ใฏใ€้€Ÿๅบฆใจ็ฒพๅบฆใฎใƒใƒฉใƒณใ‚นใซๅ„ชใ‚ŒใŸๆœ€ๅ…ˆ็ซฏใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใงใ‚ใ‚Šใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซใŠใ„ใฆใฏไบบๆฐ—ใฎใ‚ใ‚‹้ธๆŠž่‚ขใจใชใฃใฆใ„ใพใ™ใ€‚ใ“ใฎใƒขใƒ‡ใƒซใฏใ€Bi-directional 
+
+![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png)
+![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png)
+**Overview of YOLOv6.** Model architecture diagram showing the redesigned network components and training strategies that lead to significant performance improvements. (a) The neck of YOLOv6 (N and S are shown). For M/L, RepBlocks are replaced with CSPStackRep. (b) The structure of the BiC module. (c) The SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)).
+
+### Key Features
+
+- **Bi-directional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the neck of the detector, enhancing localization signals and delivering performance gains with a negligible speed penalty.
+- **Anchor-Aided Training (AAT) Strategy:** The model proposes AAT to enjoy the benefits of both the anchor-based and anchor-free paradigms without compromising inference efficiency.
+- **Enhanced Backbone and Neck Design:** By adding another stage to the backbone and neck of YOLOv6, the model achieves state-of-the-art performance on the COCO dataset at high-resolution input.
+- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of smaller YOLOv6 models, strengthening an auxiliary regression branch during training and removing it at inference time to avoid a marked speed decline (a generic sketch of the underlying distillation loss follows this list).
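+
+For readers unfamiliar with distillation, the sketch below shows the generic knowledge-distillation loss that self-distillation builds on: a hard-label term blended with a softened teacher-matching term. This is a minimal, generic formulation for illustration, not YOLOv6's exact training objective; the temperature `T` and weight `alpha` are assumed hyperparameters:
+
+```python
+import torch.nn.functional as F
+
+def distillation_loss(student_logits, teacher_logits, labels, T=2.0, alpha=0.5):
+    """Generic KD loss: hard-label cross-entropy blended with a softened KL term."""
+    hard = F.cross_entropy(student_logits, labels)
+    soft = F.kl_div(
+        F.log_softmax(student_logits / T, dim=-1),
+        F.softmax(teacher_logits / T, dim=-1),
+        reduction="batchmean",
+    ) * (T * T)  # scale gradients back up after temperature softening
+    return alpha * hard + (1 - alpha) * soft
+```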
Example "ไพ‹" + + === "Python" + + PyTorchใงไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎ`*.pt`ใƒขใƒ‡ใƒซใจ`*.yaml`่จญๅฎšใƒ•ใ‚กใ‚คใƒซใ‚’`YOLO()`ใ‚ฏใƒฉใ‚นใซๆธกใ™ใ“ใจใงใ€Pythonใงใƒขใƒ‡ใƒซใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝœๆˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + + ```python + from ultralytics import YOLO + + # YOLOv6nใƒขใƒ‡ใƒซใ‚’ใ‚ผใƒญใ‹ใ‚‰ๆง‹็ฏ‰ใ™ใ‚‹ + model = YOLO('yolov6n.yaml') + + # ใƒขใƒ‡ใƒซใฎๆƒ…ๅ ฑใ‚’่กจ็คบใ™ใ‚‹๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ + model.info() + + # COCO8ใฎไพ‹้กŒใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒขใƒ‡ใƒซใ‚’100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv6nใƒขใƒ‡ใƒซใง'bus.jpg'็”ปๅƒใซๅฏพใ—ใฆๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ™ใ‚‹ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ใƒขใƒ‡ใƒซใ‚’็›ดๆŽฅๅฎŸ่กŒใ™ใ‚‹ใŸใ‚ใฎCLIใ‚ณใƒžใƒณใƒ‰ใ‚‚ๅˆฉ็”จใงใใพใ™ใ€‚ + + ```bash + # ใ‚ผใƒญใ‹ใ‚‰YOLOv6nใƒขใƒ‡ใƒซใ‚’ๆง‹็ฏ‰ใ—ใ€COCO8ใฎไพ‹้กŒใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใง100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # ใ‚ผใƒญใ‹ใ‚‰YOLOv6nใƒขใƒ‡ใƒซใ‚’ๆง‹็ฏ‰ใ—ใ€'bus.jpg'็”ปๅƒใซๅฏพใ—ใฆๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ™ใ‚‹ + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใจใƒขใƒผใƒ‰ + +YOLOv6ใ‚ทใƒชใƒผใ‚บใฏใ€้ซ˜ๆ€ง่ƒฝใฎ[ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md)ใซๆœ€้ฉๅŒ–ใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฏใ€ใ•ใพใ–ใพใช่จˆ็ฎ—ใƒ‹ใƒผใ‚บใจ็ฒพๅบฆ่ฆไปถใซๅฏพๅฟœใ—ใฆใŠใ‚Šใ€ๅน…ๅบƒใ„ใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉๅฟœใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซใ‚ฟใ‚คใƒ— | ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎ้‡ใฟ | ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏ | ๆŽจ่ซ– | ๆคœ่จผ | ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ | ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ | +|-----------|----------------|--------------------------------|----|----|--------|--------| +| YOLOv6-N | `yolov6-n.pt` | [ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +ใ“ใฎ่กจใฏใ€YOLOv6ใƒขใƒ‡ใƒซใฎใƒใƒชใ‚ขใƒณใƒˆใซใคใ„ใฆใฎ่ฉณ็ดฐใชๆฆ‚่ฆใ‚’ๆไพ›ใ—ใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎใ‚ฟใ‚นใ‚ฏใซใŠใ‘ใ‚‹ๆฉŸ่ƒฝใจใ€[ๆŽจ่ซ–](../modes/predict.md)ใ€[ๆคœ่จผ](../modes/val.md)ใ€[ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ](../modes/train.md)ใ€[ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ](../modes/export.md)ใชใฉใฎใ•ใพใ–ใพใชๆ“ไฝœใƒขใƒผใƒ‰ใจใฎไบ’ๆ›ๆ€งใ‚’ๅผท่ชฟใ—ใฆใ„ใพใ™ใ€‚ใ“ใฎๅŒ…ๆ‹ฌ็š„ใชใ‚ตใƒใƒผใƒˆใซใ‚ˆใ‚Šใ€ใƒฆใƒผใ‚ถใƒผใฏใ•ใพใ–ใพใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ทใƒŠใƒชใ‚ชใงYOLOv6ใƒขใƒ‡ใƒซใฎๆฉŸ่ƒฝใ‚’ๅๅˆ†ใซๆดป็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +## ๅผ•็”จใจ่ฌ่พž + +ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎๅˆ†้‡ŽใซใŠใ‘ใ‚‹้‡่ฆใช่ฒข็Œฎใ‚’ใ—ใŸ่‘—่€…ใซ่ฌๆ„ใ‚’่กจใ—ใพใ™ใ€‚ + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +YOLOv6ใฎใ‚ชใƒชใ‚ธใƒŠใƒซ่ซ–ๆ–‡ใฏ[arXiv](https://arxiv.org/abs/2301.05586)ใงๅ…ฅๆ‰‹ใงใใพใ™ใ€‚่‘—่€…ใฏ่‡ช่บซใฎ็ ”็ฉถใ‚’ๅบƒใๅ…ฑๆœ‰ใ—ใฆใŠใ‚Šใ€ใ‚ณใƒผใƒ‰ใƒ™ใƒผใ‚นใฏ[GitHub](https://github.com/meituan/YOLOv6)ใงใ‚ขใ‚ฏใ‚ปใ‚นใงใใพใ™ใ€‚็งใŸใกใฏๅฝผใ‚‰ใŒใ“ใฎๅˆ†้‡Žใฎ้€ฒๆญฉใซ่ฒข็Œฎใ—ใ€ใใฎ็ ”็ฉถใ‚’ๅบƒใๅ…ฌ้–‹ใ—ใฆใ„ใ‚‹ใ“ใจใซๆ„Ÿ่ฌใ—ใฆใ„ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov6.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov7.md b/ultralytics/docs/ja/models/yolov7.md new file mode 100755 index 0000000..3981150 --- /dev/null +++ b/ultralytics/docs/ja/models/yolov7.md @@ -0,0 +1,65 @@ +--- +comments: true +description: YOLOv7ใฏ้ซ˜้€Ÿๆ€งใจ็ฒพๅบฆใฎไธกๆ–นใงๆ—ข็Ÿฅใฎ็‰ฉไฝ“ๆคœๅ‡บๅ™จใ‚’ๅ‡Œ้ง•ใ™ใ‚‹ๆœ€ๅ…ˆ็ซฏใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใงใ™ใ€‚ใ“ใฎๆŠ€่ก“ใงใฏใ€ใƒขใƒ‡ใƒซๅ†ใƒ‘ใƒฉใƒกใƒผใ‚ฟๅŒ–ใ€ๅ‹•็š„ใƒฉใƒ™ใƒซๅ‰ฒใ‚Šๅฝ“ใฆใ€ๆ‹กๅผตใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐใ€่ค‡ๅˆใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐใชใฉใ€ๅญฆ็ฟ’ๅฏ่ƒฝใชBag-of-Freebiesๆœ€้ฉๅŒ–ใซ็„ฆ็‚นใ‚’ๅฝ“ใฆใฆใ„ใพใ™ใ€‚ +keywords: YOLOv7, ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จ, ๆœ€ๅ…ˆ็ซฏ, Ultralytics, MS COCO ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ, ใƒขใƒ‡ใƒซๅ†ใƒ‘ใƒฉใƒกใƒผใ‚ฟๅŒ–, ๅ‹•็š„ใƒฉใƒ™ใƒซๅ‰ฒใ‚Šๅฝ“ใฆ, ๆ‹กๅผตใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐ, ่ค‡ๅˆใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐ +--- + +# YOLOv7: ๅญฆ็ฟ’ๅฏ่ƒฝใชBag-of-Freebies + +YOLOv7ใฏใ€5 FPSใ‹ใ‚‰160 FPSใฎ็ฏ„ๅ›ฒใงใ€ๆ—ข็Ÿฅใฎใ™ในใฆใฎ็‰ฉไฝ“ๆคœๅ‡บๅ™จใ‚’้€Ÿๅบฆใจ็ฒพๅบฆใฎไธกๆ–นใงๅ‡Œ้ง•ใ™ใ‚‹ๆœ€ๅ…ˆ็ซฏใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใงใ™ใ€‚GPU V100ไธŠใง30 FPSไปฅไธŠใฎๆ€ง่ƒฝใ‚’ๆŒใคใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใฎไธญใงใ€ๆœ€้ซ˜ใฎ็ฒพๅบฆ๏ผˆ56.8% AP๏ผ‰ใ‚’ๆŒใฃใฆใ„ใพใ™ใ€‚ใ•ใ‚‰ใซใ€YOLOv7ใฏYOLORใ€YOLOXใ€Scaled-YOLOv4ใ€YOLOv5ใชใฉใฎไป–ใฎ็‰ฉไฝ“ๆคœๅ‡บๅ™จใ‚’้€Ÿๅบฆใจ็ฒพๅบฆใฎ้ขใงไธŠๅ›žใฃใฆใ„ใพใ™ใ€‚ใ“ใฎใƒขใƒ‡ใƒซใฏใ€ไป–ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚„ไบ‹ๅ‰ๅญฆ็ฟ’้‡ใฟใ‚’ไฝฟ็”จใ›ใšใซใ€MS COCOใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚YOLOv7ใฎใ‚ฝใƒผใ‚นใ‚ณใƒผใƒ‰ใฏGitHubใงๅ…ฅๆ‰‹ใงใใพใ™ใ€‚ + +![SOTA็‰ฉไฝ“ๆคœๅ‡บๅ™จใจใฎๆฏ”่ผƒ](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**ๆœ€ๅ…ˆ็ซฏ็‰ฉไฝ“ๆคœๅ‡บๅ™จใจใฎๆฏ”่ผƒใ€‚** ่กจ2ใฎ็ตๆžœใ‹ใ‚‰ใ‚ใ‹ใ‚‹ใ‚ˆใ†ใซใ€ๆๆกˆๆ‰‹ๆณ•ใฏ้€Ÿๅบฆใจ็ฒพๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใซใŠใ„ใฆๆœ€ใ‚‚ๅ„ชใ‚Œใฆใ„ใพใ™ใ€‚ไพ‹ใˆใฐใ€YOLOv7-tiny-SiLUใจYOLOv5-N๏ผˆr6.1๏ผ‰ใ‚’ๆฏ”่ผƒใ™ใ‚‹ใจใ€ๆˆ‘ใ€…ใฎๆ‰‹ๆณ•ใฏ127 fps้€Ÿใใ€APใซใŠใ„ใฆ10.7%็ฒพๅบฆใŒๅ‘ไธŠใ—ใฆใ„ใพใ™ใ€‚ใพใŸใ€YOLOv7ใฏใƒ•ใƒฌใƒผใƒ ใƒฌใƒผใƒˆ161 fpsใง51.4%ใฎAPใ‚’้”ๆˆใ—ใฆใ„ใพใ™ใŒใ€ๅŒใ˜APใ‚’ๆŒใคPPYOLOE-Lใฎใƒ•ใƒฌใƒผใƒ ใƒฌใƒผใƒˆใฏ78 fpsใฎใฟใงใ™ใ€‚ใƒ‘ใƒฉใƒกใƒผใ‚ฟไฝฟ็”จ้‡ใซ้–ขใ—ใฆใฏใ€YOLOv7ใฏPPYOLOE-Lใ‚ˆใ‚Šใ‚‚41%ๅฐ‘ใชใ„ใงใ™ใ€‚ใ•ใ‚‰ใซใ€114 fpsใฎๆŽจ่ซ–้€Ÿๅบฆใ‚’ๆŒใคYOLOv7-Xใ‚’99 fpsใฎๆŽจ่ซ–้€Ÿๅบฆใ‚’ๆŒใคYOLOv5-L๏ผˆr6.1๏ผ‰ใจๆฏ”่ผƒใ™ใ‚‹ใจใ€YOLOv7-XใฏAPใ‚’3.9%ๅ‘ไธŠใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚YOLOv7-Xใ‚’ใ‚นใ‚ฑใƒผใƒซใฎ่ฟ‘ใ„YOLOv5-X๏ผˆr6.1๏ผ‰ใจๆฏ”่ผƒใ™ใ‚‹ใจใ€YOLOv7-XใฎๆŽจ่ซ–้€Ÿๅบฆใฏ31 
fps้€Ÿใ„ใงใ™ใ€‚ใพใŸใ€ใƒ‘ใƒฉใƒกใƒผใ‚ฟ้‡ใจ่จˆ็ฎ—้‡ใฎ่ฆณ็‚นใ‹ใ‚‰ใ€YOLOv7-XใฏYOLOv5-X๏ผˆr6.1๏ผ‰ใซๆฏ”ในใฆใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’22%ใ€่จˆ็ฎ—้‡ใ‚’8%ๅ‰Šๆธ›ใ—ใฆใ„ใพใ™ใŒใ€APใฏ2.2%ๅ‘ไธŠใ—ใฆใ„ใพใ™๏ผˆ[ๅ‡บๅ…ธ](https://arxiv.org/pdf/2207.02696.pdf)๏ผ‰ใ€‚ + +## ๆฆ‚่ฆ + +ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บใฏใ€ใƒžใƒซใƒใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ€่‡ชๅ‹•้‹่ปขใ€ใƒญใƒœใƒ†ใ‚ฃใ‚ฏใ‚นใ€ๅŒป็™‚็”ปๅƒ่งฃๆžใชใฉใ€ๅคšใใฎใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใ‚ทใ‚นใƒ†ใƒ ใฎ้‡่ฆใชใ‚ณใƒณใƒใƒผใƒใƒณใƒˆใงใ™ใ€‚่ฟ‘ๅนดใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บใฎ้–‹็™บใฏใ€ใ•ใพใ–ใพใชCPUใ€GPUใ€ใƒ‹ใƒฅใƒผใƒฉใƒซใƒ—ใƒญใ‚ปใƒƒใ‚ทใƒณใ‚ฐใƒฆใƒ‹ใƒƒใƒˆ๏ผˆNPU๏ผ‰ใฎๆŽจ่ซ–้€ŸๅบฆใฎๅŠน็އ็š„ใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎ่จญ่จˆใจๅ‘ไธŠใซ็„ฆ็‚นใ‚’ๅฝ“ใฆใฆใ„ใพใ™ใ€‚YOLOv7ใฏใ€ใ‚จใƒƒใ‚ธใ‹ใ‚‰ใ‚ฏใƒฉใ‚ฆใƒ‰ใพใงใ€ใƒขใƒใ‚คใƒซGPUใจGPUใƒ‡ใƒใ‚คใ‚นใฎไธกๆ–นใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚ + +ๅพ“ๆฅใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใŒใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎๆœ€้ฉๅŒ–ใซ็„ฆ็‚นใ‚’ๅฝ“ใฆใ‚‹ใฎใซๅฏพใ—ใ€YOLOv7ใงใฏๅญฆ็ฟ’ใƒ—ใƒญใ‚ปใ‚นใฎๆœ€้ฉๅŒ–ใซๆณจๅŠ›ใ—ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใซใฏใ€ๆŽจ่ซ–ใ‚ณใ‚นใƒˆใ‚’ๅข—ใ‚„ใ•ใšใซ็‰ฉไฝ“ๆคœๅ‡บใฎ็ฒพๅบฆใ‚’ๅ‘ไธŠใ•ใ›ใ‚‹ใŸใ‚ใฎใƒขใ‚ธใƒฅใƒผใƒซใ‚„ๆœ€้ฉๅŒ–ๆ‰‹ๆณ•ใŒๅซใพใ‚Œใพใ™ใ€‚ใ“ใ‚Œใฏใ€ใ€Œๅญฆ็ฟ’ๅฏ่ƒฝใชBag-of-Freebiesใ€ใจใ„ใ†ใ‚ณใƒณใ‚ปใƒ—ใƒˆใงใ™ใ€‚ + +## ไธปใช็‰นๅพด + +YOLOv7ใฏใ€ใ„ใใคใ‹ใฎไธปใช็‰นๅพดใ‚’ๅฐŽๅ…ฅใ—ใฆใ„ใพใ™ใ€‚ + +1. **ใƒขใƒ‡ใƒซๅ†ใƒ‘ใƒฉใƒกใƒผใ‚ฟๅŒ–**: YOLOv7ใฏใ€ใ‚ฐใƒฉใƒ‡ใƒผใ‚ทใƒงใƒณไผๆ’ญ็ตŒ่ทฏใฎๆฆ‚ๅฟตใ‚’ๆŒใคใ€ใ•ใพใ–ใพใชใƒใƒƒใƒˆใƒฏใƒผใ‚ฏใฎใƒฌใ‚คใƒคใƒผใซ้ฉ็”จๅฏ่ƒฝใช่จˆ็”ปใ•ใ‚ŒใŸๅ†ใƒ‘ใƒฉใƒกใƒผใ‚ฟๅŒ–ใƒขใƒ‡ใƒซใ‚’ๆๆกˆใ—ใฆใ„ใพใ™ใ€‚ + +2. **ๅ‹•็š„ใƒฉใƒ™ใƒซๅ‰ฒใ‚Šๅฝ“ใฆ**: ่ค‡ๆ•ฐใฎๅ‡บๅŠ›ๅฑคใ‚’ๆŒใคใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใงใฏใ€็•ฐใชใ‚‹ใƒ–ใƒฉใƒณใƒใฎๅ‡บๅŠ›ใซๅ‹•็š„ใชใ‚ฟใƒผใ‚ฒใƒƒใƒˆใ‚’ๅ‰ฒใ‚Šๅฝ“ใฆใ‚‹ๆ–นๆณ•ใŒๆ–ฐใŸใช่ชฒ้กŒใจใชใ‚Šใพใ™ใ€‚ใ“ใฎๅ•้กŒใ‚’่งฃๆฑบใ™ใ‚‹ใŸใ‚ใซใ€YOLOv7ใฏใ‚ณใƒผใ‚นใƒˆใ‚ฅใƒผใƒ•ใ‚กใ‚คใƒณใƒชใƒผใƒ‰ใ‚ฌใ‚คใƒ‰ไป˜ใใƒฉใƒ™ใƒซๅ‰ฒใ‚Šๅฝ“ใฆใจๅ‘ผใฐใ‚Œใ‚‹ๆ–ฐใ—ใ„ใƒฉใƒ™ใƒซๅ‰ฒใ‚Šๅฝ“ใฆๆ‰‹ๆณ•ใ‚’ๅฐŽๅ…ฅใ—ใฆใ„ใพใ™ใ€‚ + +3. **ๆ‹กๅผตใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐใจ่ค‡ๅˆใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐ**: YOLOv7ใฏใ€ใ€Œๆ‹กๅผตใ€ใŠใ‚ˆใณใ€Œ่ค‡ๅˆใ‚นใ‚ฑใƒผใƒชใƒณใ‚ฐใ€ใฎๆ–นๆณ•ใ‚’ๆๆกˆใ—ใ€ๅŠนๆžœ็š„ใซใƒ‘ใƒฉใƒกใƒผใ‚ฟใจ่จˆ็ฎ—ใ‚’ๅˆฉ็”จใงใใ‚‹ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใซใชใ‚Šใพใ™ใ€‚ + +4. **ๅŠน็އๆ€ง**: YOLOv7ใซใ‚ˆใ‚‹ๆ–นๆณ•ใฏใ€ๆœ€ๅ…ˆ็ซฏใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บๅ™จใฎใƒ‘ใƒฉใƒกใƒผใ‚ฟ้‡ใ‚’็ด„40%ใ€่จˆ็ฎ—้‡ใ‚’็ด„50%ๅŠน็އ็š„ใซๅ‰Šๆธ›ใ—ใ€ใ‚ˆใ‚Š้ซ˜้€ŸใชๆŽจ่ซ–้€Ÿๅบฆใจ้ซ˜ใ„ๆคœๅ‡บ็ฒพๅบฆใ‚’ๅฎŸ็พใ—ใพใ™ใ€‚ + +## ไฝฟ็”จไพ‹ + +ๅŸท็ญ†ๆ™‚็‚นใงใฏใ€Ultralyticsใฏ็พๅœจใ€YOLOv7ใƒขใƒ‡ใƒซใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ›ใ‚“ใ€‚ใใฎใŸใ‚ใ€YOLOv7ใ‚’ไฝฟ็”จใ—ใŸใ„ๅ ดๅˆใฏใ€YOLOv7ใฎGitHubใƒชใƒใ‚ธใƒˆใƒชใ‚’็›ดๆŽฅๅ‚็…งใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + +ไปฅไธ‹ใฏใ€YOLOv7ใ‚’ไฝฟ็”จใ™ใ‚‹ใŸใ‚ใฎๅ…ธๅž‹็š„ใชๆ‰‹้ †ใฎๆฆ‚่ฆใงใ™ใ€‚ + +1. YOLOv7ใฎGitHubใƒชใƒใ‚ธใƒˆใƒชใซใ‚ขใ‚ฏใ‚ปใ‚นใ—ใพใ™: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7)ใ€‚ + +2. READMEใƒ•ใ‚กใ‚คใƒซใซ่จ˜่ผ‰ใ•ใ‚Œใฆใ„ใ‚‹ๆ‰‹้ †ใซๅพ“ใฃใฆใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใพใ™ใ€‚้€šๅธธใ€ใƒชใƒใ‚ธใƒˆใƒชใ‚’ใ‚ฏใƒญใƒผใƒณใ—ใ€ๅฟ…่ฆใชไพๅญ˜้–ขไฟ‚ใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใ€ๅฟ…่ฆใช็’ฐๅขƒๅค‰ๆ•ฐใ‚’่จญๅฎšใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + +3. 
ใ‚คใƒณใ‚นใƒˆใƒผใƒซใŒๅฎŒไบ†ใ—ใŸใ‚‰ใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๆบ–ๅ‚™ใ€ใƒขใƒ‡ใƒซใƒ‘ใƒฉใƒกใƒผใ‚ฟใฎ่จญๅฎšใ€ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ็‰ฉไฝ“ๆคœๅ‡บใ‚’ๅฎŸ่กŒใ™ใ‚‹ใชใฉใ€ใƒชใƒใ‚ธใƒˆใƒชใงๆไพ›ใ•ใ‚Œใฆใ„ใ‚‹ไฝฟ็”จๆ–นๆณ•ใซๅพ“ใฃใฆใ€ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใŠใ‚ˆใณไฝฟ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +ๅ…ทไฝ“็š„ใชๆ‰‹้ †ใฏใ€ๅ…ทไฝ“็š„ใชใƒฆใƒผใ‚นใ‚ฑใƒผใ‚นใจYOLOv7ใƒชใƒใ‚ธใƒˆใƒชใฎ็พๅœจใฎ็Šถๆ…‹ใซใ‚ˆใฃใฆ็•ฐใชใ‚‹ๅ ดๅˆใŒใ‚ใ‚Šใพใ™ใ€‚ใใฎใŸใ‚ใ€YOLOv7ใฎGitHubใƒชใƒใ‚ธใƒˆใƒชใงๆไพ›ใ•ใ‚Œใฆใ„ใ‚‹ๆ‰‹้ †ใ‚’็›ดๆŽฅๅ‚็…งใ™ใ‚‹ใ“ใจใ‚’ๅผทใใŠๅ‹งใ‚ใ—ใพใ™ใ€‚ + +YOLOv7ใฎใ‚ตใƒใƒผใƒˆใŒๅฎŸ่ฃ…ใ•ใ‚Œใ‚‹ใพใงใ€ใ“ใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใ‚’ๆ›ดๆ–ฐใ—ใฆใ€Ultralyticsใฎไฝฟ็”จไพ‹ใ‚’่ฟฝๅŠ ใ™ใ‚‹ใŸใ‚ใฎๅŠชๅŠ›ใ‚’็ถšใ‘ใพใ™ใ€‚ + +## ๅผ•็”จใจ่ฌ่พž + +ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ็‰ฉไฝ“ๆคœๅ‡บใฎๅˆ†้‡Žใงใฎ้‡่ฆใช่ฒข็Œฎใซๅฏพใ—ใฆใ€YOLOv7ใฎ่‘—่€…ใซๆ„Ÿ่ฌใ„ใŸใ—ใพใ™ใ€‚ + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +YOLOv7ใฎใ‚ชใƒชใ‚ธใƒŠใƒซ่ซ–ๆ–‡ใฏ[arXiv](https://arxiv.org/pdf/2207.02696.pdf)ใง่ฆ‹ใคใ‘ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚่‘—่€…ใฏ่‡ชๅˆ†ใŸใกใฎ็ ”็ฉถใ‚’ๅบƒใๅ…ฌ้–‹ใ—ใฆใŠใ‚Šใ€ใ‚ณใƒผใƒ‰ใƒ™ใƒผใ‚นใฏ[GitHub](https://github.com/WongKinYiu/yolov7)ใงใ‚ขใ‚ฏใ‚ปใ‚นใงใใพใ™ใ€‚ๅฝผใ‚‰ใฎ็ ”็ฉถใŒใ“ใฎๅˆ†้‡Žใ‚’้€ฒใ‚ใ€ไป–ใฎ็ ”็ฉถ่€…ใซใ‚‚ใ‚ขใ‚ฏใ‚ปใ‚นๅฏ่ƒฝใซใ™ใ‚‹ๅŠชๅŠ›ใซๆ„Ÿ่ฌใ—ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov7.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/models/yolov8.md b/ultralytics/docs/ja/models/yolov8.md new file mode 100755 index 0000000..a404036 --- /dev/null +++ b/ultralytics/docs/ja/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: YOLOv8ใฎ้ญ…ๅŠ›็š„ใชๆฉŸ่ƒฝใ‚’ๆŽข็ดขใ—ใพใ—ใ‚‡ใ†ใ€‚ใ“ใ‚Œใฏๅฝ“็คพใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใฎๆœ€ๆ–ฐใƒใƒผใ‚ธใƒงใƒณใงใ™๏ผ้ซ˜ๅบฆใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใ€ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ€ใใ—ใฆ็ฒพๅบฆใจ้€Ÿๅบฆใฎๆœ€้ฉใชใƒใƒฉใƒณใ‚นใŒYOLOv8ใ‚’ๅฎŒ็’งใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใฎ้ธๆŠž่‚ขใจใ—ใพใ™ใ€‚ +keywords: YOLOv8, Ultralytics, ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จ, ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซ, ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ, YOLOใ‚ทใƒชใƒผใ‚บ, ้ซ˜ๅบฆใชใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃ, ็ฒพๅบฆ, ้€Ÿๅบฆ +--- + +# YOLOv8 + +## ๆฆ‚่ฆ + +YOLOv8ใฏใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๅ™จใฎYOLOใ‚ทใƒชใƒผใ‚บใฎๆœ€ๆ–ฐ็‰ˆใงใ‚ใ‚Šใ€็ฒพๅบฆใจ้€ŸๅบฆใซใŠใ„ใฆๆœ€ๅ…ˆ็ซฏใฎๆ€ง่ƒฝใ‚’ๆไพ›ใ—ใพใ™ใ€‚ไปฅๅ‰ใฎYOLOใƒใƒผใ‚ธใƒงใƒณใฎ้€ฒๅŒ–ใ‚’ๅŸบใซใ—ใฆใ€YOLOv8ใฏๆ–ฐๆฉŸ่ƒฝใจๆœ€้ฉๅŒ–ใ‚’ๅฐŽๅ…ฅใ—ใ€ๆง˜ใ€…ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใซใŠใ„ใฆๅบƒ็ฏ„ใชใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉใ—ใŸ้ธๆŠž่‚ขใจใชใ‚Šใพใ™ใ€‚ + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## ไธปใช็‰นๅพด + +- **้ซ˜ๅบฆใชใƒใƒƒใ‚ฏใƒœใƒผใƒณใŠใ‚ˆใณใƒใƒƒใ‚ฏใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃ:** 
YOLOv8ใฏๆœ€ๅ…ˆ็ซฏใฎใƒใƒƒใ‚ฏใƒœใƒผใƒณใจใƒใƒƒใ‚ฏใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใ‚’ๆŽก็”จใ—ใฆใŠใ‚Šใ€็‰นๅพดๆŠฝๅ‡บใจใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใŒๅ‘ไธŠใ—ใฆใ„ใพใ™ใ€‚ +- **ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผใชๅˆ†ๅ‰ฒUltralyticsใƒ˜ใƒƒใƒ‰:** YOLOv8ใฏใ‚ขใƒณใ‚ซใƒผใƒ™ใƒผใ‚นใฎๆ–นๆณ•ใจๆฏ”ในใฆใ€ใ‚ขใƒณใ‚ซใƒผใƒ•ใƒชใƒผใชๅˆ†ๅ‰ฒUltralyticsใƒ˜ใƒƒใƒ‰ใ‚’ๆŽก็”จใ—ใ€ใ‚ˆใ‚Š้ซ˜ใ„็ฒพๅบฆใจๅŠน็އ็š„ใชๆคœๅ‡บใƒ—ใƒญใ‚ปใ‚นใซ่ฒข็Œฎใ—ใฆใ„ใพใ™ใ€‚ +- **ๆœ€้ฉใช็ฒพๅบฆใจ้€Ÿๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใฎๆœ€้ฉๅŒ–:** ็ฒพๅบฆใจ้€Ÿๅบฆใฎๆœ€้ฉใชใƒใƒฉใƒณใ‚นใ‚’ไฟใกใชใŒใ‚‰ใ€YOLOv8ใฏๅคšๅฒใซใ‚ใŸใ‚‹ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚ฟใ‚นใ‚ฏใซ้ฉใ—ใฆใ„ใพใ™ใ€‚ +- **ใ•ใพใ–ใพใชไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซ:** YOLOv8ใฏใ•ใพใ–ใพใชใ‚ฟใ‚นใ‚ฏใ‚„ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น่ฆไปถใซๅฏพๅฟœใ™ใ‚‹ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ‚’ๆไพ›ใ—ใฆใŠใ‚Šใ€็‰นๅฎšใฎใƒฆใƒผใ‚นใ‚ฑใƒผใ‚นใซ้ฉใ—ใŸใƒขใƒ‡ใƒซใ‚’็ฐกๅ˜ใซ่ฆ‹ใคใ‘ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +## ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚ฟใ‚นใ‚ฏใจใƒขใƒผใƒ‰ + +YOLOv8ใ‚ทใƒชใƒผใ‚บใฏใ€ใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใฎใ•ใพใ–ใพใชใ‚ฟใ‚นใ‚ฏใซ็‰นๅŒ–ใ—ใŸใ•ใพใ–ใพใชใƒขใƒ‡ใƒซใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฏใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‹ใ‚‰ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ใƒใƒผใ‚บ/ใ‚ญใƒผใƒใ‚คใƒณใƒˆใฎๆคœๅ‡บใ€ๅˆ†้กžใชใฉใฎใ‚ˆใ‚Š่ค‡้›‘ใชใ‚ฟใ‚นใ‚ฏใพใงใ€ใ•ใพใ–ใพใช่ฆไปถใซๅฏพๅฟœใ™ใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚ + +YOLOv8ใ‚ทใƒชใƒผใ‚บใฎๅ„ใƒใƒชใ‚ขใƒณใƒˆใฏใ€ใใ‚Œใžใ‚Œใฎใ‚ฟใ‚นใ‚ฏใซๆœ€้ฉๅŒ–ใ•ใ‚ŒใฆใŠใ‚Šใ€้ซ˜ใ„ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใจ็ฒพๅบฆใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใพใŸใ€ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฏใ€ๆŽจ่ซ–ใ€ๆคœ่จผใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใจใ„ใฃใŸใ•ใพใ–ใพใช้‹็”จใƒขใƒผใƒ‰ใจไบ’ๆ›ๆ€งใŒใ‚ใ‚Šใ€ๅฑ•้–‹ใจ้–‹็™บใฎใ•ใพใ–ใพใชๆฎต้šŽใงไฝฟ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +| ใƒขใƒ‡ใƒซ | ใƒ•ใ‚กใ‚คใƒซๅ | ใ‚ฟใ‚นใ‚ฏ | ๆŽจ่ซ– | ๆคœ่จผ | ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ | ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ | +|-------------|----------------------------------------------------------------------------------------------------------------|----------------------------------------|----|----|--------|--------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [ๆคœๅ‡บ](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [ใƒใƒผใ‚บ/ใ‚ญใƒผใƒใ‚คใƒณใƒˆ](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [ๅˆ†้กž](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +ใ“ใฎ่กจใฏใ€YOLOv8ใƒขใƒ‡ใƒซใฎใƒใƒชใ‚ขใƒณใƒˆใฎๆฆ‚่ฆใ‚’ๆไพ›ใ—ใ€็‰นๅฎšใฎใ‚ฟใ‚นใ‚ฏใงใฎ้ฉ็”จๆ€งใจๆŽจ่ซ–ใ€ๆคœ่จผใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใชใฉใฎใ•ใพใ–ใพใช้‹็”จใƒขใƒผใƒ‰ใจใฎไบ’ๆ›ๆ€งใ‚’็คบใ—ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€YOLOv8ใ‚ทใƒชใƒผใ‚บใฎๅคšๆง˜ๆ€งใจๅ …็‰ขๆ€งใŒ็คบใ•ใ‚ŒใฆใŠใ‚Šใ€ใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใฎใ•ใพใ–ใพใชใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉใ—ใฆใ„ใพใ™ใ€‚ + +## ๆ€ง่ƒฝใƒกใƒˆใƒชใ‚ฏใ‚น + +!!! 
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection (COCO)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAPval<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|-----------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+    === "Detection (Open Images V7)"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), which include 600 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAPval<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|-----------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+        | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+        | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+        | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+        | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |
+
+    === "Segmentation (COCO)"
+
+        See the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes.
+
+        | Model | size<br>(pixels) | mAPbox<br>50-95 | mAPmask<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|-----------------|------------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+        | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+        | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+        | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+        | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+    === "Classification (ImageNet)"
+
+        See the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pre-trained classes.
+
+        | Model | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 |
+        |-------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------|
+        | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+        | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+        | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+        | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+        | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+    === "Pose (COCO)"
+
+        See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/pose/coco/), which include one pre-trained class, 'person'.
+
+        | Model | size<br>(pixels) | mAPpose<br>50-95 | mAPpose<br>50 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------------------|------------------|---------------|---------------------------|--------------------------------|---------------|--------------|
+        | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+        | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+        | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+        | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+        | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+        | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+## Usage Examples
+
+This example provides simple YOLOv8 training and inference examples. For full documentation on these and other modes, see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), and [Export](../modes/export.md) docs pages.
+
+Note that the example below is for YOLOv8 [Detect](../tasks/detect.md) models for object detection. For additional supported tasks, see the [Segment](../tasks/segment.md), [Classify](../tasks/classify.md), and [Pose](../tasks/pose.md) docs.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pre-trained `*.pt` models, as well as configuration `*.yaml` files, can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv8n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are also available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv8n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv8n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov8n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use the YOLOv8 model or any other software from this repository in your work, please cite it using the following format:
+
+!!!
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +DOIใฏๆบ–ๅ‚™ไธญใงใ‚ใ‚Šใ€ๅˆฉ็”จๅฏ่ƒฝใซใชใ‚Šๆฌก็ฌฌใ€ๅผ•็”จใซ่ฟฝๅŠ ใ•ใ‚Œใพใ™ใ€‚YOLOv8ใƒขใƒ‡ใƒซใฏ[AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)ใŠใ‚ˆใณ[Enterprise](https://ultralytics.com/license)ใƒฉใ‚คใ‚ปใƒณใ‚นใงๆไพ›ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/models/yolov8.md:Zone.Identifier b/ultralytics/docs/ja/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/benchmark.md b/ultralytics/docs/ja/modes/benchmark.md new file mode 100755 index 0000000..91572f3 --- /dev/null +++ b/ultralytics/docs/ja/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: YOLOv8ใ‚’ๆง˜ใ€…ใชใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใงใ‚นใƒ”ใƒผใƒ‰ใจ็ฒพๅบฆใ‚’ใƒ—ใƒญใƒ•ใ‚กใ‚คใƒชใƒณใ‚ฐใ™ใ‚‹ๆ–นๆณ•ใ‚’ๅญฆใณใ€mAP50-95ใ€accuracy_top5ใฎใƒกใƒˆใƒชใ‚ฏใ‚นใชใฉใฎๆดžๅฏŸใ‚’ๅพ—ใ‚‹ใ€‚ +keywords: Ultralytics, YOLOv8, ใƒ™ใƒณใƒใƒžใƒผใ‚ฏ, ใ‚นใƒ”ใƒผใƒ‰ใƒ—ใƒญใƒ•ใ‚กใ‚คใƒชใƒณใ‚ฐ, ็ฒพๅบฆใƒ—ใƒญใƒ•ใ‚กใ‚คใƒชใƒณใ‚ฐ, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLOใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผ +--- + +# Ultralytics YOLO ใงใฎใƒขใƒ‡ใƒซใƒ™ใƒณใƒใƒžใƒผใ‚ฏ + +Ultralytics YOLOใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใจ็ตฑๅˆ + +## ใฏใ˜ใ‚ใซ + +ใƒขใƒ‡ใƒซใŒใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใ€ๆคœ่จผใ•ใ‚ŒใŸๅพŒใ€ๆฌกใฎ่ซ–็†็š„ใชใ‚นใƒ†ใƒƒใƒ—ใฏใ€ใ•ใพใ–ใพใช็พๅฎŸไธ–็•Œใฎใ‚ทใƒŠใƒชใ‚ชใงใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ™ใ‚‹ใ“ใจใงใ™ใ€‚Ultralytics YOLOv8 ใฎใƒ™ใƒณใƒใƒžใƒผใ‚ฏใƒขใƒผใƒ‰ใฏใ€ใ•ใพใ–ใพใชใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใงใƒขใƒ‡ใƒซใฎใ‚นใƒ”ใƒผใƒ‰ใจ็ฒพๅบฆใ‚’่ฉ•ไพกใ™ใ‚‹ใŸใ‚ใฎ้ ‘ๅฅใชใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +## ใƒ™ใƒณใƒใƒžใƒผใ‚ฏใŒ้‡่ฆใช็†็”ฑใฏ๏ผŸ + +- **ๆƒ…ๅ ฑใซๅŸบใฅใ„ใŸๆฑบๅฎš:** ใ‚นใƒ”ใƒผใƒ‰ใจ็ฒพๅบฆใฎใƒˆใƒฌใƒผใƒ‰ใ‚ชใƒ•ใซใคใ„ใฆใฎๆดžๅฏŸใ‚’ๅพ—ใ‚‹ใ€‚ +- **ใƒชใ‚ฝใƒผใ‚นๅ‰ฒใ‚Šๅฝ“ใฆ:** ็•ฐใชใ‚‹ใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใง็•ฐใชใ‚‹ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใŒใฉใฎใ‚ˆใ†ใซๅ‹•ไฝœใ™ใ‚‹ใ‹ใ‚’็†่งฃใ™ใ‚‹ใ€‚ +- **ๆœ€้ฉๅŒ–:** ็‰นๅฎšใฎใƒฆใƒผใ‚นใ‚ฑใƒผใ‚นใงๆœ€้ซ˜ใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๆไพ›ใ™ใ‚‹ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใ‚’ๅญฆใถใ€‚ +- **ใ‚ณใ‚นใƒˆๅŠน็އ:** ใƒ™ใƒณใƒใƒžใƒผใ‚ฏ็ตๆžœใซๅŸบใฅใ„ใฆใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใƒชใ‚ฝใƒผใ‚นใ‚’ใ‚ˆใ‚ŠๅŠน็އ็š„ใซไฝฟ็”จใ™ใ‚‹ใ€‚ + +### ใƒ™ใƒณใƒใƒžใƒผใ‚ฏใƒขใƒผใƒ‰ใงใฎไธป่ฆใชใƒกใƒˆใƒชใ‚ฏใ‚น + +- **mAP50-95:** ็‰ฉไฝ“ๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ใƒใƒผใ‚บๆŽจๅฎšใซไฝฟ็”จใ€‚ +- **accuracy_top5:** ็”ปๅƒๅˆ†้กžใซไฝฟ็”จใ€‚ +- **ๆŽจ่ซ–ๆ™‚้–“:** ๅ„็”ปๅƒใซ่ฆใ™ใ‚‹ๆ™‚้–“๏ผˆใƒŸใƒช็ง’๏ผ‰ใ€‚ + +### ใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผ + +- **ONNX:** ๆœ€้ฉใชCPUใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใฎใŸใ‚ใซ +- **TensorRT:** ๆœ€ๅคง้™ใฎGPUๅŠน็އใฎใŸใ‚ใซ +- **OpenVINO:** Intelใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใฎๆœ€้ฉๅŒ–ใฎใŸใ‚ใซ +- **CoreML, TensorFlow SavedModel ใชใฉ:** ๅคšๆง˜ใชใƒ‡ใƒ—ใƒญใ‚คใƒกใƒณใƒˆใƒ‹ใƒผใ‚บใซใ€‚ + +!!! 
Tip "ใƒ’ใƒณใƒˆ" + + * CPUใ‚นใƒ”ใƒผใƒ‰ใ‚ขใƒƒใƒ—ใฎใŸใ‚ใซONNXใพใŸใฏOpenVINOใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ™ใ‚‹ใ€‚ + * GPUใ‚นใƒ”ใƒผใƒ‰ใ‚ขใƒƒใƒ—ใฎใŸใ‚ใซTensorRTใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ™ใ‚‹ใ€‚ + +## ไฝฟ็”จไพ‹ + +ONNXใ€TensorRTใชใฉใ€ใ™ในใฆใฎใ‚ตใƒใƒผใƒˆใ•ใ‚Œใ‚‹ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใงYOLOv8nใƒ™ใƒณใƒใƒžใƒผใ‚ฏใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ๅฎŒๅ…จใชใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅผ•ๆ•ฐใฎใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€ไปฅไธ‹ใฎArgumentsใ‚ปใ‚ฏใ‚ทใƒงใƒณใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # GPUใงใƒ™ใƒณใƒใƒžใƒผใ‚ฏ + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## ๅผ•ๆ•ฐ + +`model`ใ€`data`ใ€`imgsz`ใ€`half`ใ€`device`ใ€`verbose` ใชใฉใฎๅผ•ๆ•ฐใฏใ€็‰นๅฎšใฎใƒ‹ใƒผใ‚บใซๅˆใ‚ใ›ใฆใƒ™ใƒณใƒใƒžใƒผใ‚ฏใ‚’ๅพฎ่ชฟๆ•ดใ—ใ€ใ•ใพใ–ใพใชใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๅฎนๆ˜“ใซๆฏ”่ผƒใ™ใ‚‹ใŸใ‚ใซใƒฆใƒผใ‚ถใƒผใซๆŸ”่ปŸๆ€งใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +| ใ‚ญใƒผ | ๅ€ค | ่ชฌๆ˜Ž | +|-----------|---------|-----------------------------------------------------------| +| `model` | `None` | ใƒขใƒ‡ใƒซใƒ•ใ‚กใ‚คใƒซใธใฎใƒ‘ใ‚นใ€ไพ‹: yolov8n.pt, yolov8n.yaml | +| `data` | `None` | ใƒ™ใƒณใƒใƒžใƒผใ‚ฏใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๅ‚็…งใ™ใ‚‹YAMLใธใฎใƒ‘ใ‚น๏ผˆ`val`ใƒฉใƒ™ใƒซใฎไธ‹๏ผ‰ | +| `imgsz` | `640` | ็”ปๅƒใ‚ตใ‚คใ‚บใ‚’ใ‚นใ‚ซใƒฉใƒผใพใŸใฏ(h, w)ใƒชใ‚นใƒˆใงใ€ไพ‹: (640, 480) | +| `half` | `False` | FP16้‡ๅญๅŒ– | +| `int8` | `False` | INT8้‡ๅญๅŒ– | +| `device` | `None` | ๅฎŸ่กŒใƒ‡ใƒใ‚คใ‚นใ€ไพ‹: cuda device=0 ใพใŸใฏ device=0,1,2,3 ใพใŸใฏ device=cpu | +| `verbose` | `False` | ใ‚จใƒฉใƒผๆ™‚ใซ็ถš่กŒใ—ใชใ„๏ผˆbool๏ผ‰ใ€ใพใŸใฏvalๅบŠใ—ใใ„ๅ€ค๏ผˆfloat๏ผ‰ | + +## ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผ + +ไปฅไธ‹ใฎๅฏ่ƒฝใชใ™ในใฆใฎใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใง่‡ชๅ‹•็š„ใซใƒ™ใƒณใƒใƒžใƒผใ‚ฏใ‚’่ฉฆใฟใพใ™ใ€‚ + +| ๅฝขๅผ | `format` ๅผ•ๆ•ฐ | ใƒขใƒ‡ใƒซ | ใƒกใ‚ฟใƒ‡ใƒผใ‚ฟ | ๅผ•ๆ•ฐ | +|--------------------------------------------------------------------|---------------|---------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | 
`yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +[ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ](https://docs.ultralytics.com/modes/export/)ใƒšใƒผใ‚ธใงใ•ใ‚‰ใซ่ฉณใ—ใ„`export`ใฎ่ฉณ็ดฐใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ diff --git a/ultralytics/docs/ja/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/ja/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/export.md b/ultralytics/docs/ja/modes/export.md new file mode 100755 index 0000000..fbd30c3 --- /dev/null +++ b/ultralytics/docs/ja/modes/export.md @@ -0,0 +1,76 @@ +--- +comments: true +description: YOLOv8ใƒขใƒ‡ใƒซใ‚’ONNX, TensorRT, CoreMLใชใฉใฎๆง˜ใ€…ใชใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใธใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ™ใ‚‹ๆ‰‹้ †ใซใคใ„ใฆใฎใ‚ฌใ‚คใƒ‰ใงใ™ใ€‚ไปŠใ™ใๅฑ•้–‹ใ‚’ๆŽขๆฑ‚ใ—ใฆใใ ใ•ใ„๏ผ +keywords: YOLO, YOLOv8, Ultralytics, ใƒขใƒ‡ใƒซใ‚จใ‚ญใ‚นใƒใƒผใƒˆ, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆ +--- + +# Ultralytics YOLO ใงใฎใƒขใƒ‡ใƒซใ‚จใ‚ญใ‚นใƒใƒผใƒˆ + +Ultralytics YOLO ใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใจ็ตฑๅˆ + +## ใฏใ˜ใ‚ใซ + +ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ็ต‚ไบ†ๅพŒใฎๆœ€็ต‚็›ฎๆจ™ใฏใ€ๅฎŸไธ–็•Œใฎใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซๅฐŽๅ…ฅใ™ใ‚‹ใ“ใจใงใ™ใ€‚Ultralytics YOLOv8ใฎใ‚จใ‚ญใ‚นใƒใƒผใƒˆใƒขใƒผใƒ‰ใฏใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซใ‚’็•ฐใชใ‚‹ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ—ใฆใ€ๆง˜ใ€…ใชใƒ—ใƒฉใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒ ใ‚„ใƒ‡ใƒใ‚คใ‚นใงๅฑ•้–‹ๅฏ่ƒฝใซใ™ใ‚‹ใŸใ‚ใฎๅคšๆง˜ใชใ‚ชใƒ—ใ‚ทใƒงใƒณใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใฎๅŒ…ๆ‹ฌ็š„ใชใ‚ฌใ‚คใƒ‰ใฏใ€ใƒขใƒ‡ใƒซใ‚จใ‚ญใ‚นใƒใƒผใƒˆใฎใƒ‹ใƒฅใ‚ขใƒณใ‚นใ‚’ใ‚ใ‹ใ‚Šใ‚„ใ™ใ่งฃ่ชฌใ—ใ€ๆœ€ๅคงใฎไบ’ๆ›ๆ€งใจใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’้”ๆˆใ™ใ‚‹ๆ–นๆณ•ใ‚’ใ”็ดนไป‹ใ—ใพใ™ใ€‚ + +

+ ่ฆ–่ด: ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใŸUltralytics YOLOv8ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ—ใฆใ€ใ‚ฆใ‚งใƒ–ใ‚ซใƒ ใงใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ™ใ‚‹ๆ–นๆณ•ใ€‚
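+
+ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใฎๆตใ‚Œใ‚’ใ‚คใƒกใƒผใ‚ธใ—ใ‚„ใ™ใใ™ใ‚‹ใŸใ‚ใ€ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ—ใŸใƒขใƒ‡ใƒซใ‚’ใใฎใพใพๆŽจ่ซ–ใซไฝฟใ†ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™ใ€‚ๅ‡บๅŠ›ใƒ•ใ‚กใ‚คใƒซๅ๏ผˆ`yolov8n.onnx`๏ผ‰ใ‚„ๅ…ฅๅŠ›็”ปๅƒใฎURLใฏไธ€ไพ‹ใงใ™ใ€‚
+
+```python
+from ultralytics import YOLO
+
+# ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ‚’ONNXๅฝขๅผใซใ‚จใ‚ญใ‚นใƒใƒผใƒˆ๏ผˆexport()ใฏใ‚จใ‚ญใ‚นใƒใƒผใƒˆๅ…ˆใฎใƒ‘ใ‚นใ‚’่ฟ”ใ™๏ผ‰
+model = YOLO('yolov8n.pt')
+onnx_path = model.export(format='onnx')  # ไพ‹: 'yolov8n.onnx'
+
+# ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ—ใŸONNXใƒขใƒ‡ใƒซใ‚’YOLOใ‚ฏใƒฉใ‚นใงใใฎใพใพ่ชญใฟ่พผใ‚“ใงๆŽจ่ซ–
+onnx_model = YOLO(onnx_path)
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+```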
+ +## YOLOv8ใฎใ‚จใ‚ญใ‚นใƒใƒผใƒˆใƒขใƒผใƒ‰ใ‚’้ธใถ็†็”ฑใฏ๏ผŸ + +- **ๆฑŽ็”จๆ€ง:** ONNX, TensorRT, CoreMLใชใฉ่ค‡ๆ•ฐใฎใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใธใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ€‚ +- **ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น:** TensorRTใงๆœ€ๅคง5ๅ€ใฎGPU้ซ˜้€ŸๅŒ–ใ€ONNXใพใŸใฏOpenVINOใง3ๅ€ใฎCPU้ซ˜้€ŸๅŒ–ใ‚’ๅฎŸ็พใ€‚ +- **ไบ’ๆ›ๆ€ง:** ๆง˜ใ€…ใชใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใŠใ‚ˆใณใ‚ฝใƒ•ใƒˆใ‚ฆใ‚งใ‚ข็’ฐๅขƒใงใƒฆใƒ‹ใƒใƒผใ‚ตใƒซใซใƒขใƒ‡ใƒซใ‚’ๅฑ•้–‹ใ€‚ +- **ไฝฟใ„ใ‚„ใ™ใ•:** ใ‚ทใƒณใƒ—ใƒซใชCLIใŠใ‚ˆใณPython APIใง็ฐกๅ˜ใ‹ใค่ฟ…้€Ÿใชใƒขใƒ‡ใƒซใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ€‚ + +### ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใƒขใƒผใƒ‰ใฎไธป่ฆๆฉŸ่ƒฝ + +ใ„ใใคใ‹ใฎๆณจ็›ฎใ™ในใๆฉŸ่ƒฝใฏไปฅไธ‹ใฎ้€šใ‚Šใงใ™: + +- **ใƒฏใƒณใ‚ฏใƒชใƒƒใ‚ฏใ‚จใ‚ญใ‚นใƒใƒผใƒˆ:** ็•ฐใชใ‚‹ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใธใฎใ‚ทใƒณใƒ—ใƒซใชใ‚ณใƒžใƒณใƒ‰ใ€‚ +- **ใƒใƒƒใƒใ‚จใ‚ญใ‚นใƒใƒผใƒˆ:** ใƒใƒƒใƒๆŽจ่ซ–ๅฏพๅฟœใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ€‚ +- **ๆœ€้ฉๅŒ–ๆŽจ่ซ–:** ใ‚ˆใ‚Š้ซ˜้€ŸใชๆŽจ่ซ–ใฎใŸใ‚ใซๆœ€้ฉๅŒ–ใ•ใ‚ŒใŸใ‚จใ‚ญใ‚นใƒใƒผใƒˆใƒขใƒ‡ใƒซใ€‚ +- **ใƒใƒฅใƒผใƒˆใƒชใ‚ขใƒซๅ‹•็”ป:** ใ‚นใƒ ใƒผใ‚บใชใ‚จใ‚ญใ‚นใƒใƒผใƒˆไฝ“้จ“ใฎใŸใ‚ใฎ่ฉณ็ดฐใชใ‚ฌใ‚คใƒ‰ใจใƒใƒฅใƒผใƒˆใƒชใ‚ขใƒซใ€‚ + +!!! Tip "ใƒ’ใƒณใƒˆ" + + * ONNXใพใŸใฏOpenVINOใธใฎใ‚จใ‚ญใ‚นใƒใƒผใƒˆใงๆœ€ๅคง3ๅ€ใฎCPU้€Ÿๅบฆใ‚ขใƒƒใƒ—ใ€‚ + * TensorRTใธใฎใ‚จใ‚ญใ‚นใƒใƒผใƒˆใงๆœ€ๅคง5ๅ€ใฎGPU้€Ÿๅบฆใ‚ขใƒƒใƒ—ใ€‚ + +## ไฝฟ็”จไพ‹ + +YOLOv8nใƒขใƒ‡ใƒซใ‚’ONNXใ‚„TensorRTใชใฉใฎ็•ฐใชใ‚‹ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซใ‚จใ‚ญใ‚นใƒใƒผใƒˆใ—ใพใ™ใ€‚ใ‚จใ‚ญใ‚นใƒใƒผใƒˆๅผ•ๆ•ฐใฎใƒ•ใƒซใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€ไปฅไธ‹ใฎArgumentsใ‚ปใ‚ฏใ‚ทใƒงใƒณใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # Load a model + model = YOLO('yolov8n.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’่ชญใฟ่พผใ‚€ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’่ชญใฟ่พผใ‚€ + + # ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆ + yolo export model=path/to/best.pt format=onnx # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ญใ‚นใƒใƒผใƒˆ + ``` + +## ๅผ•ๆ•ฐ + +YOLOใƒขใƒ‡ใƒซใฎใ‚จใ‚ญใ‚นใƒใƒผใƒˆ่จญๅฎš + +[...content truncated for length...] 
diff --git a/ultralytics/docs/ja/modes/export.md:Zone.Identifier b/ultralytics/docs/ja/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/index.md b/ultralytics/docs/ja/modes/index.md new file mode 100755 index 0000000..2e9abc1 --- /dev/null +++ b/ultralytics/docs/ja/modes/index.md @@ -0,0 +1,53 @@ +--- +comments: true +description: ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‹ใ‚‰ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใพใงใ€UltralyticsใฎYOLOv8ใ‚’ๆœ€ๅคง้™ใซๆดป็”จใ—ใพใ™ใ€‚ๆคœ่จผใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ€ใƒ™ใƒณใƒใƒžใƒผใ‚ญใƒณใ‚ฐใชใฉใ€ๅ„ใ‚ตใƒใƒผใƒˆใƒขใƒผใƒ‰ใฎๆดžๅฏŸใจไพ‹ใ‚’ๅพ—ใ‚‰ใ‚Œใพใ™ใ€‚
+keywords: Ultralytics, YOLOv8, ๆฉŸๆขฐๅญฆ็ฟ’, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ, ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, ๆคœ่จผ, ไบˆๆธฌ, ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ, ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ, ใƒ™ใƒณใƒใƒžใƒผใ‚ญใƒณใ‚ฐ
+---
+
+# Ultralytics YOLOv8 ใƒขใƒผใƒ‰
+
+Ultralytics YOLOใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใจ็ตฑๅˆ
+
+## ใฏใ˜ใ‚ใซ
+
+Ultralytics YOLOv8ใฏใ€ๅ˜ใชใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใƒขใƒ‡ใƒซใงใฏใ‚ใ‚Šใพใ›ใ‚“ใ€‚ใƒ‡ใƒผใ‚ฟๅŽ้›†ใ‚„ใƒขใƒ‡ใƒซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‹ใ‚‰ใ€ๆคœ่จผใ€ใƒ‡ใƒ—ใƒญใ‚คใƒกใƒณใƒˆใ€ๅฎŸ้š›ใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใซ่‡ณใ‚‹ใพใงใ€ๆฉŸๆขฐๅญฆ็ฟ’ใƒขใƒ‡ใƒซใฎใƒฉใ‚คใƒ•ใ‚ตใ‚คใ‚ฏใƒซๅ…จไฝ“ใ‚’ใ‚ซใƒใƒผใ™ใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚ŒใŸๅคš็”จ้€”ใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏใงใ™ใ€‚ๅ„ใƒขใƒผใƒ‰ใฏ็‰นๅฎšใฎ็›ฎ็š„ใซๅฝน็ซ‹ใกใ€็•ฐใชใ‚‹ใ‚ฟใ‚นใ‚ฏใ‚„ใƒฆใƒผใ‚นใ‚ฑใƒผใ‚นใซๅฟ…่ฆใชๆŸ”่ปŸๆ€งใจๅŠน็އใ‚’ๆไพ›ใ™ใ‚‹ใ‚ˆใ†ใซๅทฅๅคซใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+

+ ่ฆ–่ดใ™ใ‚‹: Ultralytics ใƒขใƒผใƒ‰ใƒใƒฅใƒผใƒˆใƒชใ‚ขใƒซ: ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ๆคœ่จผใ€ไบˆๆธฌใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ๏ผ†ใƒ™ใƒณใƒใƒžใƒผใ‚ญใƒณใ‚ฐใ€‚
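+
+ๅ„ใƒขใƒผใƒ‰ใฎใคใชใŒใ‚Šใ‚’ใ‚คใƒกใƒผใ‚ธใ—ใ‚„ใ™ใใ™ใ‚‹ใŸใ‚ใ€1ใคใฎใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ€ไบˆๆธฌใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใฎ้ †ใงไฝฟใ†ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™๏ผˆใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚„ใ‚จใƒใƒƒใ‚ฏๆ•ฐใฏไธ€ไพ‹ใงใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+# 1ใคใฎใƒขใƒ‡ใƒซใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใงๅ„ใƒขใƒผใƒ‰ใ‚’้ †ใซๅฎŸ่กŒ
+model = YOLO('yolov8n.pt')
+
+model.train(data='coco8.yaml', epochs=3, imgsz=640)        # ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰
+metrics = model.val()                                      # ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒขใƒผใƒ‰
+results = model('https://ultralytics.com/images/bus.jpg')  # ไบˆๆธฌใƒขใƒผใƒ‰
+path = model.export(format='onnx')                         # ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใƒขใƒผใƒ‰
+```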
+ +### ใƒขใƒผใƒ‰ใฎๆฆ‚่ฆ + +Ultralytics YOLOv8ใŒใ‚ตใƒใƒผใƒˆใ™ใ‚‹็•ฐใชใ‚‹**ใƒขใƒผใƒ‰**ใ‚’็†่งฃใ™ใ‚‹ใ“ใจใฏใ€ใƒขใƒ‡ใƒซใ‹ใ‚‰ๆœ€ๅคง้™ใซๆดป็”จใ™ใ‚‹ใŸใ‚ใซใฏๆฌ ใ‹ใ›ใพใ›ใ‚“๏ผš + +- **ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ**ใƒขใƒผใƒ‰๏ผšใ‚ซใ‚นใ‚ฟใƒ ใพใŸใฏไบ‹ๅ‰่ชญใฟ่พผใฟใ•ใ‚ŒใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใฎใƒขใƒ‡ใƒซใฎๅพฎ่ชฟๆ•ดใ‚’่กŒใ„ใพใ™ใ€‚ +- **ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ**ใƒขใƒผใƒ‰๏ผšใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๅพŒใฎใƒใ‚งใƒƒใ‚ฏใƒใ‚คใƒณใƒˆใจใ—ใฆใ€ใƒขใƒ‡ใƒซใฎๆ€ง่ƒฝใ‚’ๆคœ่จผใ—ใพใ™ใ€‚ +- **ไบˆๆธฌ**ใƒขใƒผใƒ‰๏ผšๅฎŸไธ–็•Œใฎใƒ‡ใƒผใ‚ฟใงใƒขใƒ‡ใƒซใฎไบˆๆธฌ่ƒฝๅŠ›ใ‚’็™บๆฎใ—ใพใ™ใ€‚ +- **ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ**ใƒขใƒผใƒ‰๏ผšๆง˜ใ€…ใชๅฝขๅผใงใƒ‡ใƒ—ใƒญใ‚คใƒกใƒณใƒˆๆบ–ๅ‚™ใŒใงใใŸใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใพใ™ใ€‚ +- **ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ**ใƒขใƒผใƒ‰๏ผšใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใƒขใƒ‡ใƒซใ‚’ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ่ฟฝ่ทกใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซๆ‹กๅผตใ—ใพใ™ใ€‚ +- **ใƒ™ใƒณใƒใƒžใƒผใ‚ญใƒณใ‚ฐ**ใƒขใƒผใƒ‰๏ผšๆง˜ใ€…ใชใƒ‡ใƒ—ใƒญใ‚คใƒกใƒณใƒˆ็’ฐๅขƒใงใƒขใƒ‡ใƒซใฎ้€Ÿๅบฆใจ็ฒพๅบฆใ‚’ๅˆ†ๆžใ—ใพใ™ใ€‚ + +ใ“ใฎๅŒ…ๆ‹ฌ็š„ใชใ‚ฌใ‚คใƒ‰ใฏใ€YOLOv8ใฎๅ…จๆฝœๅœจ่ƒฝๅŠ›ใ‚’ๅผ•ใๅ‡บใ™ใŸใ‚ใฎๆฆ‚่ฆใจๅฎŸ่ทต็š„ใชๆดžๅฏŸใ‚’ๆไพ›ใ™ใ‚‹ใ“ใจใ‚’็›ฎๆŒ‡ใ—ใฆใ„ใพใ™ใ€‚ + +## [ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ](train.md) + +ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰ใฏใ€ใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงYOLOv8ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’่กŒใ†ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใพใ™ใ€‚ใ“ใฎใƒขใƒผใƒ‰ใงใฏใ€ๆŒ‡ๅฎšใ•ใ‚ŒใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใฆใƒขใƒ‡ใƒซใŒใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใพใ™ใ€‚ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒ—ใƒญใ‚ปใ‚นใซใฏใ€ใƒขใƒ‡ใƒซใฎใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’ๆœ€้ฉๅŒ–ใ—ใฆใ€ๅ†™็œŸๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฏใƒฉใ‚นใจไฝ็ฝฎใ‚’ๆญฃ็ขบใซไบˆๆธฌใงใใ‚‹ใ‚ˆใ†ใซใ™ใ‚‹ไฝœๆฅญใŒๅซใพใ‚Œใพใ™ใ€‚ + +[ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไพ‹](train.md){ .md-button } + +## [ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ](val.md) + +ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒขใƒผใƒ‰ใฏใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใพใ™ใ€‚ใ“ใฎใƒขใƒผใƒ‰ใงใฏใ€ใƒขใƒ‡ใƒซใŒใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ‚ปใƒƒใƒˆใง่ฉ•ไพกใ•ใ‚Œใ€ใใฎ็ฒพๅบฆใจไธ€่ˆฌๅŒ–ๆ€ง่ƒฝใ‚’ๆธฌๅฎšใ—ใพใ™ใ€‚ใ“ใฎใƒขใƒผใƒ‰ใฏใ€ใƒขใƒ‡ใƒซใฎใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’่ชฟๆ•ดใ—ใ€ใใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๅ‘ไธŠใ•ใ›ใ‚‹ใŸใ‚ใซๅˆฉ็”จใงใใพใ™ใ€‚ + +[ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณไพ‹](val.md){ .md-button } + +## [ไบˆๆธฌ](predict.md) + +ไบˆๆธฌใƒขใƒผใƒ‰ใฏใ€ๆ–ฐใ—ใ„็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8ใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆไบˆๆธฌใ‚’่กŒใ†ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใพใ™ใ€‚ใ“ใฎใƒขใƒผใƒ‰ใงใฏใ€ใƒขใƒ‡ใƒซใŒใƒใ‚งใƒƒใ‚ฏใƒใ‚คใƒณใƒˆใƒ•ใ‚กใ‚คใƒซใ‹ใ‚‰่ชญใฟ่พผใพใ‚Œใ€ใƒฆใƒผใ‚ถใƒผใŒๆŽจ่ซ–ใ‚’่กŒใ†ใŸใ‚ใซ็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใ‚’ๆไพ›ใงใใพใ™ใ€‚ใƒขใƒ‡ใƒซใฏใ€ๅ…ฅๅŠ›ใ—ใŸ็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฏใƒฉใ‚นใจไฝ็ฝฎใ‚’ไบˆๆธฌใ—ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/modes/index.md:Zone.Identifier b/ultralytics/docs/ja/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/predict.md b/ultralytics/docs/ja/modes/predict.md new file mode 100755 index 0000000..6708187 --- /dev/null +++ b/ultralytics/docs/ja/modes/predict.md @@ -0,0 +1,211 @@ +--- +comments: true +description: YOLOv8ไบˆๆธฌใƒขใƒผใƒ‰ใฎไฝฟ็”จๆ–นๆณ•ใซใคใ„ใฆๅญฆใณใ€็”ปๅƒใ€ๅ‹•็”ปใ€ใƒ‡ใƒผใ‚ฟใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใชใฉใ•ใพใ–ใพใชๆŽจ่ซ–ใ‚ฝใƒผใ‚นใซใคใ„ใฆ่งฃ่ชฌใ—ใพใ™ใ€‚ +keywords: Ultralytics, YOLOv8, ไบˆๆธฌใƒขใƒผใƒ‰, ๆŽจ่ซ–ใ‚ฝใƒผใ‚น, ไบˆๆธฌใ‚ฟใ‚นใ‚ฏ, ใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใƒขใƒผใƒ‰, ็”ปๅƒๅ‡ฆ็†, ๅ‹•็”ปๅ‡ฆ็†, ๆฉŸๆขฐๅญฆ็ฟ’, AI +--- + +# 
Ultralytics YOLOใซใ‚ˆใ‚‹ใƒขใƒ‡ใƒซไบˆๆธฌ + +Ultralytics YOLO ecosystem and integrations + +## ใ‚คใƒณใƒˆใƒญใƒ€ใ‚ฏใ‚ทใƒงใƒณ + +ๆฉŸๆขฐๅญฆ็ฟ’ใ‚„ใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒ“ใ‚ธใƒงใƒณใฎไธ–็•Œใงใฏใ€่ฆ–่ฆšใƒ‡ใƒผใ‚ฟใ‹ใ‚‰ๆ„ๅ‘ณใ‚’ๅผ•ใๅ‡บใ™ใƒ—ใƒญใ‚ปใ‚นใ‚’ใ€ŒๆŽจ่ซ–ใ€ใพใŸใฏใ€Œไบˆๆธฌใ€ใจๅ‘ผใฐใ‚Œใฆใ„ใพใ™ใ€‚UltralyticsใฎYOLOv8ใฏใ€ๅน…ๅบƒใ„ใƒ‡ใƒผใ‚ฟใ‚ฝใƒผใ‚นใซๅฏพใ—ใฆ้ซ˜ๆ€ง่ƒฝใงใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใชๆŽจ่ซ–ใ‚’่กŒใ†ใŸใ‚ใซ็‰นๅŒ–ใ—ใŸใ€ใ€Œไบˆๆธฌใƒขใƒผใƒ‰ใ€ใจๅ‘ผใฐใ‚Œใ‚‹ๅผทๅŠ›ใชๆฉŸ่ƒฝใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ + +

+ ่ฆ–่ด: Ultralytics YOLOv8ใƒขใƒ‡ใƒซใฎๅ‡บๅŠ›ใ‚’ใ‚ซใ‚นใ‚ฟใƒ ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซๅ–ใ‚Š่พผใ‚€ๆ–นๆณ•ใ‚’ๅญฆใถใ€‚
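+
+ๅ‚่€ƒใจใ—ใฆใ€ไบˆๆธฌใง่ฟ”ใ‚‹`Results`ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‹ใ‚‰ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นใฎๅบงๆจ™ใƒปไฟก้ ผๅบฆใƒปใ‚ฏใƒฉใ‚นๅใ‚’่ชญใฟๅ‡บใ™ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™๏ผˆ็”ปๅƒURLใฏไธ€ไพ‹ใงใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# ๅ„ๆคœๅ‡บใƒœใƒƒใ‚ฏใ‚นใฎๅบงๆจ™ใƒปไฟก้ ผๅบฆใƒปใ‚ฏใƒฉใ‚นๅใ‚’่ชญใฟๅ‡บใ™
+for result in results:
+    for box in result.boxes:
+        xyxy = box.xyxy[0].tolist()           # [x1, y1, x2, y2]
+        conf = float(box.conf[0])             # ไฟก้ ผๅบฆ
+        name = result.names[int(box.cls[0])]  # ใ‚ฏใƒฉใ‚นๅ
+        print(name, conf, xyxy)
+```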
+ +## ๅฎŸ้š›ใฎๅฟœ็”จไพ‹ + +| ่ฃฝ้€ ๆฅญ | ใ‚นใƒใƒผใƒ„ | ๅฎ‰ๅ…จ | +|:-----------------------------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------------------:| +| ![Vehicle Spare Parts Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Football Player Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![People Fall Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| ่ปŠไธกใฎใ‚นใƒšใ‚ขใƒ‘ใƒผใƒ„ๆคœๅ‡บ | ใƒ•ใƒƒใƒˆใƒœใƒผใƒซ้ธๆ‰‹ๆคœๅ‡บ | ไบบใฎ่ปขๅ€’ๆคœๅ‡บ | + +## ไบˆๆธฌใซUltralytics YOLOใ‚’ไฝฟใ†็†็”ฑ + +ๆง˜ใ€…ใชๆŽจ่ซ–ใƒ‹ใƒผใ‚บใซYOLOv8ใฎไบˆๆธฌใƒขใƒผใƒ‰ใ‚’ๆคœ่จŽใ™ในใ็†็”ฑใงใ™๏ผš + +- **ๆŸ”่ปŸๆ€ง:** ็”ปๅƒใ€ๅ‹•็”ปใ€ใ•ใ‚‰ใซใฏใƒฉใ‚คใƒ–ใ‚นใƒˆใƒชใƒผใƒ ใซใŠใ„ใฆๆŽจ่ซ–ใ‚’่กŒใ†่ƒฝๅŠ›ใŒใ‚ใ‚Šใพใ™ใ€‚ +- **ใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น:** ๆญฃ็ขบใ•ใ‚’็Š ็‰ฒใซใ™ใ‚‹ใ“ใจใชใใ€ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใง้ซ˜้€Ÿใชๅ‡ฆ็†ใŒ่กŒใˆใ‚‹ใ‚ˆใ†ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚ +- **ไฝฟใ„ใ‚„ใ™ใ•:** ่ฟ…้€Ÿใชๅฑ•้–‹ใจใƒ†ใ‚นใƒˆใฎใŸใ‚ใฎ็›ดๆ„Ÿ็š„ใชPythonใŠใ‚ˆใณCLIใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ€‚ +- **้ซ˜ใ„ใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บๆ€ง:** ็‰นๅฎšใฎ่ฆไปถใซๅฟœใ˜ใฆใƒขใƒ‡ใƒซใฎๆŽจ่ซ–ๅ‹•ไฝœใ‚’่ชฟๆ•ดใ™ใ‚‹ใŸใ‚ใฎใ•ใพใ–ใพใช่จญๅฎšใจใƒ‘ใƒฉใƒกใƒผใ‚ฟใƒผใ€‚ + +### ไบˆๆธฌใƒขใƒผใƒ‰ใฎไธปใช็‰นๅพด + +YOLOv8ใฎไบˆๆธฌใƒขใƒผใƒ‰ใฏใ€้ ‘ๅฅใงๅคšๆง˜ๆ€งใŒใ‚ใ‚Šใ€ๆฌกใฎ็‰นๅพดใ‚’ๅ‚™ใˆใฆใ„ใพใ™๏ผš + +- **่ค‡ๆ•ฐใฎใƒ‡ใƒผใ‚ฟใ‚ฝใƒผใ‚นใจใฎไบ’ๆ›ๆ€ง:** ใƒ‡ใƒผใ‚ฟใŒๅ€‹ใ€…ใฎ็”ปๅƒใ€็”ปๅƒใฎ้›†ๅˆใ€ๅ‹•็”ปใƒ•ใ‚กใ‚คใƒซใ€ใพใŸใฏใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใฎๅ‹•็”ปใ‚นใƒˆใƒชใƒผใƒ ใฎใ„ใšใ‚Œใฎๅฝขๅผใงใ‚ใฃใฆใ‚‚ใ€ไบˆๆธฌใƒขใƒผใƒ‰ใŒๅฏพๅฟœใ—ใฆใ„ใพใ™ใ€‚ +- **ใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใƒขใƒผใƒ‰:** `Results`ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒกใƒขใƒชๅŠน็އใฎ่‰ฏใ„ใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’็”Ÿๆˆใ™ใ‚‹ใŸใ‚ใซใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚`stream=True`ใ‚’ไบˆๆธฌๅ™จใฎๅ‘ผใณๅ‡บใ—ใƒกใ‚ฝใƒƒใƒ‰ใซ่จญๅฎšใ™ใ‚‹ใ“ใจใซใ‚ˆใ‚Šๆœ‰ๅŠนใซใชใ‚Šใพใ™ใ€‚ +- **ใƒใƒƒใƒๅ‡ฆ็†:** ๅ˜ไธ€ใฎใƒใƒƒใƒใง่ค‡ๆ•ฐใฎ็”ปๅƒใ‚„ๅ‹•็”ปใƒ•ใƒฌใƒผใƒ ใ‚’ๅ‡ฆ็†ใ™ใ‚‹่ƒฝๅŠ›ใฏใ€ใ•ใ‚‰ใซๆŽจ่ซ–ๆ™‚้–“ใ‚’็Ÿญ็ธฎใ—ใพใ™ใ€‚ +- **็ตฑๅˆใŒๅฎนๆ˜“:** ๆŸ”่ปŸใชAPIใฎใŠใ‹ใ’ใงใ€ๆ—ขๅญ˜ใฎใƒ‡ใƒผใ‚ฟใƒ‘ใ‚คใƒ—ใƒฉใ‚คใƒณใ‚„ไป–ใฎใ‚ฝใƒ•ใƒˆใ‚ฆใ‚งใ‚ขใ‚ณใƒณใƒใƒผใƒใƒณใƒˆใซ็ฐกๅ˜ใซ็ตฑๅˆใงใใพใ™ใ€‚ + +UltralyticsใฎYOLOใƒขใƒ‡ใƒซใฏใ€`stream=True`ใŒๆŽจ่ซ–ไธญใซใƒขใƒ‡ใƒซใซๆธกใ•ใ‚Œใ‚‹ใจใ€Pythonใฎ`Results`ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆใพใŸใฏ`Results`ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒกใƒขใƒชๅŠน็އใฎ่‰ฏใ„Pythonใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใฎใ„ใšใ‚Œใ‹ใ‚’่ฟ”ใ—ใพใ™๏ผš + +!!! 
Example "ไบˆๆธฌ" + + === "`stream=False`ใงใƒชใ‚นใƒˆใ‚’่ฟ”ใ™" + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซ + + # ็”ปๅƒใฎใƒชใ‚นใƒˆใซๅฏพใ—ใฆใƒใƒƒใƒๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(['im1.jpg', 'im2.jpg']) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆใ‚’่ฟ”ใ™ + + # ็ตๆžœใƒชใ‚นใƒˆใ‚’ๅ‡ฆ็† + for result in results: + boxes = result.boxes # ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นๅ‡บๅŠ›็”จใฎBoxesใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + masks = result.masks # ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒžใ‚นใ‚ฏๅ‡บๅŠ›็”จใฎMasksใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + keypoints = result.keypoints # ๅงฟๅ‹ขๅ‡บๅŠ›็”จใฎKeypointsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + probs = result.probs # ๅˆ†้กžๅ‡บๅŠ›็”จใฎProbsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + ``` + + === "`stream=True`ใงใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’่ฟ”ใ™" + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซ + + # ็”ปๅƒใฎใƒชใ‚นใƒˆใซๅฏพใ—ใฆใƒใƒƒใƒๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(['im1.jpg', 'im2.jpg'], stream=True) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’่ฟ”ใ™ + + # ็ตๆžœใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’ๅ‡ฆ็† + for result in results: + boxes = result.boxes # ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นๅ‡บๅŠ›็”จใฎBoxesใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + masks = result.masks # ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒžใ‚นใ‚ฏๅ‡บๅŠ›็”จใฎMasksใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + keypoints = result.keypoints # ๅงฟๅ‹ขๅ‡บๅŠ›็”จใฎKeypointsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + probs = result.probs # ๅˆ†้กžๅ‡บๅŠ›็”จใฎProbsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆ + ``` + +## ๆŽจ่ซ–ใ‚ฝใƒผใ‚น + +YOLOv8ใฏใ€ไปฅไธ‹ใฎ่กจใซ็คบใ•ใ‚Œใ‚‹ใ‚ˆใ†ใซใ€็•ฐใชใ‚‹ใ‚ฟใ‚คใƒ—ใฎๅ…ฅๅŠ›ใ‚ฝใƒผใ‚นใ‚’ๆŽจ่ซ–ใซๅ‡ฆ็†ใงใใพใ™ใ€‚ใ‚ฝใƒผใ‚นใซใฏ้™ๆญข็”ปๅƒใ€ๅ‹•็”ปใ‚นใƒˆใƒชใƒผใƒ ใ€ใŠใ‚ˆใณใ•ใพใ–ใพใชใƒ‡ใƒผใ‚ฟใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใŒๅซใพใ‚Œใพใ™ใ€‚่กจใซใฏใ€ๅ„ใ‚ฝใƒผใ‚นใŒใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใƒขใƒผใƒ‰ใงไฝฟ็”จใงใใ‚‹ใ‹ใฉใ†ใ‹ใ‚‚็คบใ•ใ‚ŒใฆใŠใ‚Šใ€ๅผ•ๆ•ฐ`stream=True`ใงโœ…ใŒ่กจ็คบใ•ใ‚Œใฆใ„ใพใ™ใ€‚ใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใƒขใƒผใƒ‰ใฏใ€ๅ‹•็”ปใ‚„ใƒฉใ‚คใƒ–ใ‚นใƒˆใƒชใƒผใƒ ใ‚’ๅ‡ฆ็†ใ™ใ‚‹ๅ ดๅˆใซๆœ‰ๅˆฉใงใ‚ใ‚Šใ€ใ™ในใฆใฎใƒ•ใƒฌใƒผใƒ ใ‚’ใƒกใƒขใƒชใซใƒญใƒผใƒ‰ใ™ใ‚‹ไปฃใ‚ใ‚Šใซ็ตๆžœใฎใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’ไฝœๆˆใ—ใพใ™ใ€‚ + +!!! 
Tip "ใƒ’ใƒณใƒˆ" + + ้•ทใ„ๅ‹•็”ปใ‚„ๅคงใใชใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๅŠน็އ็š„ใซใƒกใƒขใƒช็ฎก็†ใ™ใ‚‹ใŸใ‚ใซ`stream=True`ใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚`stream=False`ใงใฏใ€ใ™ในใฆใฎใƒ•ใƒฌใƒผใƒ ใพใŸใฏใƒ‡ใƒผใ‚ฟใƒใ‚คใƒณใƒˆใฎ็ตๆžœใŒใƒกใƒขใƒชใซๆ ผ็ดใ•ใ‚Œใพใ™ใŒใ€ๅคงใใชๅ…ฅๅŠ›ใง่ฟ…้€ŸใซใƒกใƒขใƒชใŒ็ฉใฟไธŠใŒใ‚Šใ€ใƒกใƒขใƒชไธ่ถณใฎใ‚จใƒฉใƒผใ‚’ๅผ•ใ่ตทใ“ใ™ๅฏ่ƒฝๆ€งใŒใ‚ใ‚Šใพใ™ใ€‚ๅฏพ็…ง็š„ใซใ€`stream=True`ใฏใ‚ธใ‚งใƒใƒฌใƒผใ‚ฟใ‚’ๅˆฉ็”จใ—ใ€็พๅœจใฎใƒ•ใƒฌใƒผใƒ ใพใŸใฏใƒ‡ใƒผใ‚ฟใƒใ‚คใƒณใƒˆใฎ็ตๆžœใฎใฟใ‚’ใƒกใƒขใƒชใซไฟๆŒใ—ใ€ใƒกใƒขใƒชๆถˆ่ฒปใ‚’ๅคงๅน…ใซๅ‰Šๆธ›ใ—ใ€ใƒกใƒขใƒชไธ่ถณใฎๅ•้กŒใ‚’้˜ฒใŽใพใ™ใ€‚ + +| ใ‚ฝใƒผใ‚น | ๅผ•ๆ•ฐ | ใ‚ฟใ‚คใƒ— | ๅ‚™่€ƒ | +|------------|--------------------------------------------|------------------|------------------------------------------------------------------| +| ็”ปๅƒ | `'image.jpg'` | `str` ใพใŸใฏ `Path` | ๅ˜ไธ€ใฎ็”ปๅƒใƒ•ใ‚กใ‚คใƒซใ€‚ | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | ็”ปๅƒใธใฎURLใ€‚ | +| ใ‚นใ‚ฏใƒชใƒผใƒณใ‚ทใƒงใƒƒใƒˆ | `'screen'` | `str` | ใ‚นใ‚ฏใƒชใƒผใƒณใ‚ทใƒงใƒƒใƒˆใ‚’ใ‚ญใƒฃใƒ—ใƒใƒฃใ€‚ | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | HWCใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใงRGBใƒใƒฃใƒณใƒใƒซใ€‚ | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | HWCใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใงBGRใƒใƒฃใƒณใƒใƒซ `uint8 (0-255)`ใ€‚ | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | HWCใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใงBGRใƒใƒฃใƒณใƒใƒซ `uint8 (0-255)`ใ€‚ | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | BCHWใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใงRGBใƒใƒฃใƒณใƒใƒซ `float32 (0.0-1.0)`ใ€‚ | +| CSV | `'sources.csv'` | `str` ใพใŸใฏ `Path` | ็”ปๅƒใ€ๅ‹•็”ปใ€ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใธใฎใƒ‘ใ‚นใ‚’ๅซใ‚€CSVใƒ•ใ‚กใ‚คใƒซใ€‚ | +| ๅ‹•็”ป โœ… | `'video.mp4'` | `str` ใพใŸใฏ `Path` | MP4ใ€AVIใชใฉใฎๅฝขๅผใฎๅ‹•็”ปใƒ•ใ‚กใ‚คใƒซใ€‚ | +| ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒช โœ… | `'path/'` | `str` ใพใŸใฏ `Path` | ็”ปๅƒใพใŸใฏๅ‹•็”ปใ‚’ๅซใ‚€ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใธใฎใƒ‘ใ‚นใ€‚ | +| ใ‚ฐใƒญใƒ– โœ… | `'path/*.jpg'` | `str` | ่ค‡ๆ•ฐใฎใƒ•ใ‚กใ‚คใƒซใซไธ€่‡ดใ™ใ‚‹ใ‚ฐใƒญใƒ–ใƒ‘ใ‚ฟใƒผใƒณใ€‚ใƒฏใ‚คใƒซใƒ‰ใ‚ซใƒผใƒ‰ใจใ—ใฆ`*`ๆ–‡ๅญ—ใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | YouTubeๅ‹•็”ปใฎURLใ€‚ | +| ใ‚นใƒˆใƒชใƒผใƒ  โœ… | `'rtsp://example.com/media.mp4'` | `str` | RTSPใ€RTMPใ€TCPใ€IPใ‚ขใƒ‰ใƒฌใ‚นใชใฉใฎใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใƒ—ใƒญใƒˆใ‚ณใƒซใฎใŸใ‚ใฎURLใ€‚ | +| ใƒžใƒซใƒใ‚นใƒˆใƒชใƒผใƒ  โœ… | `'list.streams'` | `str` ใพใŸใฏ `Path` | ใ‚นใƒˆใƒชใƒผใƒ URLใ‚’่กŒใ”ใจใซ1ใคๅซใ‚€`*.streams`ใƒ†ใ‚ญใ‚นใƒˆใƒ•ใ‚กใ‚คใƒซใ€‚ใคใพใ‚Šใ€8ใคใฎใ‚นใƒˆใƒชใƒผใƒ ใ‚’ใƒใƒƒใƒใ‚ตใ‚คใ‚บ8ใงๅฎŸ่กŒใ—ใพใ™ใ€‚ | + +ไปฅไธ‹ใฏใ€ใใ‚Œใžใ‚Œใฎใ‚ฝใƒผใ‚นใ‚ฟใ‚คใƒ—ใ‚’ไฝฟ็”จใ™ใ‚‹ใŸใ‚ใฎใ‚ณใƒผใƒ‰ไพ‹ใงใ™๏ผš + +!!! 
Example "ไบˆๆธฌใ‚ฝใƒผใ‚น" + + === "็”ปๅƒ" + ็”ปๅƒใƒ•ใ‚กใ‚คใƒซใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # ็”ปๅƒใƒ•ใ‚กใ‚คใƒซใธใฎใƒ‘ใ‚นใ‚’ๅฎš็พฉ + source = 'path/to/image.jpg' + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` + + === "ใ‚นใ‚ฏใƒชใƒผใƒณใ‚ทใƒงใƒƒใƒˆ" + ็พๅœจใฎ็”ป้ขๅ†…ๅฎนใฎใ‚นใ‚ฏใƒชใƒผใƒณใ‚ทใƒงใƒƒใƒˆใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # ็พๅœจใฎใ‚นใ‚ฏใƒชใƒผใƒณใ‚ทใƒงใƒƒใƒˆใ‚’ใ‚ฝใƒผใ‚นใจใ—ใฆๅฎš็พฉ + source = 'screen' + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` + + === "URL" + ใƒชใƒขใƒผใƒˆใฎURL็ตŒ็”ฑใงใƒ›ใ‚นใƒˆใ•ใ‚Œใฆใ„ใ‚‹็”ปๅƒใ‚„ๅ‹•็”ปใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # ใƒชใƒขใƒผใƒˆ็”ปๅƒใ‚„ๅ‹•็”ปใฎURLใ‚’ๅฎš็พฉ + source = 'https://ultralytics.com/images/bus.jpg' + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` + + === "PIL" + Python Imaging Library (PIL)ใ‚’ไฝฟ็”จใ—ใฆ้–‹ใ„ใŸ็”ปๅƒใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + from PIL import Image + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # PILใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใ‚’้–‹ใ + source = Image.open('path/to/image.jpg') + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` + + === "OpenCV" + OpenCVใ‚’ไฝฟ็”จใ—ใฆ่ชญใฟ่พผใ‚“ใ ็”ปๅƒใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + import cv2 + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # OpenCVใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใ‚’่ชญใฟ่พผใ‚€ + source = cv2.imread('path/to/image.jpg') + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` + + === "numpy" + numpy้…ๅˆ—ใจใ—ใฆ่กจใ•ใ‚Œใ‚‹็”ปๅƒใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + ```python + import numpy as np + from ultralytics import YOLO + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # HWCๅฝข็Šถ๏ผˆ640, 640, 3๏ผ‰ใ€็ฏ„ๅ›ฒ[0, 255]ใ€ๅž‹`uint8`ใฎใƒฉใƒณใƒ€ใƒ ใชnumpy้…ๅˆ—ใ‚’ไฝœๆˆ + source = np.random.randint(low=0, high=255, size=(640,640,3), dtype='uint8') + + # ใ‚ฝใƒผใ‚นใซๆŽจ่ซ–ใ‚’ๅฎŸ่กŒ + results = model(source) # Resultsใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒชใ‚นใƒˆ + ``` diff --git a/ultralytics/docs/ja/modes/predict.md:Zone.Identifier b/ultralytics/docs/ja/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/track.md b/ultralytics/docs/ja/modes/track.md new file mode 100755 index 0000000..56453f9 --- /dev/null +++ b/ultralytics/docs/ja/modes/track.md @@ -0,0 +1,200 @@ +--- +comments: true +description: Ultralytics YOLOใ‚’ไฝฟ็”จใ—ใŸใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ ใงใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใฎไฝฟ็”จๆ–นๆณ•ใ‚’ๅญฆใณใพใ™ใ€‚็•ฐใชใ‚‹ใƒˆใƒฉใƒƒใ‚ซใƒผใฎไฝฟ็”จใ‚ฌใ‚คใƒ‰ใจใƒˆใƒฉใƒƒใ‚ซใƒผๆง‹ๆˆใฎใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บใซใคใ„ใฆใ€‚ +keywords: 
Ultralytics, YOLO, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ, ใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ , BoT-SORT, ByteTrack, Pythonใ‚ฌใ‚คใƒ‰, CLIใ‚ฌใ‚คใƒ‰ +--- + +# Ultralytics YOLOใซใ‚ˆใ‚‹่ค‡ๆ•ฐใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ + +่ค‡ๆ•ฐใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐไพ‹ + +ใƒ“ใƒ‡ใ‚ชๅˆ†ๆžใฎ้ ˜ๅŸŸใงใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใฏใ€ใƒ•ใƒฌใƒผใƒ ๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎไฝ็ฝฎใจใ‚ฏใƒฉใ‚นใ‚’็‰นๅฎšใ™ใ‚‹ใ ใ‘ใงใชใใ€ใƒ“ใƒ‡ใ‚ชใŒ้€ฒ่กŒใ™ใ‚‹ใซใคใ‚Œใฆใใ‚Œใžใ‚Œใฎๆคœๅ‡บใ•ใ‚ŒใŸใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใซใƒฆใƒ‹ใƒผใ‚ฏใชIDใ‚’็ถญๆŒใ™ใ‚‹้‡่ฆใชใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ใใฎๅฟœ็”จ็ฏ„ๅ›ฒใฏ็„ก้™ใงใ€็›ฃ่ฆ–ใ‚„ใ‚ปใ‚ญใƒฅใƒชใƒ†ใ‚ฃใ‹ใ‚‰ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใ‚นใƒใƒผใƒ„ๅˆ†ๆžใพใงๅŠใณใพใ™ใ€‚ + +## ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใซUltralytics YOLOใ‚’้ธใถ็†็”ฑใฏ๏ผŸ + +Ultralyticsใฎใƒˆใƒฉใƒƒใ‚ซใƒผใ‹ใ‚‰ใฎๅ‡บๅŠ›ใฏๆจ™ๆบ–ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใจไธ€่‡ดใ—ใฆใŠใ‚Šใ€ใ•ใ‚‰ใซใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆIDใฎไป˜ๅŠ ไพกๅ€คใŒใ‚ใ‚Šใพใ™ใ€‚ใ“ใ‚Œใซใ‚ˆใ‚Šใ€ใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ ๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’่ฟฝ่ทกใ—ใ€ๅพŒ็ถšใฎๅˆ†ๆžใ‚’่กŒใ†ใ“ใจใŒๅฎนๆ˜“ใซใชใ‚Šใพใ™ใ€‚Ultralytics YOLOใ‚’ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใฎใƒ‹ใƒผใ‚บใซๅˆฉ็”จใ‚’ๆคœ่จŽใ™ใ‚‹็†็”ฑใฏไปฅไธ‹ใฎ้€šใ‚Šใงใ™๏ผš + +- **ๅŠน็އๆ€ง:** ็ฒพๅบฆใ‚’ๆใชใ†ใ“ใจใชใใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใงใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ ใ‚’ๅ‡ฆ็†ใ—ใพใ™ใ€‚ +- **ๆŸ”่ปŸๆ€ง:** ่ค‡ๆ•ฐใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใจๆง‹ๆˆใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚ +- **ไฝฟใ„ใ‚„ใ™ใ•:** ็ฐกๅ˜ใชPython APIใจCLIใ‚ชใƒ—ใ‚ทใƒงใƒณใง่ฟ…้€Ÿใช็ตฑๅˆใจๅฑ•้–‹ใŒๅฏ่ƒฝใงใ™ใ€‚ +- **ใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บๆ€ง:** ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOใƒขใƒ‡ใƒซใจใฎๅฎนๆ˜“ใชไฝฟ็”จใซใ‚ˆใ‚Šใ€ใƒ‰ใƒกใ‚คใƒณ็‰นๆœ‰ใฎใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใธใฎ็ตฑๅˆใŒๅฏ่ƒฝใงใ™ใ€‚ + +

+ ่ฆ–่ด: Ultralytics YOLOv8ใซใ‚ˆใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใจใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ€‚
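+
+ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใงใฏๅ„ๆคœๅ‡บใซใƒฆใƒ‹ใƒผใ‚ฏใชIDใŒไป˜ไธŽใ•ใ‚Œใพใ™ใ€‚ๅ‚่€ƒใพใงใซใ€็ตๆžœใ‹ใ‚‰ใƒˆใƒฉใƒƒใ‚ฏIDใ‚’่ชญใฟๅ‡บใ™ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™๏ผˆๅ‹•็”ปใƒ‘ใ‚นใฏไธ€ไพ‹ใงใ€IDๆœชๅ‰ฒๅฝ“ใฆใฎใƒ•ใƒฌใƒผใƒ ใงใฏ`boxes.id`ใŒ`None`ใซใชใ‚Šใพใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+
+# stream=Trueใงใƒ•ใƒฌใƒผใƒ ใ”ใจใซ็ตๆžœใ‚’ๅ—ใ‘ๅ–ใ‚Šใ€ใƒˆใƒฉใƒƒใ‚ฏIDใ‚’่ชญใฟๅ‡บใ™
+for result in model.track(source='path/to/video.mp4', stream=True):
+    boxes = result.boxes
+    if boxes.id is not None:  # IDใŒๅ‰ฒใ‚Šๅฝ“ใฆใ‚‰ใ‚Œใฆใ„ใชใ„ใƒ•ใƒฌใƒผใƒ ใ‚‚ใ‚ใ‚‹
+        print(boxes.id.int().tolist())  # ไพ‹: [1, 2, 3]
+```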
+ +## ๅฎŸไธ–็•Œใงใฎๅฟœ็”จไพ‹ + +| ไบค้€š | ๅฐๅฃฒ | ๆฐด็”ฃๆฅญ | +|:---------------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------:| +| ![่ปŠไธกใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![ไบบใ€…ใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![้ญšใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) | +| ่ปŠไธกใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ | ไบบใ€…ใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ | ้ญšใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ | + +## ไธ€็›ฎใงใ‚ใ‹ใ‚‹ๆฉŸ่ƒฝ + +Ultralytics YOLOใฏใ€ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บๆฉŸ่ƒฝใ‚’ๆ‹กๅผตใ—ใฆใ€ๅ …็‰ขใงๅคšๆฉŸ่ƒฝใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๆไพ›ใ—ใพใ™๏ผš + +- **ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ๏ผš** ้ซ˜ใƒ•ใƒฌใƒผใƒ ใƒฌใƒผใƒˆใฎใƒ“ใƒ‡ใ‚ชใงใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ใ‚ทใƒผใƒ ใƒฌใ‚นใซ่ฟฝ่ทกใ—ใพใ™ใ€‚ +- **่ค‡ๆ•ฐใƒˆใƒฉใƒƒใ‚ซใƒผใ‚ตใƒใƒผใƒˆ๏ผš** ็ขบ็ซ‹ใ•ใ‚ŒใŸใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‹ใ‚‰้ธๆŠžใงใใพใ™ใ€‚ +- **ใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บๅฏ่ƒฝใชใƒˆใƒฉใƒƒใ‚ซใƒผๆง‹ๆˆ๏ผš** ๆง˜ใ€…ใชใƒ‘ใƒฉใƒกใƒผใ‚ฟใƒผใ‚’่ชฟๆ•ดใ™ใ‚‹ใ“ใจใง็‰นๅฎšใฎ่ฆไปถใซๅˆใ‚ใ›ใฆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’่ชฟๆ•ดใ—ใพใ™ใ€‚ + +## ๅˆฉ็”จๅฏ่ƒฝใชใƒˆใƒฉใƒƒใ‚ซใƒผ + +Ultralytics YOLOใฏใ€ๆฌกใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚ใใ‚Œใ‚‰ใฏใ€้–ข้€ฃใ™ใ‚‹YAMLๆง‹ๆˆใƒ•ใ‚กใ‚คใƒซ๏ผˆใŸใจใˆใฐ`tracker=tracker_type.yaml`๏ผ‰ใ‚’ๆธกใ™ใ“ใจใงๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™๏ผš + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - ใ“ใฎใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใซใฏ`botsort.yaml`ใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - ใ“ใฎใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใซใฏ`bytetrack.yaml`ใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ + +ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฎใƒˆใƒฉใƒƒใ‚ซใƒผใฏBoT-SORTใงใ™ใ€‚ + +## ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ + +ใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ ใงใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒใ™ใ‚‹ใซใฏใ€YOLOv8nใ€YOLOv8n-segใ€YOLOv8n-poseใชใฉใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎDetectใ€Segmentใ€ใพใŸใฏPoseใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ๅ…ฌๅผใพใŸใฏใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰
+        model = YOLO('yolov8n.pt')  # ๅ…ฌๅผใฎDetectใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰
+        model = YOLO('yolov8n-seg.pt')  # ๅ…ฌๅผใฎSegmentใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰
+        model = YOLO('yolov8n-pose.pt')  # ๅ…ฌๅผใฎPoseใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰
+        model = YOLO('path/to/best.pt')  # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰
+
+        # ใƒขใƒ‡ใƒซใงใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๅฎŸ่กŒ
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใƒˆใƒฉใƒƒใ‚ซใƒผใงใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # ByteTrackใƒˆใƒฉใƒƒใ‚ซใƒผใงใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ
+        ```
+
+    === "CLI"
+
+        ```bash
+        # ใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ‚’ไฝฟ็”จใ—ใฆใ€ๆง˜ใ€…ใชใƒขใƒ‡ใƒซใงใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๅฎŸ่กŒ
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # ๅ…ฌๅผใฎDetectใƒขใƒ‡ใƒซ
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # ๅ…ฌๅผใฎSegmentใƒขใƒ‡ใƒซ
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # ๅ…ฌๅผใฎPoseใƒขใƒ‡ใƒซ
+        yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4"  # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซ
+
+        # ByteTrackใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ไฝฟ็”จใ—ใฆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ
+        yolo track model=path/to/best.pt tracker="bytetrack.yaml"
+        ```
+
+ไธŠ่จ˜ใฎไฝฟ็”จๆณ•ใซ็คบใ•ใ‚Œใฆใ„ใ‚‹ใ‚ˆใ†ใซใ€ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใฏใƒ“ใƒ‡ใ‚ชใ‚„ใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐใ‚ฝใƒผใ‚นใงๅฎŸ่กŒใ•ใ‚Œใ‚‹ใ™ในใฆใฎDetectใ€Segmentใ€ใŠใ‚ˆใณPoseใƒขใƒ‡ใƒซใงๅˆฉ็”จๅฏ่ƒฝใงใ™ใ€‚
+
+## ๆง‹ๆˆ
+
+### ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐๅผ•ๆ•ฐ
+
+ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐๆง‹ๆˆใฏใ€`conf`ใ€`iou`ใ€ใŠใ‚ˆใณ`show`ใชใฉใฎPredictใƒขใƒผใƒ‰ใจๅŒใ˜ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚’ๅ…ฑๆœ‰ใ—ใพใ™ใ€‚ใ•ใ‚‰ใชใ‚‹ๆง‹ๆˆใซใคใ„ใฆใฏใ€[Predict](https://docs.ultralytics.com/modes/predict/)ใƒขใƒ‡ใƒซใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚
+
+!!! Example "ไพ‹"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใƒ‘ใƒฉใƒกใƒผใ‚ฟใƒผใ‚’ๆง‹ๆˆใ—ใ€ใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒ
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # ใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ‚’ไฝฟ็”จใ—ใฆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’ๆง‹ๆˆใ—ใ€ใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒ
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### ใƒˆใƒฉใƒƒใ‚ซใƒผใฎ้ธๆŠž
+
+Ultralyticsใงใฏใ€ๅค‰ๆ›ดใ—ใŸใƒˆใƒฉใƒƒใ‚ซใƒผๆง‹ๆˆใƒ•ใ‚กใ‚คใƒซใ‚‚ไฝฟ็”จใงใใพใ™ใ€‚ใ“ใ‚Œใ‚’่กŒใ†ใซใฏใ€[ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)ใ‹ใ‚‰ใƒˆใƒฉใƒƒใ‚ซใƒผๆง‹ๆˆใƒ•ใ‚กใ‚คใƒซ๏ผˆใŸใจใˆใฐ`custom_tracker.yaml`๏ผ‰ใฎใ‚ณใƒ”ใƒผใ‚’ไฝœๆˆใ—ใ€ๅฟ…่ฆใซๅฟœใ˜ใฆไปปๆ„ใฎๆง‹ๆˆ๏ผˆ`tracker_type`ใ‚’้™คใ๏ผ‰ใ‚’ๅค‰ๆ›ดใ—ใพใ™ใ€‚
+
+!!! 
Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€ใ‚ซใ‚นใ‚ฟใƒ ๆง‹ๆˆใƒ•ใ‚กใ‚คใƒซใงใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # ใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ‚’ไฝฟ็”จใ—ใฆใ€ใ‚ซใ‚นใ‚ฟใƒ ๆง‹ๆˆใƒ•ใ‚กใ‚คใƒซใงใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใ€ใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐๅผ•ๆ•ฐใฎๅŒ…ๆ‹ฌ็š„ใชใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€[ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)ใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +## Pythonใฎไพ‹ + +### ใƒˆใƒฉใƒƒใ‚ฏใƒซใƒผใƒ—ใฎๆฐธ็ถšๅŒ– + +ๆฌกใฏใ€OpenCV (`cv2`)ใจYOLOv8ใ‚’ไฝฟ็”จใ—ใฆใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ใงใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๅฎŸ่กŒใ™ใ‚‹Pythonใ‚นใ‚ฏใƒชใƒ—ใƒˆใงใ™ใ€‚ใ“ใฎใ‚นใ‚ฏใƒชใƒ—ใƒˆใงใฏใ€ๅฟ…่ฆใชใƒ‘ใƒƒใ‚ฑใƒผใ‚ธ๏ผˆ`opencv-python`ใŠใ‚ˆใณ`ultralytics`๏ผ‰ใŒๆ—ขใซใ‚คใƒณใ‚นใƒˆใƒผใƒซใ•ใ‚Œใฆใ„ใ‚‹ใ“ใจใŒๅ‰ๆใงใ™ใ€‚`persist=True`ๅผ•ๆ•ฐใฏใ€ใƒˆใƒฉใƒƒใ‚ซใƒผใซ็พๅœจใฎ็”ปๅƒใพใŸใฏใƒ•ใƒฌใƒผใƒ ใŒใ‚ทใƒผใ‚ฑใƒณใ‚นใฎๆฌกใฎใ‚‚ใฎใงใ‚ใ‚Šใ€็พๅœจใฎ็”ปๅƒใซๅ‰ใฎ็”ปๅƒใ‹ใ‚‰ใฎใƒˆใƒฉใƒƒใ‚ฏใ‚’ๆœŸๅพ…ใ™ใ‚‹ใ“ใจใ‚’ไผใˆใพใ™ใ€‚ + +!!! Example "ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ไผดใ†ใ‚นใƒˆใƒชใƒผใƒŸใƒณใ‚ฐforใƒซใƒผใƒ—" + + ```python + import cv2 + from ultralytics import YOLO + + # YOLOv8ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') + + # ใƒ“ใƒ‡ใ‚ชใƒ•ใ‚กใ‚คใƒซใ‚’้–‹ใ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ใ‚’ใƒซใƒผใƒ—ใ™ใ‚‹ + while cap.isOpened(): + # ใƒ“ใƒ‡ใ‚ชใ‹ใ‚‰ใƒ•ใƒฌใƒผใƒ ใ‚’่ชญใฟ่พผใ‚€ + success, frame = cap.read() + + if success: + # ใƒ•ใƒฌใƒผใƒ ใงYOLOv8ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๅฎŸ่กŒใ—ใ€ใƒ•ใƒฌใƒผใƒ ้–“ใงใƒˆใƒฉใƒƒใ‚ฏใ‚’ๆฐธ็ถšๅŒ– + results = model.track(frame, persist=True) + + # ใƒ•ใƒฌใƒผใƒ ใซ็ตๆžœใ‚’ๅฏ่ฆ–ๅŒ– + annotated_frame = results[0].plot() + + # ๆณจ้‡ˆไป˜ใใฎใƒ•ใƒฌใƒผใƒ ใ‚’่กจ็คบ + cv2.imshow("YOLOv8ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ", annotated_frame) + + # 'q'ใŒๆŠผใ•ใ‚ŒใŸใ‚‰ใƒซใƒผใƒ—ใ‹ใ‚‰ๆŠœใ‘ใ‚‹ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ใƒ“ใƒ‡ใ‚ชใฎ็ต‚ใ‚ใ‚Šใซๅˆฐ้”ใ—ใŸใ‚‰ใƒซใƒผใƒ—ใ‹ใ‚‰ๆŠœใ‘ใ‚‹ + break + + # ใƒ“ใƒ‡ใ‚ชใ‚ญใƒฃใƒ—ใƒใƒฃใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’่งฃๆ”พใ—ใ€่กจ็คบใ‚ฆใ‚ฃใƒณใƒ‰ใ‚ฆใ‚’้–‰ใ˜ใ‚‹ + cap.release() + cv2.destroyAllWindows() + ``` + +ใ“ใ“ใงใฎๅค‰ๆ›ดใฏใ€ๅ˜็ด”ใชๆคœๅ‡บใงใฏใชใใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹`model(frame)`ใ‹ใ‚‰`model.track(frame)`ใธใฎๅค‰ๆ›ดใงใ™ใ€‚ใ“ใฎๅค‰ๆ›ดใ•ใ‚ŒใŸใ‚นใ‚ฏใƒชใƒ—ใƒˆใฏใ€ใƒ“ใƒ‡ใ‚ชใฎๅ„ใƒ•ใƒฌใƒผใƒ ใงใƒˆใƒฉใƒƒใ‚ซใƒผใ‚’ๅฎŸ่กŒใ—ใ€็ตๆžœใ‚’่ฆ–่ฆšๅŒ–ใ—ใ€ใ‚ฆใ‚ฃใƒณใƒ‰ใ‚ฆใซ่กจ็คบใ—ใพใ™ใ€‚ใƒซใƒผใƒ—ใฏ'q'ใ‚’ๆŠผใ™ใ“ใจใง็ต‚ไบ†ใงใใพใ™ใ€‚ + +## ๆ–ฐใ—ใ„ใƒˆใƒฉใƒƒใ‚ซใƒผใฎ่ฒข็Œฎ + +ใ‚ใชใŸใฏใƒžใƒซใƒใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใซ็ฒพ้€šใ—ใฆใŠใ‚Šใ€Ultralytics YOLOใงใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ใ†ใพใๅฎŸ่ฃ…ใพใŸใฏ้ฉๅฟœใ•ใ›ใŸใ“ใจใŒใ‚ใ‚Šใพใ™ใ‹๏ผŸ[ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)ใ‚ปใ‚ฏใ‚ทใƒงใƒณใธใฎ่ฒข็Œฎใ‚’็งใŸใกใฏๆญ“่ฟŽใ—ใพใ™๏ผใ‚ใชใŸใฎๅฎŸไธ–็•Œใงใฎๅฟœ็”จไพ‹ใจใ‚ฝใƒชใƒฅใƒผใ‚ทใƒงใƒณใฏใ€ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ฟใ‚นใ‚ฏใซๅ–ใ‚Š็ต„ใ‚€ใƒฆใƒผใ‚ถใƒผใซใจใฃใฆ้žๅธธใซๆœ‰็›Šใ‹ใ‚‚ใ—ใ‚Œใพใ›ใ‚“ใ€‚ + +ใ“ใฎใ‚ปใ‚ฏใ‚ทใƒงใƒณใธใฎ่ฒข็Œฎใซใ‚ˆใ‚Šใ€Ultralytics 
YOLOใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏๅ†…ใงๅˆฉ็”จๅฏ่ƒฝใชใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ‚ฝใƒชใƒฅใƒผใ‚ทใƒงใƒณใฎ็ฏ„ๅ›ฒใŒๅบƒใŒใ‚Šใ€ใ‚ณใƒŸใƒฅใƒ‹ใƒ†ใ‚ฃใซใจใฃใฆใฎๆฉŸ่ƒฝๆ€งใจใƒฆใƒผใƒ†ใ‚ฃใƒชใƒ†ใ‚ฃใƒผใซๆ–ฐใŸใชๅฑคใŒๅŠ ใ‚ใ‚Šใพใ™ใ€‚
+
+ใ”่‡ช่บซใฎ่ฒข็Œฎใ‚’้–‹ๅง‹ใ™ใ‚‹ใซใฏใ€ใƒ—ใƒซใƒชใ‚ฏใ‚จใ‚นใƒˆ๏ผˆPR๏ผ‰ใฎ้€ไฟกใซ้–ขใ™ใ‚‹็ทๅˆ็š„ใชๆŒ‡็คบใซใคใ„ใฆๆˆ‘ใ€…ใฎ[่ฒข็Œฎใ‚ฌใ‚คใƒ‰](https://docs.ultralytics.com/help/contributing)ใ‚’ใ”ๅ‚็…งใใ ใ•ใ„ ๐Ÿ› ๏ธใ€‚ใ‚ใชใŸใฎ่ฒข็ŒฎใŒไฝ•ใ‚’ใ‚‚ใŸใ‚‰ใ™ใ‹ใ€็งใŸใกใฏๆฅฝใ—ใฟใซใ—ใฆใ„ใพใ™๏ผ
+
+ไธ€็ท’ใซใ€Ultralytics YOLOใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐๆฉŸ่ƒฝใ‚’้ซ˜ใ‚ใพใ—ใ‚‡ใ† ๐Ÿ™๏ผ
 diff --git a/ultralytics/docs/ja/modes/track.md:Zone.Identifier b/ultralytics/docs/ja/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/train.md b/ultralytics/docs/ja/modes/train.md new file mode 100755 index 0000000..c8d3642 --- /dev/null +++ b/ultralytics/docs/ja/modes/train.md @@ -0,0 +1,206 @@ +--- +comments: true +description: Ultralytics YOLOใ‚’ไฝฟ็”จใ—ใฆYOLOv8ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ๆ‰‹้ †ใฎใ‚ฌใ‚คใƒ‰ใงใ€ใ‚ทใƒณใ‚ฐใƒซGPUใจใƒžใƒซใƒGPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฎไพ‹ใ‚’ๅซใฟใพใ™ใ€‚
+keywords: Ultralytics, YOLOv8, YOLO, ็‰ฉไฝ“ๆคœๅ‡บ, ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰, ใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ, GPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, ใƒžใƒซใƒGPU, ใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟ, CLIไพ‹, Pythonไพ‹
+---
+
+# Ultralytics YOLOใ‚’ไฝฟใฃใŸใƒขใƒ‡ใƒซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ
+
+Ultralytics YOLOใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใจ็ตฑๅˆ
+
+## ใฏใ˜ใ‚ใซ
+
+ใƒ‡ใ‚ฃใƒผใƒ—ใƒฉใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฏใ€ใƒ‡ใƒผใ‚ฟใ‚’ไธŽใˆใฆใƒ‘ใƒฉใƒกใƒผใ‚ฟใƒผใ‚’่ชฟๆ•ดใ—ใ€ๆญฃ็ขบใชไบˆๆธฌใ‚’่กŒใˆใ‚‹ใ‚ˆใ†ใซใ™ใ‚‹ใƒ—ใƒญใ‚ปใ‚นใ‚’ๅซใฟใพใ™ใ€‚UltralyticsใฎYOLOv8ใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰ใฏใ€็พไปฃใฎใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใฎ่ƒฝๅŠ›ใ‚’ใƒ•ใƒซใซๆดป็”จใ—ใฆ็‰ฉไฝ“ๆคœๅ‡บใƒขใƒ‡ใƒซใ‚’ๅŠนๆžœ็š„ใ‹ใคๅŠน็އ็š„ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ใŸใ‚ใซ่จญ่จˆใ•ใ‚Œใฆใ„ใพใ™ใ€‚ใ“ใฎใ‚ฌใ‚คใƒ‰ใฏใ€YOLOv8 ใฎ่ฑŠๅฏŒใชๆฉŸ่ƒฝใ‚ปใƒƒใƒˆใ‚’ไฝฟ็”จใ—ใฆ่‡ช่บซใฎใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ใŸใ‚ใซๅฟ…่ฆใชใ™ในใฆใฎ่ฉณ็ดฐใ‚’ใ‚ซใƒใƒผใ™ใ‚‹ใ“ใจใ‚’็›ฎ็š„ใจใ—ใฆใ„ใพใ™ใ€‚
+

+ ่ฆ–่ด: Google Colabใงใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใซYOLOv8ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ๆ–นๆณ•ใ€‚
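+
+ๅ‚่€ƒใจใ—ใฆใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๅพŒใซไฟๅญ˜ใ•ใ‚ŒใŸใƒ™ใ‚นใƒˆ้‡ใฟใ‚’่ชญใฟ่พผใฟ็›ดใ—ใฆๆคœ่จผใ™ใ‚‹ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™๏ผˆ`runs/detect/train/weights/best.pt`ใจใ„ใ†ไฟๅญ˜ๅ…ˆใƒ‘ใ‚นใฏๅฎŸ่กŒ็’ฐๅขƒใซใ‚ˆใฃใฆๅค‰ใ‚ใ‚‹ไธ€ไพ‹ใงใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+# ๅฐ่ฆๆจกใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใง็Ÿญใใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ
+model = YOLO('yolov8n.pt')
+model.train(data='coco8.yaml', epochs=3, imgsz=640)
+
+# ไฟๅญ˜ใ•ใ‚ŒใŸใƒ™ใ‚นใƒˆ้‡ใฟใ‚’่ชญใฟ่พผใฟ็›ดใ—ใฆๆคœ่จผ๏ผˆใƒ‘ใ‚นใฏไธ€ไพ‹๏ผ‰
+best = YOLO('runs/detect/train/weights/best.pt')
+metrics = best.val()
+print(metrics.box.map)  # mAP50-95
+```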
+ +## ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซUltralyticsใฎYOLOใ‚’้ธใถ็†็”ฑ + +YOLOv8ใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰ใ‚’้ธๆŠžใ™ใ‚‹ใ„ใใคใ‹ใฎ้ญ…ๅŠ›็š„ใช็†็”ฑใ‚’ไปฅไธ‹ใซ็คบใ—ใพใ™๏ผš + +- **ๅŠน็އๆ€ง๏ผš** ใ‚ทใƒณใ‚ฐใƒซGPUใ‚ปใƒƒใƒˆใ‚ขใƒƒใƒ—ใงใ‚ใ‚ใ†ใจ่ค‡ๆ•ฐใฎGPUใซใ‚นใ‚ฑใƒผใƒซใ™ใ‚‹ๅ ดๅˆใงใ‚ใ‚ใ†ใจใ€ใ‚ใชใŸใฎใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใ‚’ๆœ€ๅคง้™ใซๆดป็”จใ—ใพใ™ใ€‚ +- **ๆฑŽ็”จๆ€ง๏ผš** COCOใ€VOCใ€ImageNetใฎใ‚ˆใ†ใชๆ—ขๅญ˜ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใซๅŠ ใˆใ€ใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใŒๅฏ่ƒฝใงใ™ใ€‚ +- **ใƒฆใƒผใ‚ถใƒผใƒ•ใƒฌใƒณใƒ‰ใƒชใƒผ๏ผš** ็›ดๆ„Ÿ็š„ใงใ‚ใ‚ŠใชใŒใ‚‰ๅผทๅŠ›ใชCLIใจPythonใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ‚’ๅ‚™ใˆใ€็ฐกๅ˜ใชใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไฝ“้จ“ใ‚’ๆไพ›ใ—ใพใ™ใ€‚ +- **ใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใฎๆŸ”่ปŸๆ€ง๏ผš** ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๅพฎ่ชฟๆ•ดใ™ใ‚‹ใŸใ‚ใฎๅน…ๅบƒใ„ใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บๅฏ่ƒฝใชใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใ€‚ + +### ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰ใฎไธปใช็‰นๅพด + +ไปฅไธ‹ใซใ€YOLOv8ใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒผใƒ‰ใฎใ„ใใคใ‹ใฎๆณจ็›ฎใ™ในใ็‰นๅพดใ‚’ๆŒ™ใ’ใพใ™๏ผš + +- **่‡ชๅ‹•ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰๏ผš** COCOใ€VOCใ€ImageNetใฎใ‚ˆใ†ใชๆจ™ๆบ–ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฏๆœ€ๅˆใฎไฝฟ็”จๆ™‚ใซ่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚ +- **ใƒžใƒซใƒGPUใ‚ตใƒใƒผใƒˆ๏ผš** ่ค‡ๆ•ฐใฎGPUใซใ‚ใŸใฃใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’ใ‚นใ‚ฑใƒผใƒซใ—ใ€ใƒ—ใƒญใ‚ปใ‚นใ‚’่ฟ…้€Ÿใซ่กŒใ„ใพใ™ใ€‚ +- **ใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใฎ่จญๅฎš๏ผš** YAML่จญๅฎšใƒ•ใ‚กใ‚คใƒซใ‚„CLIๅผ•ๆ•ฐใ‚’้€šใ˜ใฆใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ‚ชใƒ—ใ‚ทใƒงใƒณใ€‚ +- **ๅฏ่ฆ–ๅŒ–ใจใƒขใƒ‹ใ‚ฟใƒชใƒณใ‚ฐ๏ผš** ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆŒ‡ๆจ™ใฎใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ่ฟฝ่ทกใจๅญฆ็ฟ’ใƒ—ใƒญใ‚ปใ‚นใฎๅฏ่ฆ–ๅŒ–ใซใ‚ˆใ‚Šใ€ใ‚ˆใ‚Š่‰ฏใ„ๆดžๅฏŸใ‚’ๅพ—ใพใ™ใ€‚ + +!!! Tip "ใƒ’ใƒณใƒˆ" + + * YOLOv8ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ€ไพ‹ใˆใฐCOCOใ€VOCใ€ImageNetใชใฉใฏใ€ๆœ€ๅˆใฎไฝฟ็”จๆ™‚ใซ่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚ไพ‹๏ผš`yolo train data=coco.yaml` + +## ไฝฟ็”จไพ‹ + +COCO128ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงYOLOv8nใ‚’100ใ‚จใƒใƒƒใ‚ฏใ€็”ปๅƒใ‚ตใ‚คใ‚บ640ใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ใ€‚ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒ‡ใƒใ‚คใ‚นใฏใ€`device`ๅผ•ๆ•ฐใ‚’ไฝฟใฃใฆๆŒ‡ๅฎšใงใใพใ™ใ€‚ๅผ•ๆ•ฐใŒๆธกใ•ใ‚Œใชใ„ๅ ดๅˆใ€ๅˆฉ็”จๅฏ่ƒฝใงใ‚ใ‚ŒใฐGPU `device=0`ใŒใ€ใใ†ใงใชใ‘ใ‚Œใฐ`device=cpu`ใŒๅˆฉ็”จใ•ใ‚Œใพใ™ใ€‚ๅ…จใฆใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๅผ•ๆ•ฐใฎใƒชใ‚นใƒˆใฏไปฅไธ‹ใฎๅผ•ๆ•ฐใ‚ปใ‚ฏใ‚ทใƒงใƒณใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! 
Example "ใ‚ทใƒณใ‚ฐใƒซGPUใจCPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไพ‹" + + ใƒ‡ใƒใ‚คใ‚นใฏ่‡ชๅ‹•็š„ใซๆฑบๅฎšใ•ใ‚Œใพใ™ใ€‚GPUใŒๅˆฉ็”จๅฏ่ƒฝใงใ‚ใ‚Œใฐใใ‚ŒใŒไฝฟ็”จใ•ใ‚Œใ€ใใ†ใงใชใ‘ใ‚ŒใฐCPUใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใŒ้–‹ๅง‹ใ•ใ‚Œใพใ™ใ€‚ + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.yaml') # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆ + model = YOLO('yolov8n.pt') # ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซใฏใŠใ™ใ™ใ‚ใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # YAMLใ‹ใ‚‰ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใ€้‡ใฟใ‚’่ปข้€ + + # ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "CLI" + + ```bash + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใ€ๆœ€ๅˆใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใ€ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใฎ้‡ใฟใ‚’่ปข้€ใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ใƒžใƒซใƒGPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +ใƒžใƒซใƒGPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฏใ€ๅˆฉ็”จๅฏ่ƒฝใชใƒใƒผใƒ‰ใ‚ฆใ‚งใ‚ขใƒชใ‚ฝใƒผใ‚นใ‚’ใ‚ˆใ‚ŠๅŠน็އ็š„ใซๆดป็”จใ™ใ‚‹ใŸใ‚ใซใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฎ่ฒ ่ทใ‚’่ค‡ๆ•ฐใฎGPUใซๅˆ†ๆ•ฃใ•ใ›ใ‚‹ใ“ใจใ‚’ๅฏ่ƒฝใซใ—ใพใ™ใ€‚ใ“ใฎๆฉŸ่ƒฝใฏPython APIใจใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใ‚ฃใ‚นใฎไธกๆ–นใ‚’้€šใ˜ใฆๅˆฉ็”จใงใใพใ™ใ€‚ใƒžใƒซใƒGPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใซใฏใ€ไฝฟ็”จใ—ใŸใ„GPUใƒ‡ใƒใ‚คใ‚นIDใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + +!!! Example "ใƒžใƒซใƒGPUใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไพ‹" + + 2ใคใฎGPUใ‚’ไฝฟใฃใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ใซใฏใ€CUDAใƒ‡ใƒใ‚คใ‚น0ใจ1ใ‚’ไฝฟใ„ไปฅไธ‹ใฎใ‚ณใƒžใƒณใƒ‰ใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ๅฟ…่ฆใซๅฟœใ˜ใฆ่ฟฝๅŠ ใฎGPUใซๆ‹กๅผตใ—ใพใ™ใ€‚ + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') # ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซใฏใŠใ™ใ™ใ‚ใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # 2ใคใฎGPUใงใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "CLI" + + ```bash + # ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰GPU 0ใจ1ใ‚’ไฝฟใฃใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### Apple M1 ใŠใ‚ˆใณ M2 MPSใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +AppleใฎM1ใŠใ‚ˆใณM2ใƒใƒƒใƒ—ใซๅฏพใ™ใ‚‹ใ‚ตใƒใƒผใƒˆใŒUltralyticsใฎYOLOใƒขใƒ‡ใƒซใซ็ตฑๅˆใ•ใ‚ŒใŸใ“ใจใงใ€AppleใฎๅผทๅŠ›ใชMetal Performance Shaders๏ผˆMPS๏ผ‰ใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏใ‚’ไฝฟ็”จใ—ใฆใƒ‡ใƒใ‚คใ‚นใงใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใซใชใ‚Šใพใ—ใŸใ€‚ MPSใฏใ€Appleใฎใ‚ซใ‚นใ‚ฟใƒ ใ‚ทใƒชใ‚ณใƒณไธŠใงใฎ่จˆ็ฎ—ใ‚„็”ปๅƒๅ‡ฆ็†ใ‚ฟใ‚นใ‚ฏใฎ้ซ˜ๆ€ง่ƒฝใชๅฎŸ่กŒๆ–นๆณ•ใ‚’ๆไพ›ใ—ใพใ™ใ€‚ + +AppleใฎM1ใŠใ‚ˆใณM2ใƒใƒƒใƒ—ใงใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใซใฏใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒ—ใƒญใ‚ปใ‚นใ‚’้–‹ๅง‹ใ™ใ‚‹้š›ใซ`mps`ใ‚’ใƒ‡ใƒใ‚คใ‚นใจใ—ใฆๆŒ‡ๅฎšใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ไปฅไธ‹ใฏPythonใŠใ‚ˆใณใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใงใ“ใ‚Œใ‚’่กŒใ†ไพ‹ใงใ™๏ผš + +!!! 
Example "MPSใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') # ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซใฏใŠใ™ใ™ใ‚ใฎไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # MPSใ‚’ไฝฟใฃใฆใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "CLI" + + ```bash + # MPSใ‚’ไฝฟใฃใฆใ€ไบ‹ๅ‰ๅญฆ็ฟ’ๆธˆใฟ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +M1/M2ใƒใƒƒใƒ—ใฎ่จˆ็ฎ—่ƒฝๅŠ›ใ‚’ๅˆฉ็”จใ—ใชใŒใ‚‰ใ€ใ“ใ‚Œใซใ‚ˆใ‚Šใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚ฟใ‚นใ‚ฏใฎใ‚ˆใ‚ŠๅŠน็އ็š„ใชๅ‡ฆ็†ใŒๅฏ่ƒฝใซใชใ‚Šใพใ™ใ€‚ใ‚ˆใ‚Š่ฉณ็ดฐใชใ‚ฌใ‚คใƒ€ใƒณใ‚นใ‚„้ซ˜ๅบฆใช่จญๅฎšใ‚ชใƒ—ใ‚ทใƒงใƒณใซใคใ„ใฆใฏใ€[PyTorch MPSใฎใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ](https://pytorch.org/docs/stable/notes/mps.html)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +## ใƒญใ‚ฎใƒณใ‚ฐ + +YOLOv8ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹้š›ใ€ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๆ™‚้–“ใจใจใ‚‚ใซ่ฟฝ่ทกใ™ใ‚‹ใ“ใจใŒไพกๅ€คใ‚ใ‚‹ใ“ใจใงใ‚ใ‚‹ใจ่€ƒใˆใ‚‰ใ‚Œใพใ™ใ€‚ใ“ใ‚ŒใŒใƒญใ‚ฎใƒณใ‚ฐใฎๅฝนๅ‰ฒใซใชใ‚Šใพใ™ใ€‚UltralyticsใฎYOLOใฏใ€Cometใ€ClearMLใ€TensorBoardใฎ3็จฎ้กžใฎใƒญใ‚ฌใƒผใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚ + +ใƒญใ‚ฌใƒผใ‚’ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ไธŠ่จ˜ใฎใ‚ณใƒผใƒ‰ใ‚นใƒ‹ใƒšใƒƒใƒˆใ‹ใ‚‰ใƒ‰ใƒญใƒƒใƒ—ใƒ€ใ‚ฆใƒณใƒกใƒ‹ใƒฅใƒผใ‚’้ธๆŠžใ—ใ€ๅฎŸ่กŒใ—ใพใ™ใ€‚้ธๆŠžใ—ใŸใƒญใ‚ฌใƒผใŒใ‚คใƒณใ‚นใƒˆใƒผใƒซใ•ใ‚Œใ€ๅˆๆœŸๅŒ–ใ•ใ‚Œใพใ™ใ€‚ + +### Comet + +[Comet](https://www.comet.ml/site/)ใฏใ€ใƒ‡ใƒผใ‚ฟใ‚ตใ‚คใ‚จใƒณใƒ†ใ‚ฃใ‚นใƒˆใ‚„้–‹็™บ่€…ใŒๅฎŸ้จ“ใ‚„ใƒขใƒ‡ใƒซใ‚’่ฟฝ่ทกใ€ๆฏ”่ผƒใ€่ชฌๆ˜Žใ€ๆœ€้ฉๅŒ–ใ™ใ‚‹ใŸใ‚ใฎใƒ—ใƒฉใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒ ใงใ™ใ€‚ใƒชใ‚ขใƒซใ‚ฟใ‚คใƒ ใƒกใƒˆใƒชใ‚ฏใ‚นใ‚„ใ‚ณใƒผใƒ‰ๅทฎๅˆ†ใ€ใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใฎ่ฟฝ่ทกใชใฉใฎๆฉŸ่ƒฝใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ + +Cometใ‚’ไฝฟ็”จใ™ใ‚‹ใซใฏ๏ผš + +!!! Example "ไพ‹" + + === "Python" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +Cometใ‚ขใ‚ซใ‚ฆใƒณใƒˆใซใ‚ตใ‚คใƒณใ‚คใƒณใ—ใ€APIใ‚ญใƒผใ‚’ๅ–ๅพ—ใ—ใฆใใ ใ•ใ„ใ€‚ใ“ใฎใ‚ญใƒผใ‚’็’ฐๅขƒๅค‰ๆ•ฐใพใŸใฏใ‚นใ‚ฏใƒชใƒ—ใƒˆใซ่ฟฝๅŠ ใ—ใฆใ€ๅฎŸ้จ“ใ‚’ใƒญใ‚ฐใซ่จ˜้Œฒใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + +### ClearML + +[ClearML](https://www.clear.ml/)ใฏใ€ๅฎŸ้จ“ใฎ่ฟฝ่ทกใ‚’่‡ชๅ‹•ๅŒ–ใ—ใ€่ณ‡ๆบใฎๅŠน็އ็š„ใชๅ…ฑๆœ‰ใ‚’ๆ”ฏๆดใ™ใ‚‹ใ‚ชใƒผใƒ—ใƒณใ‚ฝใƒผใ‚นใƒ—ใƒฉใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒ ใงใ™ใ€‚ใƒใƒผใƒ ใŒMLไฝœๆฅญใ‚’ใ‚ˆใ‚ŠๅŠน็އ็š„ใซ็ฎก็†ใ€ๅฎŸ่กŒใ€ๅ†็พใ™ใ‚‹ใฎใซๅฝน็ซ‹ใกใพใ™ใ€‚ + +ClearMLใ‚’ไฝฟ็”จใ™ใ‚‹ใซใฏ๏ผš + +!!! Example "ไพ‹" + + === "Python" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +ใ“ใฎใ‚นใ‚ฏใƒชใƒ—ใƒˆใ‚’ๅฎŸ่กŒใ—ใŸๅพŒใ€ใƒ–ใƒฉใ‚ฆใ‚ถใงClearMLใ‚ขใ‚ซใ‚ฆใƒณใƒˆใซใ‚ตใ‚คใƒณใ‚คใƒณใ—ใ€ใ‚ปใƒƒใ‚ทใƒงใƒณใ‚’่ช่จผใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ + +### TensorBoard + +[TensorBoard](https://www.tensorflow.org/tensorboard)ใฏใ€TensorFlowใฎ่ฆ–่ฆšๅŒ–ใƒ„ใƒผใƒซใ‚ญใƒƒใƒˆใงใ™ใ€‚TensorFlowใ‚ฐใƒฉใƒ•ใ‚’ๅฏ่ฆ–ๅŒ–ใ—ใ€ใ‚ฐใƒฉใƒ•ใฎๅฎŸ่กŒใซ้–ขใ™ใ‚‹ๅฎš้‡็š„ใƒกใƒˆใƒชใƒƒใ‚ฏใ‚’ใƒ—ใƒญใƒƒใƒˆใ—ใ€ใใ‚Œใ‚’้€š้Žใ™ใ‚‹็”ปๅƒใชใฉใฎ่ฟฝๅŠ ใƒ‡ใƒผใ‚ฟใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +[Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb)ใงTensorBoardใ‚’ไฝฟ็”จใ™ใ‚‹ใซใฏ๏ผš + +!!! 
Example "ไพ‹" + + === "CLI" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # 'runs'ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใจ็ฝฎใๆ›ใˆใฆใใ ใ•ใ„ + ``` + +TensorBoardใ‚’ใƒญใƒผใ‚ซใƒซใงไฝฟ็”จใ™ใ‚‹ๅ ดๅˆใฏใ€http://localhost:6006/ ใง็ตๆžœใ‚’็ขบ่ชใงใใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "CLI" + ```bash + tensorboard --logdir ultralytics/runs # 'runs'ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใจ็ฝฎใๆ›ใˆใฆใใ ใ•ใ„ + ``` + +ใ“ใ‚ŒใงTensorBoardใŒใƒญใƒผใƒ‰ใ•ใ‚Œใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒญใ‚ฐใŒไฟๅญ˜ใ•ใ‚Œใฆใ„ใ‚‹ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + +ใƒญใ‚ฐใ‚’่จญๅฎšใ—ใŸๅพŒใ€ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้€ฒใ‚ใฆใใ ใ•ใ„ใ€‚ใ™ในใฆใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒกใƒˆใƒชใ‚ฏใ‚นใŒ้ธๆŠžใ—ใŸใƒ—ใƒฉใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒ ใซ่‡ชๅ‹•็š„ใซ่จ˜้Œฒใ•ใ‚Œใ€ใ“ใ‚Œใ‚‰ใฎใƒญใ‚ฐใ‚’ใ‚ขใ‚ฏใ‚ปใ‚นใ—ใฆใ€ๆ™‚้–“ใจใจใ‚‚ใซใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’็›ฃ่ฆ–ใ—ใŸใ‚Šใ€ใ•ใพใ–ใพใชใƒขใƒ‡ใƒซใ‚’ๆฏ”่ผƒใ—ใŸใ‚Šใ€ๆ”นๅ–„ใฎไฝ™ๅœฐใ‚’็‰นๅฎšใ—ใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/modes/train.md:Zone.Identifier b/ultralytics/docs/ja/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/modes/val.md b/ultralytics/docs/ja/modes/val.md new file mode 100755 index 0000000..354ca37 --- /dev/null +++ b/ultralytics/docs/ja/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: YOLOv8ใƒขใƒ‡ใƒซใฎใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ‚ฌใ‚คใƒ‰ใ€‚ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ่จญๅฎšใจใƒกใƒˆใƒชใ‚ฏใ‚นใ‚’ไฝฟ็”จใ—ใฆYOLOใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ™ใ‚‹ๆ–นๆณ•ใ‚’PythonใจCLIใฎไพ‹ใงๅญฆใณใพใ—ใ‚‡ใ†ใ€‚ +keywords: Ultralytics, YOLO ใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ, YOLOv8, ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ, ใƒขใƒ‡ใƒซ่ฉ•ไพก, ใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟ, ๆญฃ็ขบๆ€ง, ใƒกใƒˆใƒชใ‚ฏใ‚น, Python, CLI +--- + +# Ultralytics YOLOใซใ‚ˆใ‚‹ใƒขใƒ‡ใƒซใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ + +Ultralytics YOLOใฎใ‚จใ‚ณใ‚ทใ‚นใƒ†ใƒ ใจ็ตฑๅˆๆฉŸ่ƒฝ + +## ใฏใ˜ใ‚ใซ + +ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใฏใ€่จ“็ทดใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใฎๅ“่ณชใ‚’่ฉ•ไพกใ™ใ‚‹ใŸใ‚ใซๆฉŸๆขฐๅญฆ็ฟ’ใƒ‘ใ‚คใƒ—ใƒฉใ‚คใƒณใง้‡่ฆใชใ‚นใƒ†ใƒƒใƒ—ใงใ™ใ€‚Ultralytics YOLOv8ใฎValใƒขใƒผใƒ‰ใฏใ€ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ™ใ‚‹ใŸใ‚ใฎๅ …็‰ขใชใƒ„ใƒผใƒซใจใƒกใƒˆใƒชใ‚ฏใ‚นใ‚’ๆไพ›ใ—ใพใ™ใ€‚ใ“ใฎใ‚ฌใ‚คใƒ‰ใฏใ€Valใƒขใƒผใƒ‰ใ‚’ๅŠนๆžœ็š„ใซไฝฟ็”จใ—ใฆใ€ใƒขใƒ‡ใƒซใŒๆญฃ็ขบใงไฟก้ ผใงใใ‚‹ใ“ใจใ‚’็ขบ่ชใ™ใ‚‹ใŸใ‚ใฎๅฎŒๅ…จใชใƒชใ‚ฝใƒผใ‚นใจใชใฃใฆใ„ใพใ™ใ€‚ + +## Ultralytics YOLOใงใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ‚’่กŒใ†ๅˆฉ็‚น + +YOLOv8ใฎValใƒขใƒผใƒ‰ใ‚’ไฝฟ็”จใ™ใ‚‹ใ“ใจใฎๅˆฉ็‚นใฏไปฅไธ‹ใฎ้€šใ‚Šใงใ™๏ผš + +- **Precision๏ผˆ็ฒพๅบฆ๏ผ‰๏ผš** mAP50ใ€mAP75ใ€mAP50-95ใจใ„ใฃใŸๆญฃ็ขบใชใƒกใƒˆใƒชใ‚ฏใ‚นใ‚’ๅ–ๅพ—ใ—ใ€ใƒขใƒ‡ใƒซใ‚’็ทๅˆ็š„ใซ่ฉ•ไพกใ—ใพใ™ใ€‚ +- **Convenience๏ผˆไพฟๅˆฉใ•๏ผ‰๏ผš** ่จ“็ทด่จญๅฎšใ‚’่จ˜ๆ†ถใ™ใ‚‹็ต„ใฟ่พผใฟใฎๆฉŸ่ƒฝใ‚’ๅˆฉ็”จใ—ใฆใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒ—ใƒญใ‚ปใ‚นใ‚’็ฐก็ด ๅŒ–ใ—ใพใ™ใ€‚ +- **Flexibility๏ผˆๆŸ”่ปŸๆ€ง๏ผ‰๏ผš** ๅŒใ˜ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚„็•ฐใชใ‚‹ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ€็”ปๅƒใ‚ตใ‚คใ‚บใงใƒขใƒ‡ใƒซใ‚’ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใงใใพใ™ใ€‚ +- **Hyperparameter Tuning๏ผˆใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใƒใƒฅใƒผใƒ‹ใƒณใ‚ฐ๏ผ‰๏ผš** ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒกใƒˆใƒชใ‚ฏใ‚นใ‚’ๅˆฉ็”จใ—ใฆใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๅ‘ไธŠใ•ใ›ใ‚‹ใŸใ‚ใฎใƒใƒฅใƒผใƒ‹ใƒณใ‚ฐใ‚’่กŒใ„ใพใ™ใ€‚ + +### Valใƒขใƒผใƒ‰ใฎไธป่ฆๆฉŸ่ƒฝ + +YOLOv8ใฎValใƒขใƒผใƒ‰ใซใ‚ˆใ‚Šๆไพ›ใ•ใ‚Œใ‚‹ๆณจ็›ฎใ™ในใๆฉŸ่ƒฝใฏไปฅไธ‹ใฎ้€šใ‚Šใงใ™๏ผš + +- **Automated Settings๏ผˆ่‡ชๅ‹•่จญๅฎš๏ผ‰๏ผš** 
่จ“็ทดๆ™‚ใฎ่จญๅฎšใ‚’ใƒขใƒ‡ใƒซใŒ่จ˜ๆ†ถใ—ใฆใ„ใ‚‹ใŸใ‚ใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใŒ็›ดๆ„Ÿ็š„ใซ่กŒใˆใพใ™ใ€‚ +- **Multi-Metric Support๏ผˆ่ค‡ๆ•ฐใƒกใƒˆใƒชใƒƒใ‚ฏใฎใ‚ตใƒใƒผใƒˆ๏ผ‰๏ผš** ็ฒพๅบฆใƒกใƒˆใƒชใƒƒใ‚ฏใฎ็ฏ„ๅ›ฒใซๅŸบใฅใ„ใฆใƒขใƒ‡ใƒซใ‚’่ฉ•ไพกใ—ใพใ™ใ€‚ +- **CLI and Python API๏ผš** ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใซใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใ‚คใ‚นใ‚‚ใ—ใใฏPython APIใฎใฉใกใ‚‰ใ‹ใ‚’้ธๆŠžใงใใพใ™ใ€‚ +- **Data Compatibility๏ผˆใƒ‡ใƒผใ‚ฟไบ’ๆ›ๆ€ง๏ผ‰๏ผš** ่จ“็ทดๆฎต้šŽใงไฝฟใ‚ใ‚ŒใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฏใ‚‚ใกใ‚ใ‚“ใ€ใ‚ซใ‚นใ‚ฟใƒ ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจใ‚‚ใ‚ทใƒผใƒ ใƒฌใ‚นใซๅ‹•ไฝœใ—ใพใ™ใ€‚ + +!!! Tip "Tip" + + * YOLOv8ใƒขใƒ‡ใƒซใฏ่จ“็ทด่จญๅฎšใ‚’่‡ชๅ‹•็š„ใซ่จ˜ๆ†ถใ—ใฆใ„ใ‚‹ใฎใงใ€`yolo val model=yolov8n.pt`ใ‚„`model('yolov8n.pt').val()`ใ ใ‘ใงใ€ๅ…ƒใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจๅŒใ˜็”ปๅƒใ‚ตใ‚คใ‚บใง็ฐกๅ˜ใซใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณๅฏ่ƒฝใงใ™ใ€‚ + +## ไฝฟ็”จไพ‹ + +COCO128ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆไธŠใง่จ“็ทดๆธˆใฟใฎYOLOv8nใƒขใƒ‡ใƒซใฎ็ฒพๅบฆใ‚’ๆคœ่จผใ—ใพใ™ใ€‚`model`ใฏใใฎ่จ“็ทดๆ™‚ใฎ`data`ๅŠใณๅผ•ๆ•ฐใ‚’ใƒขใƒ‡ใƒซๅฑžๆ€งใจใ—ใฆไฟๆŒใ—ใฆใ„ใ‚‹ใŸใ‚ใ€ๅผ•ๆ•ฐใ‚’ๆธกใ™ๅฟ…่ฆใฏใ‚ใ‚Šใพใ›ใ‚“ใ€‚ๅ…จใฆใฎใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅผ•ๆ•ฐใฎใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€ไปฅไธ‹ใฎArgumentsใ‚ปใ‚ฏใ‚ทใƒงใƒณใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ™ใ‚‹ + metrics = model.val() # ๅผ•ๆ•ฐใฏๅฟ…่ฆใชใ—ใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจ่จญๅฎšใฏ่จ˜ๆ†ถๆŒใก + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅ„ใ‚ซใƒ†ใ‚ดใƒชใฎmap50-95ใŒๅซใพใ‚ŒใŸใƒชใ‚นใƒˆ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ + yolo detect val model=path/to/best.pt # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ + ``` + +## ๅผ•ๆ•ฐ + +YOLOใƒขใƒ‡ใƒซใซๅฏพใ™ใ‚‹ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ่จญๅฎšใฏใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆไธŠใงใฎใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใ‚‹ใ•ใพใ–ใพใชใƒใ‚คใƒ‘ใƒผใƒ‘ใƒฉใƒกใƒผใ‚ฟใจ่จญๅฎšใ‚’ๆŒ‡ใ—ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎ่จญๅฎšใฏใ€ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ€ใ‚นใƒ”ใƒผใƒ‰ใ€ใใ—ใฆ็ฒพๅบฆใซๅฝฑ้Ÿฟใ‚’ไธŽใˆใ‚‹ๅฏ่ƒฝๆ€งใŒใ‚ใ‚Šใพใ™ใ€‚ไธ€่ˆฌ็š„ใชYOLOใฎใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ่จญๅฎšใซใฏใ€ใƒใƒƒใƒใ‚ตใ‚คใ‚บใ‚„่จ“็ทดไธญใฎใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ้ ปๅบฆใ€ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใ‚‹ใƒกใƒˆใƒชใƒƒใ‚ฏใŒๅซใพใ‚Œใพใ™ใ€‚ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒ—ใƒญใ‚ปใ‚นใซๅฝฑ้Ÿฟใ‚’ไธŽใˆใ‚‹ใ‹ใ‚‚ใ—ใ‚Œใชใ„ไป–ใฎ่ฆ็ด ใซใฏใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎใ‚ตใ‚คใ‚บใจๆง‹ๆˆใ€ใŠใ‚ˆใณใƒขใƒ‡ใƒซใŒไฝฟ็”จใ•ใ‚Œใฆใ„ใ‚‹ๅ…ทไฝ“็š„ใชใ‚ฟใ‚นใ‚ฏใชใฉใŒใ‚ใ‚Šใพใ™ใ€‚ใƒขใƒ‡ใƒซใŒใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆไธŠใงใ†ใพใๅ‹•ไฝœใ—ใฆใ„ใ‚‹ใ“ใจใ‚’็ขบ่ชใ—ใ€้Žๅญฆ็ฟ’ใ‚’ๆคœๅ‡บใ—ใฆ้˜ฒใใŸใ‚ใซใ€ใ“ใ‚Œใ‚‰ใฎ่จญๅฎšใ‚’ๆ…Ž้‡ใซใƒใƒฅใƒผใƒ‹ใƒณใ‚ฐใ—ใฆๅฎŸ้จ“ใ™ใ‚‹ใ“ใจใŒ้‡่ฆใงใ™ใ€‚ + +| ใ‚ญใƒผ | ๅ€ค | ่ชฌๆ˜Ž | +|---------------|---------|--------------------------------------------------| +| `data` | `None` | ใƒ‡ใƒผใ‚ฟใƒ•ใ‚กใ‚คใƒซใธใฎใƒ‘ใ‚นใ€ไพ‹: coco128.yaml | +| `imgsz` | `640` | ๅ…ฅๅŠ›็”ปๅƒใฎใ‚ตใ‚คใ‚บใ‚’ๆ•ดๆ•ฐใง | +| `batch` | `16` | ใƒใƒƒใƒใ”ใจใฎ็”ปๅƒๆ•ฐ๏ผˆAutoBatchใฎๅ ดๅˆใฏ-1๏ผ‰ | +| `save_json` | `False` | JSONใƒ•ใ‚กใ‚คใƒซใซ็ตๆžœใ‚’ไฟๅญ˜ | +| `save_hybrid` | `False` | 
ใƒใ‚คใƒ–ใƒชใƒƒใƒ‰ใƒใƒผใ‚ธใƒงใƒณใฎใƒฉใƒ™ใƒซใ‚’ไฟๅญ˜๏ผˆใƒฉใƒ™ใƒซ๏ผ‹่ฟฝๅŠ ใฎไบˆๆธฌ๏ผ‰ | +| `conf` | `0.001` | ๆคœๅ‡บใฎใŸใ‚ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆไฟก้ ผๅบฆ้–พๅ€ค | +| `iou` | `0.6` | NMS็”จใฎไบคๅทฎใ‚ชใƒผใƒใƒผใƒฆใƒ‹ใ‚ชใƒณ๏ผˆIoU๏ผ‰้–พๅ€ค | +| `max_det` | `300` | ็”ปๅƒใ‚ใŸใ‚Šใฎๆœ€ๅคงๆคœๅ‡บๆ•ฐ | +| `half` | `True` | ๅŠ็ฒพๅบฆ๏ผˆFP16๏ผ‰ใ‚’ไฝฟ็”จใ™ใ‚‹ | +| `device` | `None` | ๅฎŸ่กŒใƒ‡ใƒใ‚คใ‚นใ€ไพ‹: cuda device=0/1/2/3ใ‚„device=cpu | +| `dnn` | `False` | ONNXๆŽจ่ซ–็”จใฎOpenCV DNNใ‚’ไฝฟ็”จ | +| `plots` | `False` | ่จ“็ทดไธญใซใƒ—ใƒญใƒƒใƒˆใ‚’่กจ็คบ | +| `rect` | `False` | ๅ„ใƒใƒƒใƒใŒๆœ€ๅฐ้™ใฎใƒ‘ใƒ‡ใ‚ฃใƒณใ‚ฐใงๆ•ด็†ใ•ใ‚ŒใŸ็Ÿฉๅฝขใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ | +| `split` | `val` | ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใซไฝฟ็”จใ™ใ‚‹ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎใ‚นใƒ—ใƒชใƒƒใƒˆใ€ไพ‹: 'val'ใ€'test'ใ€'train' | +| | | | diff --git a/ultralytics/docs/ja/modes/val.md:Zone.Identifier b/ultralytics/docs/ja/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/quickstart.md b/ultralytics/docs/ja/quickstart.md new file mode 100755 index 0000000..35c737a --- /dev/null +++ b/ultralytics/docs/ja/quickstart.md @@ -0,0 +1,198 @@ +--- +comments: true +description: Ultralyticsใฎpipใ€condaใ€gitใ€Dockerใ‚’ไฝฟ็”จใ—ใŸๆง˜ใ€…ใชใ‚คใƒณใ‚นใƒˆใƒผใƒซๆ–นๆณ•ใ‚’ๆŽข็ดขใ—ใ€ใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใพใŸใฏPythonใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆๅ†…ใงใฎUltralyticsใฎไฝฟ็”จๆ–นๆณ•ใ‚’ๅญฆใณใพใ™ใ€‚ +keywords: Ultralyticsใ‚คใƒณใ‚นใƒˆใƒผใƒซ, pipใ‚คใƒณใ‚นใƒˆใƒผใƒซUltralytics, Dockerใ‚คใƒณใ‚นใƒˆใƒผใƒซUltralytics, Ultralyticsใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚น, Ultralytics Pythonใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚น +--- + +## Ultralyticsใฎใ‚คใƒณใ‚นใƒˆใƒผใƒซ + +Ultralyticsใฏpipใ€condaใ€Dockerใ‚’ๅซใ‚€ใ•ใพใ–ใพใชใ‚คใƒณใ‚นใƒˆใƒผใƒซๆ–นๆณ•ใ‚’ๆไพ›ใ—ใฆใ„ใพใ™ใ€‚ๆœ€ๆ–ฐใฎๅฎ‰ๅฎš็‰ˆใƒชใƒชใƒผใ‚นใงใ‚ใ‚‹`ultralytics` pipใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’้€šใ˜ใฆYOLOv8ใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ใ‹ใ€ๆœ€ๆ–ฐใƒใƒผใ‚ธใƒงใƒณใ‚’ๅ–ๅพ—ใ™ใ‚‹ใŸใ‚ใซ[Ultralytics GitHubใƒชใƒใ‚ธใƒˆใƒช](https://github.com/ultralytics/ultralytics)ใ‚’ใ‚ฏใƒญใƒผใƒณใ—ใพใ™ใ€‚Dockerใฏใ€ใƒญใƒผใ‚ซใƒซใ‚คใƒณใ‚นใƒˆใƒผใƒซใ‚’ๅ›ž้ฟใ—ใ€ๅญค็ซ‹ใ—ใŸใ‚ณใƒณใƒ†ใƒŠๅ†…ใงใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ๅฎŸ่กŒใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใงใใพใ™ใ€‚ + +!!! 
Example "ใ‚คใƒณใ‚นใƒˆใƒผใƒซ" + + === "Pipใงใฎใ‚คใƒณใ‚นใƒˆใƒผใƒซ๏ผˆๆŽจๅฅจ๏ผ‰" + pipใ‚’ไฝฟ็”จใ—ใฆ`ultralytics`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ใ‹ใ€`pip install -U ultralytics`ใ‚’ๅฎŸ่กŒใ—ใฆๆ—ขๅญ˜ใฎใ‚คใƒณใ‚นใƒˆใƒผใƒซใ‚’ใ‚ขใƒƒใƒ—ใƒ‡ใƒผใƒˆใ—ใพใ™ใ€‚`ultralytics`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใฎ่ฉณ็ดฐใซใคใ„ใฆใฏใ€Python Package Index๏ผˆPyPI๏ผ‰ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/)ใ€‚ + + [![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # PyPIใ‹ใ‚‰ultralyticsใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซ + pip install ultralytics + ``` + + GitHubใฎ[ใƒชใƒใ‚ธใƒˆใƒช](https://github.com/ultralytics/ultralytics)ใ‹ใ‚‰็›ดๆŽฅ`ultralytics`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ใ“ใจใ‚‚ใงใใพใ™ใ€‚ใ“ใ‚Œใฏใ€ๆœ€ๆ–ฐใฎ้–‹็™บ็‰ˆใŒๅฟ…่ฆใชๅ ดๅˆใซไพฟๅˆฉใ‹ใ‚‚ใ—ใ‚Œใพใ›ใ‚“ใ€‚ใ‚ทใ‚นใƒ†ใƒ ใซGitใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใƒ„ใƒผใƒซใŒใ‚คใƒณใ‚นใƒˆใƒผใƒซใ•ใ‚Œใฆใ„ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚`@main`ใ‚ณใƒžใƒณใƒ‰ใฏ`main`ใƒ–ใƒฉใƒณใƒใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใ€ๅˆฅใฎใƒ–ใƒฉใƒณใƒใ€ไพ‹ใˆใฐ`@my-branch`ใซๅค‰ๆ›ดใ—ใŸใ‚Šใ€`main`ใƒ–ใƒฉใƒณใƒใซใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใ™ใ‚‹ใŸใ‚ใซๅฎŒๅ…จใซๅ‰Š้™คใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + + ```bash + # GitHubใ‹ใ‚‰ultralyticsใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซ + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Condaใงใฎใ‚คใƒณใ‚นใƒˆใƒผใƒซ" + Condaใฏpipใฎไปฃใ‚ใ‚Šใฎใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใƒžใƒใƒผใ‚ธใƒฃใƒผใงใ€ใ‚คใƒณใ‚นใƒˆใƒผใƒซใซใ‚‚ไฝฟ็”จใงใใพใ™ใ€‚ใ‚ˆใ‚Š่ฉณ็ดฐใฏAnacondaใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics)ใ€‚Condaใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ๆ›ดๆ–ฐใ™ใ‚‹ใŸใ‚ใฎUltralyticsใƒ•ใ‚ฃใƒผใƒ‰ใ‚นใƒˆใƒƒใ‚ฏใƒชใƒใ‚ธใƒˆใƒชใฏใ“ใกใ‚‰ใงใ™ [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/)ใ€‚ + + + [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # Condaใ‚’ไฝฟ็”จใ—ใฆultralyticsใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซ + conda install -c conda-forge ultralytics + ``` + + !!! 
Note "ใƒŽใƒผใƒˆ" + + CUDA็’ฐๅขƒใงใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ๅ ดๅˆใ€ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใƒžใƒใƒผใ‚ธใƒฃใƒผใŒ็ซถๅˆใ‚’่งฃๆฑบใงใใ‚‹ใ‚ˆใ†ใซใ™ใ‚‹ใŸใ‚ใ€`ultralytics`ใ€`pytorch`ใ€`pytorch-cuda`ใ‚’ๅŒใ˜ใ‚ณใƒžใƒณใƒ‰ใงไธ€็ท’ใซใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ใฎใŒใƒ™ใ‚นใƒˆใƒ—ใƒฉใ‚ฏใƒ†ใ‚ฃใ‚นใงใ™ใ€‚ใพใŸใฏใ€CPUๅฐ‚็”จใฎ`pytorch`ใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใซๅฟ…่ฆใชๅ ดๅˆใฏไธŠๆ›ธใใ™ใ‚‹ใ‚ˆใ†ใซ`pytorch-cuda`ใ‚’ๆœ€ๅพŒใซใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใพใ™ใ€‚ + ```bash + # Condaใ‚’ไฝฟ็”จใ—ใฆไธ€็ท’ใซใ™ในใฆใฎใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซ + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Conda Dockerใ‚คใƒกใƒผใ‚ธ + + UltralyticsใฎConda Dockerใ‚คใƒกใƒผใ‚ธใ‚‚[DockerHub](https://hub.docker.com/r/ultralytics/ultralytics)ใ‹ใ‚‰ๅˆฉ็”จๅฏ่ƒฝใงใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใ‚คใƒกใƒผใ‚ธใฏ[Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/)ใซๅŸบใฅใ„ใฆใŠใ‚Šใ€Conda็’ฐๅขƒใง`ultralytics`ใ‚’ไฝฟ็”จใ™ใ‚‹็ฐกๅ˜ใชๆ–นๆณ•ใงใ™ใ€‚ + + ```bash + # ใ‚คใƒกใƒผใ‚ธๅใ‚’ๅค‰ๆ•ฐใจใ—ใฆ่จญๅฎš + t=ultralytics/ultralytics:latest-conda + + # Docker Hubใ‹ใ‚‰ๆœ€ๆ–ฐใฎultralyticsใ‚คใƒกใƒผใ‚ธใ‚’ใƒ—ใƒซ + sudo docker pull $t + + # ใ™ในใฆใฎGPUใ‚’ๆŒใคใ‚ณใƒณใƒ†ใƒŠใงultralyticsใ‚คใƒกใƒผใ‚ธใ‚’ๅฎŸ่กŒ + sudo docker run -it --ipc=host --gpus all $t # ใ™ในใฆใฎGPU + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # GPUใ‚’ๆŒ‡ๅฎš + ``` + + === "Gitใ‚ฏใƒญใƒผใƒณ" + ้–‹็™บใธใฎ่ฒข็Œฎใซ่ˆˆๅ‘ณใŒใ‚ใ‚‹ๅ ดๅˆใ‚„ใ€ๆœ€ๆ–ฐใฎใ‚ฝใƒผใ‚นใ‚ณใƒผใƒ‰ใงๅฎŸ้จ“ใ—ใŸใ„ๅ ดๅˆใฏใ€`ultralytics`ใƒชใƒใ‚ธใƒˆใƒชใ‚’ใ‚ฏใƒญใƒผใƒณใ—ใฆใใ ใ•ใ„ใ€‚ใ‚ฏใƒญใƒผใƒณใ—ใŸๅพŒใ€ใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใซ็งปๅ‹•ใ—ใ€pipใ‚’ไฝฟใฃใฆ็ทจ้›†ๅฏ่ƒฝใƒขใƒผใƒ‰`-e`ใงใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใพใ™ใ€‚ + ```bash + # ultralyticsใƒชใƒใ‚ธใƒˆใƒชใ‚’ใ‚ฏใƒญใƒผใƒณ + git clone https://github.com/ultralytics/ultralytics + + # ใ‚ฏใƒญใƒผใƒณใ—ใŸใƒ‡ใ‚ฃใƒฌใ‚ฏใƒˆใƒชใซ็งปๅ‹• + cd ultralytics + + # ้–‹็™บ็”จใซ็ทจ้›†ๅฏ่ƒฝใƒขใƒผใƒ‰ใงใƒ‘ใƒƒใ‚ฑใƒผใ‚ธใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซ + pip install -e . + ``` + +ๅฟ…่ฆใชไพๅญ˜้–ขไฟ‚ใฎใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€`ultralytics`ใฎ[requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt)ใƒ•ใ‚กใ‚คใƒซใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ไธŠ่จ˜ใฎๅ…จใฆใฎไพ‹ใงใฏใ€ๅฟ…่ฆใชไพๅญ˜้–ขไฟ‚ใ‚’ๅ…จใฆใ‚คใƒณใ‚นใƒˆใƒผใƒซใ—ใพใ™ใ€‚ + +
+**Watch:** Ultralytics YOLO Quick Start Guide๏ผˆๅŸ‹ใ‚่พผใฟๅ‹•็”ป๏ผ‰
+ +!!! Tip "ใƒ’ใƒณใƒˆ" + + PyTorchใฎ่ฆไปถใฏใ‚ชใƒšใƒฌใƒผใƒ†ใ‚ฃใƒณใ‚ฐใ‚ทใ‚นใƒ†ใƒ ใจCUDAใฎ่ฆไปถใซใ‚ˆใฃใฆ็•ฐใชใ‚‹ใŸใ‚ใ€[https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally)ใซๅพ“ใฃใฆๆœ€ๅˆใซPyTorchใ‚’ใ‚คใƒณใ‚นใƒˆใƒผใƒซใ™ใ‚‹ใ“ใจใ‚’ใŠๅ‹งใ‚ใ—ใพใ™ใ€‚ + + + PyTorch Installation Instructions + + +## CLIใงUltralyticsใ‚’ไฝฟ็”จ + +Ultralyticsใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚น๏ผˆCLI๏ผ‰ใ‚’ไฝฟ็”จใ™ใ‚‹ใจใ€Python็’ฐๅขƒใŒใชใใฆใ‚‚ๅ˜ไธ€ใฎ่กŒใฎใ‚ณใƒžใƒณใƒ‰ใ‚’็ฐกๅ˜ใซๅฎŸ่กŒใงใใพใ™ใ€‚CLIใฏใ‚ซใ‚นใ‚ฟใƒžใ‚คใ‚บใ‚‚Pythonใ‚ณใƒผใƒ‰ใ‚‚ๅฟ…่ฆใ‚ใ‚Šใพใ›ใ‚“ใ€‚ๅ˜็ด”ใซใ™ในใฆใฎใ‚ฟใ‚นใ‚ฏใ‚’`yolo`ใ‚ณใƒžใƒณใƒ‰ใงใ‚ฟใƒผใƒŸใƒŠใƒซใ‹ใ‚‰ๅฎŸ่กŒใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ณใƒžใƒณใƒ‰ใƒฉใ‚คใƒณใ‹ใ‚‰YOLOv8ใ‚’ไฝฟ็”จใ™ใ‚‹ๆ–นๆณ•ใซใคใ„ใฆ่ฉณใ—ใใฏใ€[CLIใ‚ฌใ‚คใƒ‰](/../usage/cli.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "ๆง‹ๆ–‡" + + Ultralyticsใฎ`yolo`ใ‚ณใƒžใƒณใƒ‰ใฏไปฅไธ‹ใฎๆง‹ๆ–‡ใ‚’ไฝฟ็”จใ—ใพใ™๏ผš + ```bash + yolo TASK MODE ARGS + + ใ“ใ“ใง TASK๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ใฏ[detect, segment, classify]ใฎใ†ใกใฎ1ใค + MODE๏ผˆๅฟ…้ ˆ๏ผ‰ใฏ[train, val, predict, export, track]ใฎใ†ใกใฎ1ใค + ARGS๏ผˆใ‚ชใƒ—ใ‚ทใƒงใƒณ๏ผ‰ใฏใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใ‚’ไธŠๆ›ธใใ™ใ‚‹ไปปๆ„ใฎๆ•ฐใฎใ‚ซใ‚นใ‚ฟใƒ 'arg=value'ใƒšใ‚ขใงใ™ใ€‚ + ``` + full [Configuration Guide](/../usage/cfg.md)ใพใŸใฏ`yolo cfg`ใงๅ…จใฆใฎARGSใ‚’็ขบ่ชใ—ใฆใใ ใ•ใ„ + + === "ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ" + + 10ใ‚จใƒใƒƒใ‚ฏใซใ‚ใŸใฃใฆๅˆๆœŸๅญฆ็ฟ’็އ0.01ใงๆคœๅ‡บใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "ไบˆๆธฌ" + + ็”ปๅƒใ‚ตใ‚คใ‚บ320ใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆYouTubeใƒ“ใƒ‡ใ‚ชใ‚’ไบˆๆธฌ๏ผš + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "ๆคœ่จผ" + + ใƒใƒƒใƒใ‚ตใ‚คใ‚บ1ใŠใ‚ˆใณ็”ปๅƒใ‚ตใ‚คใ‚บ640ใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸๆคœๅ‡บใƒขใƒ‡ใƒซใ‚’ๆคœ่จผใ™ใ‚‹๏ผš + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ" + + ็”ปๅƒใ‚ตใ‚คใ‚บ224 x 128ใงYOLOv8nๅˆ†้กžใƒขใƒ‡ใƒซใ‚’ONNXๅฝขๅผใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ๏ผˆTASKใฏไธ่ฆ๏ผ‰ + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "็‰นๆฎŠ" + + ใƒใƒผใ‚ธใƒงใƒณใ‚’็ขบ่ชใ—ใŸใ‚Šใ€่จญๅฎšใ‚’่กจ็คบใ—ใŸใ‚Šใ€ใƒใ‚งใƒƒใ‚ฏใ‚’่กŒใฃใŸใ‚Šใ™ใ‚‹ใŸใ‚ใฎ็‰นๅˆฅใชใ‚ณใƒžใƒณใƒ‰ใ‚’ๅฎŸ่กŒใ—ใพใ™๏ผš + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! 
Warning "่ญฆๅ‘Š" + + ๅผ•ๆ•ฐใฏ`arg=val`ใƒšใ‚ขใจใ—ใฆๆธกใ•ใ‚Œใ€`=`่จ˜ๅทใงๅˆ†ๅ‰ฒใ•ใ‚Œใ€ใƒšใ‚ข้–“ใซใ‚นใƒšใƒผใ‚น` `ใŒๅฟ…่ฆใงใ™ใ€‚ๅผ•ๆ•ฐใฎใƒ—ใƒฌใƒ•ใ‚ฃใƒƒใ‚ฏใ‚นใซ`--`ใ‚„ๅผ•ๆ•ฐ้–“ใซใ‚ซใƒณใƒž`,`ใ‚’ไฝฟ็”จใ—ใชใ„ใงใใ ใ•ใ„ใ€‚ + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`   โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`   โŒ + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`   โŒ + +[CLIใ‚ฌใ‚คใƒ‰](/../usage/cli.md){ .md-button } + +## PythonใงUltralyticsใ‚’ไฝฟ็”จ + +YOLOv8ใฎPythonใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใ‚’ไฝฟ็”จใ™ใ‚‹ใจใ€Pythonใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ‚ทใƒผใƒ ใƒฌใ‚นใซ็ตฑๅˆใ—ใ€ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ€ๅฎŸ่กŒใ€ๅ‡บๅŠ›ใ‚’ๅ‡ฆ็†ใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚็ฐกๅ˜ใ•ใจไฝฟใ„ใ‚„ใ™ใ•ใ‚’ๅฟต้ ญใซ่จญ่จˆใ•ใ‚ŒใŸPythonใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใซใ‚ˆใ‚Šใ€ใƒฆใƒผใ‚ถใƒผใฏ็ด ๆ—ฉใใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซ็‰ฉไฝ“ๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ๅˆ†้กžใ‚’ๅฎŸ่ฃ…ใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ“ใฎใ‚ˆใ†ใซใ€YOLOv8ใฎPythonใ‚คใƒณใ‚ฟใƒผใƒ•ใ‚งใƒผใ‚นใฏใ€ใ“ใ‚Œใ‚‰ใฎๆฉŸ่ƒฝใ‚’Pythonใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซๅ–ใ‚Šๅ…ฅใ‚ŒใŸใ„ใจ่€ƒใˆใฆใ„ใ‚‹ๆ–นใซใจใฃใฆ่ฒด้‡ใชใƒ„ใƒผใƒซใงใ™ใ€‚ + +ใŸใจใˆใฐใ€ใƒฆใƒผใ‚ถใƒผใฏใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ—ใฆใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใ€ๆคœ่จผใ‚ปใƒƒใƒˆใงใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’่ฉ•ไพกใ—ใ€ONNXๅฝขๅผใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ™ใ‚‹ใพใงใฎไธ€้€ฃใฎๅ‡ฆ็†ใ‚’ๆ•ฐ่กŒใฎใ‚ณใƒผใƒ‰ใง่กŒใ†ใ“ใจใŒใงใใพใ™ใ€‚YOLOv8ใ‚’Pythonใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใงไฝฟ็”จใ™ใ‚‹ๆ–นๆณ•ใซใคใ„ใฆ่ฉณใ—ใใฏใ€[Pythonใ‚ฌใ‚คใƒ‰](/../usage/python.md)ใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + ```python + from ultralytics import YOLO + + # ใ‚นใ‚ฏใƒฉใƒƒใƒใ‹ใ‚‰ๆ–ฐใ—ใ„YOLOใƒขใƒ‡ใƒซใ‚’ไฝœๆˆ + model = YOLO('yolov8n.yaml') + + # ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒ‰ใ•ใ‚ŒใŸYOLOใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰๏ผˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซๆŽจๅฅจ๏ผ‰ + model = YOLO('yolov8n.pt') + + # 'coco128.yaml'ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ไฝฟ็”จใ—ใฆ3ใ‚จใƒใƒƒใ‚ฏใงใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco128.yaml', epochs=3) + + # ใƒขใƒ‡ใƒซใฎใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚นใ‚’ๆคœ่จผใ‚ปใƒƒใƒˆใง่ฉ•ไพก + results = model.val() + + # ใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใง็‰ฉไฝ“ๆคœๅ‡บใ‚’ๅฎŸ่กŒ + results = model('https://ultralytics.com/images/bus.jpg') + + # ใƒขใƒ‡ใƒซใ‚’ONNXๅฝขๅผใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + success = model.export(format='onnx') + ``` + +[Pythonใ‚ฌใ‚คใƒ‰](/../usage/python.md){.md-button .md-button--primary} diff --git a/ultralytics/docs/ja/quickstart.md:Zone.Identifier b/ultralytics/docs/ja/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/tasks/classify.md b/ultralytics/docs/ja/tasks/classify.md new file mode 100755 index 0000000..def346f --- /dev/null +++ b/ultralytics/docs/ja/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: YOLOv8 ๅˆ†้กžใƒขใƒ‡ใƒซใซใคใ„ใฆใฎ็”ปๅƒๅˆ†้กžใ€‚ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใฎใƒชใ‚นใƒˆใจใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ๆคœ่จผใ€ไบˆๆธฌใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๆ–นๆณ•ใฎ่ฉณ็ดฐๆƒ…ๅ ฑใ‚’ๅญฆใณใพใ™ใ€‚ +keywords: Ultralytics, YOLOv8, ็”ปๅƒๅˆ†้กž, ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซ, YOLOv8n-cls, ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, ๆคœ่จผ, ไบˆๆธฌ, ใƒขใƒ‡ใƒซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ +--- + +# ็”ปๅƒๅˆ†้กž + +็”ปๅƒๅˆ†้กžใฎไพ‹ + +็”ปๅƒๅˆ†้กžใฏ3ใคใฎใ‚ฟใ‚นใ‚ฏใฎไธญใงๆœ€ใ‚‚ๅ˜็ด”ใงใ€1ๆžšใฎ็”ปๅƒใ‚’ใ‚ใ‚‰ใ‹ใ˜ใ‚ๅฎš็พฉใ•ใ‚ŒใŸใ‚ฏใƒฉใ‚นใฎใ‚ปใƒƒใƒˆใซๅˆ†้กžใ—ใพใ™ใ€‚ + +็”ปๅƒๅˆ†้กžๅ™จใฎๅ‡บๅŠ›ใฏๅ˜ไธ€ใฎใ‚ฏใƒฉใ‚นใƒฉใƒ™ใƒซใจไฟก้ 
ผๅบฆใ‚นใ‚ณใ‚ขใงใ™ใ€‚็”ปๅƒใŒใฉใฎใ‚ฏใƒฉใ‚นใซๅฑžใ™ใ‚‹ใ‹ใ ใ‘ใ‚ใ‹ใ‚Œใฐใ‚ˆใใ€ใใฎใ‚ฏใƒฉใ‚นใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒใฉใ“ใซใ‚ใ‚‹ใ‹ใ‚„ๆญฃ็ขบใชๅฝข็Šถใพใงใฏๅฟ…่ฆใจใ—ใชใ„ๅ ดๅˆใซใ€็”ปๅƒๅˆ†้กžใŒๅฝน็ซ‹ใกใพใ™ใ€‚
+
+!!! Tip "ใƒ’ใƒณใƒˆ"
+
+    YOLOv8 ๅˆ†้กžใƒขใƒ‡ใƒซใฏ `-cls` ๆŽฅๅฐพ่พžใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ไพ‹: `yolov8n-cls.pt`ใ€‚ใ“ใ‚Œใ‚‰ใฏ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ใงไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+## [ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ใ“ใ“ใซไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸ YOLOv8 ๅˆ†้กžใƒขใƒ‡ใƒซใŒ่กจ็คบใ•ใ‚Œใฆใ„ใพใ™ใ€‚ๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ใƒใƒผใ‚บใƒขใƒ‡ใƒซใฏ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใŒใ€ๅˆ†้กžใƒขใƒ‡ใƒซใฏ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ใงไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+[ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ใฏๅˆๅ›žไฝฟ็”จๆ™‚ใซ Ultralytics ใฎๆœ€ๆ–ฐ [ใƒชใƒชใƒผใ‚น](https://github.com/ultralytics/assets/releases) ใ‹ใ‚‰่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚
+
+| ใƒขใƒ‡ใƒซ | ใ‚ตใ‚คใ‚บ (ใƒ”ใ‚ฏใ‚ปใƒซ) | ๆญฃ็ขบๆ€ง ใƒˆใƒƒใƒ—1 | ๆญฃ็ขบๆ€ง ใƒˆใƒƒใƒ—5 | ใ‚นใƒ”ใƒผใƒ‰ CPU ONNX (ms) | ใ‚นใƒ”ใƒผใƒ‰ A100 TensorRT (ms) | ใƒ‘ใƒฉใƒกใƒผใ‚ฟ (M) | FLOPs (B) at 640 |
+|----------------------------------------------------------------------------------------------|------------|--------|--------|------|------|------|-------|
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 |
+
+- **ๆญฃ็ขบๆ€ง** ใฎๅ€คใฏ [ImageNet](https://www.image-net.org/) ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๆคœ่จผใ‚ปใƒƒใƒˆใงใฎใƒขใƒ‡ใƒซใฎๆญฃ็ขบๆ€งใงใ™ใ€‚
ๅ†็พใ™ใ‚‹ใซใฏ `yolo val classify data=path/to/ImageNet device=0` +- **ใ‚นใƒ”ใƒผใƒ‰** ใฏ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝฟ็”จใ—ใฆ ImageNet ๆคœ่จผ็”ปๅƒใ‚’ๅนณๅ‡ๅŒ–ใ—ใŸใ‚‚ใฎใงใ™ใ€‚ +
ๅ†็พใ™ใ‚‹ใซใฏ `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +็”ปๅƒใ‚ตใ‚คใ‚บ64ใง100ใ‚จใƒใƒƒใ‚ฏใซใ‚ใŸใฃใฆMNIST160ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใซYOLOv8n-clsใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใพใ™ใ€‚ๅˆฉ็”จๅฏ่ƒฝใชๅผ•ๆ•ฐใฎๅฎŒๅ…จใชใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€[่จญๅฎš](/../usage/cfg.md) ใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n-cls.yaml') # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ + model = YOLO('yolov8n-cls.pt') # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰๏ผˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซๆŽจๅฅจ๏ผ‰ + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # YAMLใ‹ใ‚‰ใƒ“ใƒซใƒ‰ใ—ใฆใ‚ฆใ‚งใ‚คใƒˆใ‚’่ปข้€ + + # ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ—ใ€ใ‚ผใƒญใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # ไบ‹ๅ‰ใซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸ *.pt ใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ—ใ€ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใ‚ฆใ‚งใ‚คใƒˆใ‚’่ปข้€ใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ + +YOLOๅˆ†้กžใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใฎ่ฉณ็ดฐใฏ [ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚ฌใ‚คใƒ‰](../../../datasets/classify/index.md) ใซใ‚ใ‚Šใพใ™ใ€‚ + +## ๆคœ่จผ + +MNIST160ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv8n-clsใƒขใƒ‡ใƒซใฎๆญฃ็ขบๆ€งใ‚’ๆคœ่จผใ—ใพใ™ใ€‚ๅผ•ๆ•ฐใฏๅฟ…่ฆใ‚ใ‚Šใพใ›ใ‚“ใ€‚`model` ใฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆ™‚ใฎ `data` ใŠใ‚ˆใณๅผ•ๆ•ฐใ‚’ใƒขใƒ‡ใƒซๅฑžๆ€งใจใ—ใฆไฟๆŒใ—ใฆใ„ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n-cls.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผใ™ใ‚‹ + metrics = model.val() # ๅผ•ๆ•ฐไธ่ฆใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจ่จญๅฎšใฏ่จ˜ๆ†ถใ•ใ‚Œใฆใ„ใ‚‹ + metrics.top1 # ใƒˆใƒƒใƒ—1ใฎๆญฃ็ขบๆ€ง + metrics.top5 # ใƒˆใƒƒใƒ—5ใฎๆญฃ็ขบๆ€ง + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + yolo classify val model=path/to/best.pt # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + ``` + +## ไบˆๆธฌ + +ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv8n-clsใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆใ€็”ปๅƒใซๅฏพใ™ใ‚‹ไบˆๆธฌใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n-cls.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใงไบˆๆธฌใ™ใ‚‹ + results = model('https://ultralytics.com/images/bus.jpg') # ็”ปๅƒใงไบˆๆธฌ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # ๅ…ฌๅผใƒขใƒ‡ใƒซใงไบˆๆธฌ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใงไบˆๆธฌ + ``` + +`predict` ใƒขใƒผใƒ‰ใฎๅฎŒๅ…จใช่ฉณ็ดฐใฏ [ไบˆๆธฌ](https://docs.ultralytics.com/modes/predict/) ใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +## ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + +YOLOv8n-clsใƒขใƒ‡ใƒซใ‚’ONNXใ€CoreMLใชใฉใฎ็•ฐใชใ‚‹ๅฝขๅผใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n-cls.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ™ใ‚‹ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + yolo export model=path/to/best.pt format=onnx # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + ``` + +ๅˆฉ็”จๅฏ่ƒฝใช YOLOv8-cls ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใฏไปฅไธ‹ใฎ่กจใซใ‚ใ‚Šใพใ™ใ€‚ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใง็›ดๆŽฅไบˆๆธฌใพใŸใฏๆคœ่จผใŒๅฏ่ƒฝใงใ™ใ€ไพ‹: `yolo predict model=yolov8n-cls.onnx`ใ€‚ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฎŒไบ†ๅพŒใ€ใƒขใƒ‡ใƒซใฎไฝฟ็”จไพ‹ใŒ่กจ็คบใ•ใ‚Œใพใ™ใ€‚ + +| ๅฝขๅผ | `format` ๅผ•ๆ•ฐ | ใƒขใƒ‡ใƒซ | ใƒกใ‚ฟใƒ‡ใƒผใ‚ฟ | ๅผ•ๆ•ฐ | +|--------------------------------------------------------------------|---------------|-------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` 
| + +`export` ใฎ่ฉณ็ดฐใฏ [ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ](https://docs.ultralytics.com/modes/export/) ใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ diff --git a/ultralytics/docs/ja/tasks/classify.md:Zone.Identifier b/ultralytics/docs/ja/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/tasks/detect.md b/ultralytics/docs/ja/tasks/detect.md new file mode 100755 index 0000000..3bc24c1 --- /dev/null +++ b/ultralytics/docs/ja/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Ultralyticsใฎๅ…ฌๅผใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆ YOLOv8ใ€‚ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ๆคœ่จผใ€ไบˆๆธฌใ€ใใ—ใฆๆง˜ใ€…ใชใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใงใฎใƒขใƒ‡ใƒซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๆ–นๆณ•ใ‚’ๅญฆใถใ€‚่ฉณ็ดฐใชใƒ‘ใƒ•ใ‚ฉใƒผใƒžใƒณใ‚น็ตฑ่จˆใ‚‚ๅซใ‚€ใ€‚ +keywords: YOLOv8, Ultralytics, ็‰ฉไฝ“ๆคœๅ‡บ, ไบ‹ๅ‰่จ“็ทดๆธˆใฟใƒขใƒ‡ใƒซ, ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, ๆคœ่จผ, ไบˆๆธฌ, ใƒขใƒ‡ใƒซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# ็‰ฉไฝ“ๆคœๅ‡บ + +็‰ฉไฝ“ๆคœๅ‡บใฎไพ‹ + +็‰ฉไฝ“ๆคœๅ‡บใจใฏใ€็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใ‚นใƒˆใƒชใƒผใƒ ๅ†…ใฎ็‰ฉไฝ“ใฎไฝ็ฝฎใจใ‚ฏใƒฉใ‚นใ‚’็‰นๅฎšใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ + +็‰ฉไฝ“ๆคœๅ‡บๅ™จใฎๅ‡บๅŠ›ใฏใ€็”ปๅƒๅ†…ใฎ็‰ฉไฝ“ใ‚’ๅ›ฒใ‚€ไธ€้€ฃใฎใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นใงใ‚ใ‚Šใ€ๅ„ใƒœใƒƒใ‚ฏใ‚นใซใฏใ‚ฏใƒฉใ‚นใƒฉใƒ™ใƒซใจไฟก้ ผๅบฆใ‚นใ‚ณใ‚ขใŒไป˜ใ‘ใ‚‰ใ‚Œใพใ™ใ€‚ใ‚ทใƒผใƒณๅ†…ใฎ้–ขๅฟƒๅฏพ่ฑกใ‚’่ญ˜ๅˆฅใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚‹ใŒใ€ใใฎ็‰ฉไฝ“ใฎๆญฃ็ขบใชไฝ็ฝฎใ‚„ๅฝข็Šถใพใงใฏๅฟ…่ฆใชใ„ๅ ดๅˆใซใ€็‰ฉไฝ“ๆคœๅ‡บใŒ้ฉใ—ใฆใ„ใพใ™ใ€‚ + +
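+ๅ‚่€ƒใพใงใซใ€ไบˆๆธฌ็ตๆžœใ‹ใ‚‰ใƒใ‚ฆใƒณใƒ‡ใ‚ฃใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚นใƒปใ‚ฏใƒฉใ‚นใƒปไฟก้ ผๅบฆใ‚นใ‚ณใ‚ขใ‚’ๅ–ใ‚Šๅ‡บใ™ๆœ€ๅฐใ‚นใ‚ฑใƒƒใƒใ‚’็คบใ—ใพใ™๏ผˆ`Results`ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎ`boxes`ๅฑžๆ€งใ‚’ๅ‰ๆใจใ—ใŸไพ‹ใงใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# ๅ„ๆคœๅ‡บใซใคใ„ใฆใƒœใƒƒใ‚ฏใ‚นๅบงๆจ™๏ผˆxyxy๏ผ‰ใ€ใ‚ฏใƒฉใ‚นIDใ€ไฟก้ ผๅบฆใ‚’่กจ็คบ
+for box in results[0].boxes:
+    print(box.xyxy.tolist(), int(box.cls), float(box.conf))
+```
+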
+**่ฆ–่ดใ™ใ‚‹:** Ultralyticsใฎไบ‹ๅ‰่จ“็ทดๆธˆใฟYOLOv8ใƒขใƒ‡ใƒซใซใ‚ˆใ‚‹็‰ฉไฝ“ๆคœๅ‡บใ€‚๏ผˆๅŸ‹ใ‚่พผใฟๅ‹•็”ป๏ผ‰
+
+!!! Tip "ใƒ’ใƒณใƒˆ"
+
+    YOLOv8 Detectใƒขใƒ‡ใƒซใฏใ€ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฎYOLOv8ใƒขใƒ‡ใƒซใ€ใคใพใ‚Š`yolov8n.pt`ใงใ‚ใ‚Šใ€[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ใงไบ‹ๅ‰่จ“็ทดใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+## [ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ไบ‹ๅ‰่จ“็ทดใ•ใ‚ŒใŸYOLOv8 Detectใƒขใƒ‡ใƒซใŒใ“ใกใ‚‰ใซ็คบใ•ใ‚Œใพใ™ใ€‚Detect, Segment, Poseใƒขใƒ‡ใƒซใฏ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใ€Classifyใƒขใƒ‡ใƒซใฏ[ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰่จ“็ทดใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+[ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)ใฏใ€ๆœ€ๅˆใฎไฝฟ็”จๆ™‚ใซUltralyticsใฎๆœ€ๆ–ฐใฎ[ใƒชใƒชใƒผใ‚น](https://github.com/ultralytics/assets/releases)ใ‹ใ‚‰่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚
+
+| ใƒขใƒ‡ใƒซ | ใ‚ตใ‚คใ‚บ (ใƒ”ใ‚ฏใ‚ปใƒซ) | mAPval 50-95 | ้€Ÿๅบฆ CPU ONNX (ms) | ้€Ÿๅบฆ A100 TensorRT (ms) | ใƒ‘ใƒฉใƒกใƒผใ‚ฟๆ•ฐ (M) | FLOPs (B) |
+|--------------------------------------------------------------------------------------|------------|--------|-------|------|------|-------|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- **mAPval** ใฎๅ€คใฏ[COCO val2017](http://cocodataset.org)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใซใŠใ„ใฆใ€ๅ˜ไธ€ใƒขใƒ‡ใƒซๅ˜ไธ€ใ‚นใ‚ฑใƒผใƒซใงใฎใ‚‚ใฎใงใ™ใ€‚
ๅ†็พๆ–นๆณ•: `yolo val detect data=coco.yaml device=0` +- **้€Ÿๅบฆ** ใฏ[Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝฟ็”จใ—ใฆCOCO val็”ปๅƒใซๅฏพใ—ใฆๅนณๅ‡ๅŒ–ใ•ใ‚ŒใŸใ‚‚ใฎใงใ™ใ€‚ +
ๅ†็พๆ–นๆณ•: `yolo val detect data=coco128.yaml batch=1 device=0|cpu` + +## ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +YOLOv8nใ‚’็”ปๅƒใ‚ตใ‚คใ‚บ640ใงCOCO128ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใซๅฏพใ—ใฆ100ใ‚จใƒใƒƒใ‚ฏใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใพใ™ใ€‚ไฝฟ็”จๅฏ่ƒฝใชๅผ•ๆ•ฐใฎๅฎŒๅ…จใชใƒชใ‚นใƒˆใซใคใ„ใฆใฏใ€[่จญๅฎš](/../usage/cfg.md)ใƒšใƒผใ‚ธใ‚’ใ”่ฆงใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n.yaml') # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ๆง‹็ฏ‰ + model = YOLO('yolov8n.pt') # ไบ‹ๅ‰่จ“็ทดๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰๏ผˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซๆŽจๅฅจ๏ผ‰ + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # YAMLใ‹ใ‚‰ๆง‹็ฏ‰ใ—ใ€้‡ใฟใ‚’่ปข้€ + + # ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ™ใ‚‹ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใ€ใ‚ผใƒญใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ไบ‹ๅ‰่จ“็ทดๆธˆใฟใฎ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ไฝœๆˆใ—ใ€ไบ‹ๅ‰่จ“็ทดๆธˆใฟใฎ้‡ใฟใ‚’่ปข้€ใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅฝขๅผ + +YOLOๆคœๅ‡บใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅฝขๅผใฎ่ฉณ็ดฐใฏใ€[ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚ฌใ‚คใƒ‰](../../../datasets/detect/index.md)ใซ่จ˜่ผ‰ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ไป–ใฎๅฝขๅผ๏ผˆCOCO็ญ‰๏ผ‰ใ‹ใ‚‰YOLOๅฝขๅผใซๆ—ขๅญ˜ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๅค‰ๆ›ใ™ใ‚‹ใซใฏใ€Ultralyticsใฎ[JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)ใƒ„ใƒผใƒซใ‚’ใ”ๅˆฉ็”จใใ ใ•ใ„ใ€‚ + +## ๆคœ่จผ + +ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใฎ็ฒพๅบฆใ‚’COCO128ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงๆคœ่จผใ—ใพใ™ใ€‚ๅผ•ๆ•ฐใฏไธ่ฆใงใ€ใƒขใƒ‡ใƒซใฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใฎ`data`ใจๅผ•ๆ•ฐใ‚’ใƒขใƒ‡ใƒซๅฑžๆ€งใจใ—ใฆไฟๆŒใ—ใฆใ„ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผใ™ใ‚‹ + metrics = model.val() # ๅผ•ๆ•ฐไธ่ฆใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจ่จญๅฎšใฏ่จ˜ๆ†ถใ•ใ‚Œใฆใ„ใ‚‹ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅ„ใ‚ซใƒ†ใ‚ดใƒชใฎmap50-95ใ‚’ๅซใ‚€ใƒชใ‚นใƒˆ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + yolo detect val model=ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + ``` + +## ไบˆๆธฌ + +ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8nใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใฎไบˆๆธฌใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ใ™ใ‚‹ + model = YOLO('yolov8n.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใงไบˆๆธฌ + results = model('https://ultralytics.com/images/bus.jpg') # ็”ปๅƒใฎไบˆๆธฌๅฎŸ่กŒ + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # ๅ…ฌๅผใƒขใƒ‡ใƒซใงไบˆๆธฌ + yolo detect predict model=ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt source='https://ultralytics.com/images/bus.jpg' # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใงไบˆๆธฌ + ``` + +`predict`ใƒขใƒผใƒ‰ใฎ่ฉณ็ดฐใฏใ€[Predict](https://docs.ultralytics.com/modes/predict/)ใƒšใƒผใ‚ธใงๅ…จใฆ่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +## ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + +YOLOv8nใƒขใƒ‡ใƒซใ‚’ONNXใ€CoreMLใชใฉใฎ็•ฐใชใ‚‹ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + yolo export model=ใƒ‘ใ‚น/ใƒ™ใ‚นใƒˆ.pt format=onnx # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + ``` + +YOLOv8ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฏ่ƒฝใชใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใฎใƒ†ใƒผใƒ–ใƒซใฏไปฅไธ‹ใงใ™ใ€‚ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฎŒไบ†ๅพŒใซใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใง็›ดๆŽฅไบˆๆธฌใพใŸใฏๆคœ่จผใŒๅฏ่ƒฝใงใ™ใ€‚ใคใพใ‚Šใ€`yolo predict model=yolov8n.onnx` ใงใ™ใ€‚ไฝฟ็”จไพ‹ใฏใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฎŒไบ†ๅพŒใซใƒขใƒ‡ใƒซใซ่กจ็คบใ•ใ‚Œใพใ™ใ€‚ + +| ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ | `format`ๅผ•ๆ•ฐ | ใƒขใƒ‡ใƒซ | ใƒกใ‚ฟใƒ‡ใƒผใ‚ฟ | ๅผ•ๆ•ฐ | +|--------------------------------------------------------------------|---------------|---------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + 
+`export`ใฎ่ฉณ็ดฐใฏใ€[Export](https://docs.ultralytics.com/modes/export/)ใƒšใƒผใ‚ธใงๅ…จใฆ่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/tasks/detect.md:Zone.Identifier b/ultralytics/docs/ja/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/tasks/index.md b/ultralytics/docs/ja/tasks/index.md new file mode 100755 index 0000000..bc5d3b7 --- /dev/null +++ b/ultralytics/docs/ja/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: YOLOv8ใŒๅฎŸ่กŒใงใใ‚‹ๅŸบๆœฌ็š„ใชใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒผใƒ“ใ‚ธใƒงใƒณใ‚ฟใ‚นใ‚ฏใซใคใ„ใฆๅญฆใณใ€ๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ๅˆ†้กžใ€ใƒใƒผใ‚บ่ช่ญ˜ใŒAIใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใงใฉใฎใ‚ˆใ†ใซไฝฟ็”จใ•ใ‚Œใ‚‹ใ‹ใ‚’็†่งฃใ—ใพใ™ใ€‚ +keywords: Ultralytics, YOLOv8, ๆคœๅ‡บ, ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ, ๅˆ†้กž, ใƒใƒผใ‚บๆŽจๅฎš, AIใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏ, ใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒผใƒ“ใ‚ธใƒงใƒณใ‚ฟใ‚นใ‚ฏ +--- + +# Ultralytics YOLOv8ใ‚ฟใ‚นใ‚ฏ + +
+Ultralytics YOLOใŒใ‚ตใƒใƒผใƒˆใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏ + +YOLOv8ใฏใ€่ค‡ๆ•ฐใฎใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒผใƒ“ใ‚ธใƒงใƒณ**ใ‚ฟใ‚นใ‚ฏ**ใ‚’ใ‚ตใƒใƒผใƒˆใ™ใ‚‹AIใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏใงใ™ใ€‚ใ“ใฎใƒ•ใƒฌใƒผใƒ ใƒฏใƒผใ‚ฏใฏใ€[ๆคœๅ‡บ](detect.md)ใ€[ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](segment.md)ใ€[ๅˆ†้กž](classify.md)ใ€ๅŠใณ[ใƒใƒผใ‚บ](pose.md)ๆŽจๅฎšใ‚’ๅฎŸ่กŒใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใงใใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใ‚ฟใ‚นใ‚ฏใฏใใ‚Œใžใ‚Œ็•ฐใชใ‚‹็›ฎ็š„ใจ็”จ้€”ใ‚’ๆŒใฃใฆใ„ใพใ™ใ€‚ + +!!! Note "ใƒŽใƒผใƒˆ" + + ๐Ÿšง ๅฝ“็คพใฎๅคš่จ€่ชžใƒ‰ใ‚ญใƒฅใƒกใƒณใƒˆใฏ็พๅœจๅปบ่จญไธญใงใ‚ใ‚Šใ€ๆ”นๅ–„ใฎใŸใ‚ใซไธ€็”Ÿๆ‡ธๅ‘ฝไฝœๆฅญใ‚’่กŒใฃใฆใ„ใพใ™ใ€‚ใ”็†่งฃใ„ใŸใ ใใ‚ใ‚ŠใŒใจใ†ใ”ใ–ใ„ใพใ™๏ผ๐Ÿ™ + +
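+ใชใŠใ€ใ“ใ‚Œใ‚‰ใฎใ‚ฟใ‚นใ‚ฏใฏใ„ใšใ‚Œใ‚‚ๅŒใ˜`YOLO`ใ‚ฏใƒฉใ‚นใ‹ใ‚‰ๆ‰ฑใˆใ€ใƒญใƒผใƒ‰ใ™ใ‚‹้‡ใฟใƒ•ใ‚กใ‚คใƒซใซๅฟœใ˜ใฆใ‚ฟใ‚นใ‚ฏใŒๆฑบใพใ‚Šใพใ™ใ€‚ไปฅไธ‹ใฏใใฎใ‚คใƒกใƒผใ‚ธใ‚’็คบใ™ๅ‚่€ƒใ‚นใ‚ฑใƒƒใƒใงใ™๏ผˆ`model.task`ๅฑžๆ€งใฎๅˆฉ็”จใฏไธ€่ˆฌ็š„ใชAPIใ‚’ๅ‰ๆใจใ—ใŸๆƒณๅฎšใงใ™๏ผ‰ใ€‚
+
+```python
+from ultralytics import YOLO
+
+# ๅŒใ˜APIใงใ€้‡ใฟใƒ•ใ‚กใ‚คใƒซใฎ็จฎ้กžใซๅฟœใ˜ใŸใ‚ฟใ‚นใ‚ฏใฎใƒขใƒ‡ใƒซใŒใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™
+for weights in ['yolov8n.pt', 'yolov8n-seg.pt', 'yolov8n-cls.pt', 'yolov8n-pose.pt']:
+    model = YOLO(weights)
+    print(weights, '->', model.task)  # ไพ‹: detect / segment / classify / pose
+```
+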
+**่ฆ–่ดใ™ใ‚‹:** Ultralytics YOLOใ‚ฟใ‚นใ‚ฏใฎๆŽข็ดข๏ผšใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐใ€ใƒใƒผใ‚บๆŽจๅฎšใ€‚๏ผˆๅŸ‹ใ‚่พผใฟๅ‹•็”ป๏ผ‰
+ +## [ๆคœๅ‡บ](detect.md) + +ๆคœๅ‡บใฏYOLOv8ใŒใ‚ตใƒใƒผใƒˆใ™ใ‚‹ๅŸบๆœฌ็š„ใชใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ใใ‚Œใฏ็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๆคœๅ‡บใ—ใ€ๅ‘จๅ›ฒใซๅขƒ็•Œใƒœใƒƒใ‚ฏใ‚นใ‚’ๆใใ“ใจใ‚’ๅซใฟใพใ™ใ€‚ๆคœๅ‡บใ•ใ‚ŒใŸใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฏใใฎ็‰นๅพดใซๅŸบใฅใ„ใฆ็•ฐใชใ‚‹ใ‚ซใƒ†ใ‚ดใƒชใƒผใซๅˆ†้กžใ•ใ‚Œใพใ™ใ€‚YOLOv8ใฏไธ€ๆžšใฎ็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ใซ่ค‡ๆ•ฐใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’้ซ˜ใ„็ฒพๅบฆใจ้€Ÿๅบฆใงๆคœๅ‡บใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +[ๆคœๅ‡บไพ‹](detect.md){ .md-button } + +## [ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ](segment.md) + +ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใฏใ€็”ปๅƒใฎๅ†…ๅฎนใซๅŸบใฅใ„ใฆ็”ปๅƒใ‚’็•ฐใชใ‚‹้ ˜ๅŸŸใซๅˆ†ๅ‰ฒใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ๅ„้ ˜ๅŸŸใฏใใฎๅ†…ๅฎนใซๅŸบใฅใ„ใฆใƒฉใƒ™ใƒซใŒๅ‰ฒใ‚Šๅฝ“ใฆใ‚‰ใ‚Œใพใ™ใ€‚ใ“ใฎใ‚ฟใ‚นใ‚ฏใฏใ€็”ปๅƒๅˆ†ๅ‰ฒใ‚„ๅŒป็™‚็”ปๅƒๅ‡ฆ็†ใชใฉใฎใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซใŠใ„ใฆๆœ‰็”จใงใ™ใ€‚YOLOv8ใฏU-Netใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎใƒใƒชใ‚จใƒผใ‚ทใƒงใƒณใ‚’ไฝฟ็”จใ—ใฆใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + +[ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณไพ‹](segment.md){ .md-button } + +## [ๅˆ†้กž](classify.md) + +ๅˆ†้กžใฏใ€็”ปๅƒใ‚’็•ฐใชใ‚‹ใ‚ซใƒ†ใ‚ดใƒชใƒผใซๅˆ†้กžใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚YOLOv8ใฏ็”ปๅƒใฎๅ†…ๅฎนใซๅŸบใฅใ„ใฆ็”ปๅƒใ‚’ๅˆ†้กžใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใงใใพใ™ใ€‚ใใ‚ŒใฏEfficientNetใ‚ขใƒผใ‚ญใƒ†ใ‚ฏใƒใƒฃใฎใƒใƒชใ‚จใƒผใ‚ทใƒงใƒณใ‚’ไฝฟ็”จใ—ใฆๅˆ†้กžใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + +[ๅˆ†้กžไพ‹](classify.md){ .md-button } + +## [ใƒใƒผใ‚บ](pose.md) + +ใƒใƒผใ‚บ/ใ‚ญใƒผใƒใ‚คใƒณใƒˆๆคœๅ‡บใฏใ€็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ๅ†…ใฎ็‰นๅฎšใฎ็‚นใ‚’ๆคœๅ‡บใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎ็‚นใฏใ‚ญใƒผใƒใ‚คใƒณใƒˆใจๅ‘ผใฐใ‚Œใ€ๅ‹•ใใ‚„ใƒใƒผใ‚บๆŽจๅฎšใ‚’่ฟฝ่ทกใ™ใ‚‹ใŸใ‚ใซไฝฟ็”จใ•ใ‚Œใพใ™ใ€‚YOLOv8ใฏ้ซ˜ใ„็ฒพๅบฆใจ้€Ÿๅบฆใง็”ปๅƒใ‚„ใƒ“ใƒ‡ใ‚ชใƒ•ใƒฌใƒผใƒ ๅ†…ใฎใ‚ญใƒผใƒใ‚คใƒณใƒˆใ‚’ๆคœๅ‡บใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + +[ใƒใƒผใ‚บไพ‹](pose.md){ .md-button } + +## ็ต่ซ– + +YOLOv8ใฏใ€ๆคœๅ‡บใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ€ๅˆ†้กžใ€ใ‚ญใƒผใƒใ‚คใƒณใƒˆๆคœๅ‡บใ‚’ๅซใ‚€่ค‡ๆ•ฐใฎใ‚ฟใ‚นใ‚ฏใ‚’ใ‚ตใƒใƒผใƒˆใ—ใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใ‚ฟใ‚นใ‚ฏใฏใใ‚Œใžใ‚Œ็•ฐใชใ‚‹็›ฎ็š„ใจ็”จ้€”ใ‚’ๆŒใฃใฆใ„ใพใ™ใ€‚ใ“ใ‚Œใ‚‰ใฎใ‚ฟใ‚นใ‚ฏใฎ้•ใ„ใ‚’็†่งฃใ™ใ‚‹ใ“ใจใซใ‚ˆใ‚Šใ€ใ‚ณใƒณใƒ”ใƒฅใƒผใ‚ฟใƒผใƒ“ใ‚ธใƒงใƒณใ‚ขใƒ—ใƒชใ‚ฑใƒผใ‚ทใƒงใƒณใซ้ฉๅˆ‡ใชใ‚ฟใ‚นใ‚ฏใ‚’้ธๆŠžใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/tasks/index.md:Zone.Identifier b/ultralytics/docs/ja/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/tasks/pose.md b/ultralytics/docs/ja/tasks/pose.md new file mode 100755 index 0000000..d4acc94 --- /dev/null +++ b/ultralytics/docs/ja/tasks/pose.md @@ -0,0 +1,185 @@ +--- +comments: true +description: Ultralytics YOLOv8ใ‚’ไฝฟ็”จใ—ใฆใƒใƒผใ‚บๆŽจๅฎšใ‚ฟใ‚นใ‚ฏใ‚’่กŒใ†ๆ–นๆณ•ใ‚’ๅญฆใณใพใ™ใ€‚ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎใƒขใƒ‡ใƒซใ‚’่ฆ‹ใคใ‘ใ€ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ๆคœ่จผใ€ไบˆๆธฌใ€็‹ฌ่‡ชใฎใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ‚’่กŒใ„ใพใ™ใ€‚ +keywords: Ultralytics, YOLO, YOLOv8, ใƒใƒผใ‚บๆŽจๅฎš, ใ‚ญใƒผใƒใ‚คใƒณใƒˆๆคœๅ‡บ, ็‰ฉไฝ“ๆคœๅ‡บ, ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซ, ๆฉŸๆขฐๅญฆ็ฟ’, ไบบๅทฅ็Ÿฅ่ƒฝ +--- + +# ใƒใƒผใ‚บๆŽจๅฎš + +ใƒใƒผใ‚บๆŽจๅฎšไพ‹ + 
+ใƒใƒผใ‚บๆŽจๅฎšใฏใ€้€šๅธธใ‚ญใƒผใƒใ‚คใƒณใƒˆใจใ—ใฆๅ‚็…งใ•ใ‚Œใ‚‹็”ปๅƒๅ†…ใฎ็‰นๅฎšใฎ็‚นใฎไฝ็ฝฎใ‚’่ญ˜ๅˆฅใ™ใ‚‹ใ‚ฟใ‚นใ‚ฏใงใ™ใ€‚ใ‚ญใƒผใƒใ‚คใƒณใƒˆใฏใ€้–ข็ฏ€ใ€ใƒฉใƒณใƒ‰ใƒžใƒผใ‚ฏใ€ใพใŸใฏใใฎไป–ใฎ็‰นๅพด็š„ใช็‰นๅพดใชใฉใ€ๅฏพ่ฑก็‰ฉใฎใ•ใพใ–ใพใช้ƒจๅˆ†ใ‚’่กจใ™ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ญใƒผใƒใ‚คใƒณใƒˆใฎไฝ็ฝฎใฏใ€้€šๅธธ2Dใฎ `[x, y]` ใพใŸใฏ3D `[x, y, visible]` ๅบงๆจ™ใฎใ‚ปใƒƒใƒˆใจใ—ใฆ่กจใ•ใ‚Œใพใ™ใ€‚ + +ใƒใƒผใ‚บๆŽจๅฎšใƒขใƒ‡ใƒซใฎๅ‡บๅŠ›ใฏใ€็”ปๅƒๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆไธŠใฎใ‚ญใƒผใƒใ‚คใƒณใƒˆใ‚’่กจใ™ไธ€้€ฃใฎ็‚นใงใ‚ใ‚Šใ€้€šๅธธใฏๅ„็‚นใฎไฟก้ ผใ‚นใ‚ณใ‚ขใ‚’ไผดใ„ใพใ™ใ€‚ใƒใƒผใ‚บๆŽจๅฎšใฏใ€ใ‚ทใƒผใƒณๅ†…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎ็‰นๅฎšใฎ้ƒจๅˆ†ใจใ€ใใ‚Œใ‚‰ใŒไบ’ใ„ใซๅฏพใ—ใฆไฝ็ฝฎใ™ใ‚‹ๅ ดๆ‰€ใ‚’็‰นๅฎšใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚‹ๅ ดๅˆใซ้ฉใ—ใฆใ„ใพใ™ใ€‚ + +
+**่ฆ–่ด:** Ultralytics YOLOv8ใซใ‚ˆใ‚‹ใƒใƒผใ‚บๆŽจๅฎšใ€‚๏ผˆๅŸ‹ใ‚่พผใฟๅ‹•็”ป๏ผ‰
+
+!!! Tip "ใƒ’ใƒณใƒˆ"
+
+    YOLOv8 _pose_ ใƒขใƒ‡ใƒซใฏ `-pose` ใ‚ตใƒ•ใ‚ฃใƒƒใ‚ฏใ‚นใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ไพ‹๏ผš`yolov8n-pose.pt`ใ€‚ใ“ใ‚Œใ‚‰ใฎใƒขใƒ‡ใƒซใฏ [COCOใ‚ญใƒผใƒใ‚คใƒณใƒˆ](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใ€ๅคšๆง˜ใชใƒใƒผใ‚บๆŽจๅฎšใ‚ฟใ‚นใ‚ฏใซ้ฉใ—ใฆใ„ใพใ™ใ€‚
+
+## [ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+YOLOv8ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒใƒผใ‚บใƒขใƒ‡ใƒซใฏใ“ใกใ‚‰ใงใ™ใ€‚Detect, Segment, Poseใƒขใƒ‡ใƒซใฏ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใ€Classifyใƒขใƒ‡ใƒซใฏ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+[ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)ใฏๆœ€ๆ–ฐใฎUltralytics [ใƒชใƒชใƒผใ‚น](https://github.com/ultralytics/assets/releases)ใ‹ใ‚‰ๆœ€ๅˆใฎไฝฟ็”จๆ™‚ใซ่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚
+
+| ใƒขใƒ‡ใƒซ | ใ‚ตใ‚คใ‚บ (ใƒ”ใ‚ฏใ‚ปใƒซ) | mAPใƒใƒผใ‚บ 50-95 | mAPใƒใƒผใ‚บ 50 | ้€Ÿๅบฆ CPU ONNX (ใƒŸใƒช็ง’) | ้€Ÿๅบฆ A100 TensorRT (ใƒŸใƒช็ง’) | ใƒ‘ใƒฉใƒกใƒผใ‚ฟ (M) | FLOPs (B) |
+|------------------------------------------------------------------------------------------------------|------------|-------|-------|--------|-------|------|--------|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- **mAPval** ใฎๅ€คใฏใ€[COCO Keypoints val2017](http://cocodataset.org)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใฎๅ˜ไธ€ใƒขใƒ‡ใƒซๅ˜ไธ€ใ‚นใ‚ฑใƒผใƒซใซๅฏพใ™ใ‚‹ใ‚‚ใฎใงใ™ใ€‚
ๅ†็พๆ–นๆณ• `yolo val pose data=coco-pose.yaml device=0` +- **้€Ÿๅบฆ** ใฏ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝฟ็”จใ—ใŸCOCO val็”ปๅƒใฎๅนณๅ‡ใงใ™ใ€‚ +
ๅ†็พๆ–นๆณ• `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +COCO128-poseใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงYOLOv8-poseใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-pose.yaml') # ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’YAMLใ‹ใ‚‰ใƒ“ใƒซใƒ‰ + model = YOLO('yolov8n-pose.pt') # ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰๏ผˆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ็”จใซๆŽจๅฅจ๏ผ‰ + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # YAMLใ‹ใ‚‰ใƒ“ใƒซใƒ‰ใ—ใฆ้‡ใฟใ‚’่ปข้€ + + # ใƒขใƒ‡ใƒซใฎใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ—ใ€ๆœ€ๅˆใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ€ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ้‡ใฟใ‚’่ปข้€ใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ + +YOLOใƒใƒผใ‚บใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใฎ่ฉณ็ดฐใฏใ€[ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚ฌใ‚คใƒ‰](../../../datasets/pose/index.md)ใซ่จ˜่ผ‰ใ•ใ‚Œใฆใ„ใพใ™ใ€‚ๆ—ขๅญ˜ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ไป–ใฎใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ๏ผˆCOCOใชใฉ๏ผ‰ใ‹ใ‚‰YOLOใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซๅค‰ๆ›ใ™ใ‚‹ใซใฏใ€Ultralyticsใฎ[JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ใƒ„ใƒผใƒซใ‚’ใ”ไฝฟ็”จใใ ใ•ใ„ใ€‚ + +## Val + +COCO128-poseใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8n-poseใƒขใƒ‡ใƒซใฎ็ฒพๅบฆใ‚’ๆคœ่จผใ—ใพใ™ใ€‚ๅผ•ๆ•ฐใฏๅฟ…่ฆใชใใ€`model`ใซใฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ`data`ใจๅผ•ๆ•ฐใŒใƒขใƒ‡ใƒซๅฑžๆ€งใจใ—ใฆไฟๆŒใ•ใ‚Œใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-pose.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + metrics = model.val() # ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚„่จญๅฎšใฏ่จ˜้Œฒใ•ใ‚Œใฆใ„ใ‚‹ใŸใ‚ๅผ•ๆ•ฐใฏไธ่ฆ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅ„ใ‚ซใƒ†ใ‚ดใƒชใฎmap50-95ใŒๅซใพใ‚Œใ‚‹ใƒชใ‚นใƒˆ + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + yolo pose val model=path/to/best.pt # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ๆคœ่จผ + ``` + +## Predict + +ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎYOLOv8n-poseใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใ‚’ไบˆๆธฌใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-pose.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใงไบˆๆธฌ + results = model('https://ultralytics.com/images/bus.jpg') # ็”ปๅƒใซไบˆๆธฌใ‚’ๅฎŸ่กŒ + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # ๅ…ฌๅผใƒขใƒ‡ใƒซใงไบˆๆธฌ + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใงไบˆๆธฌ + ``` + +`predict`ใƒขใƒผใƒ‰ใฎ่ฉณ็ดฐใ‚’[Predict](https://docs.ultralytics.com/modes/predict/)ใƒšใƒผใ‚ธใงใ”่ฆงใ„ใŸใ ใ‘ใพใ™ใ€‚ + +## Export + +YOLOv8n Poseใƒขใƒ‡ใƒซใ‚’ONNXใ€CoreMLใชใฉใฎ็•ฐใชใ‚‹ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-pose.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + yolo export model=path/to/best.pt format=onnx # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + ``` + +ๅˆฉ็”จๅฏ่ƒฝใชYOLOv8-poseใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใฏไปฅไธ‹ใฎ่กจใซ็คบใ•ใ‚ŒใฆใŠใ‚Šใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฎŒไบ†ๅพŒใซใŠไฝฟใ„ใฎใƒขใƒ‡ใƒซใซ้–ขใ™ใ‚‹ไฝฟ็”จไพ‹ใŒ็คบใ•ใ‚Œใพใ™ใ€‚ + +| ใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ | `format`ๅผ•ๆ•ฐ | ใƒขใƒ‡ใƒซ | ใƒกใ‚ฟใƒ‡ใƒผใ‚ฟ | ๅผ•ๆ•ฐ | +|--------------------------------------------------------------------|---------------|--------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +`export`ใฎ่ฉณ็ดฐใฏ[Export](https://docs.ultralytics.com/modes/export/)ใƒšใƒผใ‚ธใงใ”่ฆงใ„ใŸใ 
ใ‘ใพใ™ใ€‚ diff --git a/ultralytics/docs/ja/tasks/pose.md:Zone.Identifier b/ultralytics/docs/ja/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ja/tasks/segment.md b/ultralytics/docs/ja/tasks/segment.md new file mode 100755 index 0000000..bc6eae3 --- /dev/null +++ b/ultralytics/docs/ja/tasks/segment.md @@ -0,0 +1,186 @@ +--- +comments: true +description: Ultralytics YOLOใ‚’ไฝฟ็”จใ—ใฆใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒขใƒ‡ใƒซใ‚’ไฝฟใ„ใ“ใชใ™ๆ–นๆณ•ใ‚’ๅญฆใณใพใ—ใ‚‡ใ†ใ€‚ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ€ใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณใ€็”ปๅƒไบˆๆธฌใ€ใƒขใƒ‡ใƒซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใซ้–ขใ™ใ‚‹ๆŒ‡็คบใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ +keywords: yolov8, ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ, Ultralytics, COCOใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ, ็”ปๅƒใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ, ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บ, ใƒขใƒ‡ใƒซใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ, ใƒขใƒ‡ใƒซใƒใƒชใƒ‡ใƒผใ‚ทใƒงใƒณ, ็”ปๅƒไบˆๆธฌ, ใƒขใƒ‡ใƒซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ +--- + +# ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ + +ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใฎไพ‹ + +ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใฏใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆๆคœๅ‡บใ‚’ไธ€ๆญฉ้€ฒใ‚ใฆใŠใ‚Šใ€็”ปๅƒๅ†…ใฎๅ€‹ใ€…ใฎใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’่ญ˜ๅˆฅใ—ใ€ใใ‚Œใ‚‰ใ‚’็”ปๅƒใฎๆฎ‹ใ‚Šใฎ้ƒจๅˆ†ใ‹ใ‚‰ใ‚ปใ‚ฐใƒกใƒณใƒˆๅŒ–ใ—ใพใ™ใ€‚ + +ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒขใƒ‡ใƒซใฎๅ‡บๅŠ›ใฏใ€็”ปๅƒๅ†…ใฎๅ„ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๆฆ‚่ชฌใ™ใ‚‹ใƒžใ‚นใ‚ฏใพใŸใฏ่ผช้ƒญใฎใ‚ปใƒƒใƒˆใงใ‚ใ‚Šใ€ๅ„ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใซใฏใ‚ฏใƒฉใ‚นใƒฉใƒ™ใƒซใจไฟก้ ผใ‚นใ‚ณใ‚ขใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใฎไฝ็ฝฎใ ใ‘ใงใชใใ€ใใฎๆญฃ็ขบใชๅฝข็Šถใ‚’็Ÿฅใ‚‹ๅฟ…่ฆใŒใ‚ใ‚‹ๅ ดๅˆใซใ€ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใŒๅฝน็ซ‹ใกใพใ™ใ€‚ + +
+**่ฆ–่ด:** Pythonใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎUltralytics YOLOv8ใƒขใƒ‡ใƒซใงใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใ‚’ๅฎŸ่กŒใ™ใ‚‹ใ€‚๏ผˆๅŸ‹ใ‚่พผใฟๅ‹•็”ป๏ผ‰
+
+!!! Tip "ใƒ’ใƒณใƒˆ"
+
+    YOLOv8ใ‚ปใ‚ฐใƒกใƒณใƒˆใƒขใƒ‡ใƒซใฏ`-seg`ใ‚ตใƒ•ใ‚ฃใƒƒใ‚ฏใ‚นใ‚’ไฝฟ็”จใ—ใพใ™ใ€‚ไพ‹ใˆใฐ`yolov8n-seg.pt`ใชใฉใฏ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+## [ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+ใ“ใ“ใงใฏใ€ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸYOLOv8ใ‚ปใ‚ฐใƒกใƒณใƒˆใƒขใƒ‡ใƒซใŒ็คบใ•ใ‚Œใฆใ„ใพใ™ใ€‚Detectใ€Segmentใ€Poseใƒขใƒ‡ใƒซใฏ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใ‚‹ไธ€ๆ–นใ€Classifyใƒขใƒ‡ใƒซใฏ[ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚Œใฆใ„ใพใ™ใ€‚
+
+[ใƒขใƒ‡ใƒซ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)ใฏๅˆๅ›žไฝฟ็”จๆ™‚ใซๆœ€ๆ–ฐใฎUltralytics[ใƒชใƒชใƒผใ‚น](https://github.com/ultralytics/assets/releases)ใ‹ใ‚‰่‡ชๅ‹•็š„ใซใƒ€ใ‚ฆใƒณใƒญใƒผใƒ‰ใ•ใ‚Œใพใ™ใ€‚
+
+| ใƒขใƒ‡ใƒซ | ใ‚ตใ‚คใ‚บ (ใƒ”ใ‚ฏใ‚ปใƒซ) | mAPbox 50-95 | mAPmask 50-95 | ใ‚นใƒ”ใƒผใƒ‰ CPU ONNX (ms) | ใ‚นใƒ”ใƒผใƒ‰ A100 TensorRT (ms) | ใƒ‘ใƒฉใƒกใƒผใ‚ฟ (M) | FLOPs (B) |
+|----------------------------------------------------------------------------------------------|------------|-------|-------|-------|------|------|-------|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- **mAPval**ใฎๅ€คใฏ[COCO val2017](http://cocodataset.org)ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใฎๅ˜ไธ€ใƒขใƒ‡ใƒซๅ˜ไธ€ใ‚นใ‚ฑใƒผใƒซใฎๅ€คใงใ™ใ€‚
ๅ†็พใ™ใ‚‹ใซใฏ `yolo val segment data=coco.yaml device=0` +- **ใ‚นใƒ”ใƒผใƒ‰**ใฏ[Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นใ‚’ไฝฟ็”จใ—ใฆCOCO val็”ปๅƒใงๅนณๅ‡ๅŒ–ใ•ใ‚Œใพใ™ใ€‚ +
ๅ†็พใ™ใ‚‹ใซใฏ `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + +COCO128-segใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงใ€็”ปๅƒใ‚ตใ‚คใ‚บ640ใงYOLOv8n-segใ‚’100ใ‚จใƒใƒƒใ‚ฏใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ—ใพใ™ใ€‚ๅˆฉ็”จๅฏ่ƒฝใชๅ…จใฆใฎๅผ•ๆ•ฐใซใคใ„ใฆใฏใ€[ใ‚ณใƒณใƒ•ใ‚ฃใ‚ฎใƒฅใƒฌใƒผใ‚ทใƒงใƒณ](/../usage/cfg.md)ใƒšใƒผใ‚ธใ‚’ๅ‚็…งใ—ใฆใใ ใ•ใ„ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-seg.yaml') # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ + model = YOLO('yolov8n-seg.pt') # ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰(ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใซๆŽจๅฅจ) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # YAMLใ‹ใ‚‰ใƒ“ใƒซใƒ‰ใ—ใ‚ฆใ‚งใ‚คใƒˆใ‚’็งป่กŒ + + # ใƒขใƒ‡ใƒซใ‚’ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ—ใ‚ผใƒญใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใฎ*.ptใƒขใƒ‡ใƒซใ‹ใ‚‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # YAMLใ‹ใ‚‰ๆ–ฐใ—ใ„ใƒขใƒ‡ใƒซใ‚’ใƒ“ใƒซใƒ‰ใ—ใ€ไบ‹ๅ‰ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐๆธˆใฟใ‚ฆใ‚งใ‚คใƒˆใ‚’็งป่กŒใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ‚’้–‹ๅง‹ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ + +YOLOใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใฎ่ฉณ็ดฐใฏใ€[ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚ฌใ‚คใƒ‰](../../../datasets/segment/index.md)ใง่ฆ‹ใคใ‘ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๆ—ขๅญ˜ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ไป–ใฎใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆ(ไพ‹ใˆใฐCOCOใชใฉ)ใ‹ใ‚‰YOLOใƒ•ใ‚ฉใƒผใƒžใƒƒใƒˆใซๅค‰ๆ›ใ™ใ‚‹ใซใฏใ€Ultralyticsใฎ[JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)ใƒ„ใƒผใƒซใ‚’ไฝฟ็”จใ—ใฆใใ ใ•ใ„ใ€‚ + +## ่ฉ•ไพก + +่จ“็ทดใ•ใ‚ŒใŸYOLOv8n-segใƒขใƒ‡ใƒซใฎ็ฒพๅบฆใ‚’COCO128-segใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใงๆคœ่จผใ—ใพใ™ใ€‚ๅผ•ๆ•ฐใฏๅฟ…่ฆใ‚ใ‚Šใพใ›ใ‚“ใ€ใชใœใชใ‚‰`model`ใฏใƒขใƒ‡ใƒซๅฑžๆ€งใจใ—ใฆใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ`data`ใจๅผ•ๆ•ฐใ‚’ไฟๆŒใ—ใฆใ„ใ‚‹ใ‹ใ‚‰ใงใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-seg.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’่ฉ•ไพก + metrics = model.val() # ๅผ•ๆ•ฐใฏๅฟ…่ฆใชใ—ใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใจ่จญๅฎšใฏ่จ˜ๆ†ถใ—ใฆใ„ใ‚‹ + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # ๅ„ใ‚ซใƒ†ใ‚ดใƒชใฎmap50-95(B)ใฎใƒชใ‚นใƒˆ + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # ๅ„ใ‚ซใƒ†ใ‚ดใƒชใฎmap50-95(M)ใฎใƒชใ‚นใƒˆ + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’่ฉ•ไพก + yolo segment val model=path/to/best.pt # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’่ฉ•ไพก + ``` + +## ไบˆๆธฌ + +่จ“็ทดใ•ใ‚ŒใŸYOLOv8n-segใƒขใƒ‡ใƒซใ‚’ไฝฟ็”จใ—ใฆ็”ปๅƒใฎไบˆๆธฌใ‚’ๅฎŸ่กŒใ—ใพใ™ใ€‚ + +!!! 
Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-seg.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใงไบˆๆธฌ + results = model('https://ultralytics.com/images/bus.jpg') # ็”ปๅƒใงไบˆๆธฌ + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # ๅ…ฌๅผใƒขใƒ‡ใƒซใงไบˆๆธฌ + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ใ‚ซใ‚นใ‚ฟใƒ ใƒขใƒ‡ใƒซใงไบˆๆธฌ + ``` + +`predict`ใƒขใƒผใƒ‰ใฎๅฎŒๅ…จใช่ฉณ็ดฐใฏใ€[ไบˆๆธฌ](https://docs.ultralytics.com/modes/predict/)ใƒšใƒผใ‚ธใซใฆ็ขบ่ชใงใใพใ™ใ€‚ + +## ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + +YOLOv8n-segใƒขใƒ‡ใƒซใ‚’ONNXใ€CoreMLใชใฉใฎๅˆฅใฎๅฝขๅผใซใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ—ใพใ™ใ€‚ + +!!! Example "ไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('yolov8n-seg.pt') # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + model = YOLO('path/to/best.pt') # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰ + + # ใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # ๅ…ฌๅผใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + yolo export model=path/to/best.pt format=onnx # ใ‚ซใ‚นใ‚ฟใƒ ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใ‚’ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ + ``` + +ใ”ๅˆฉ็”จๅฏ่ƒฝใชYOLOv8-segใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๅฝขๅผใฏไปฅไธ‹ใฎ่กจใซ็คบใ•ใ‚Œใฆใ„ใพใ™ใ€‚ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใซ็›ดๆŽฅไบˆๆธฌใพใŸใฏ่ฉ•ไพกใŒๅฏ่ƒฝใงใ™ใ€ใคใพใ‚Š `yolo predict model=yolov8n-seg.onnx`ใ€‚ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใŒๅฎŒไบ†ใ—ใŸๅพŒใซใ€ใƒขใƒ‡ใƒซใฎไฝฟ็”จไพ‹ใŒ่กจ็คบใ•ใ‚Œใพใ™ใ€‚ + +| ๅฝขๅผ | `format`ๅผ•ๆ•ฐ | ใƒขใƒ‡ใƒซ | ใƒกใ‚ฟใƒ‡ใƒผใ‚ฟ | ๅผ•ๆ•ฐ | +|--------------------------------------------------------------------|---------------|-------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | 
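+ใชใŠใ€่กจใซใ‚ใ‚‹้€šใ‚Šใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆใ•ใ‚ŒใŸใƒขใƒ‡ใƒซใซใฏ็›ดๆŽฅไบˆๆธฌใŒๅฏ่ƒฝใงใ™ใ€‚ไปฅไธ‹ใฏใ€ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๆธˆใฟONNXใƒขใƒ‡ใƒซใงไบˆๆธฌใ‚’ๅฎŸ่กŒใ—ใ€ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ็ตๆžœ(ใƒžใ‚นใ‚ฏใจใƒœใƒƒใ‚ฏใ‚น)ใซใ‚ขใ‚ฏใ‚ปใ‚นใ™ใ‚‹ๆœ€ๅฐ้™ใฎใ‚นใ‚ฑใƒƒใƒใงใ™ใ€‚ไธŠ่จ˜ใฎ `model.export(format='onnx')` ใ‚’ๅฎŸ่กŒๆธˆใฟใงใ€`yolov8n-seg.onnx` ใŒใ‚ซใƒฌใƒณใƒˆใƒ‡ใ‚£ใƒฌใ‚ฏใƒˆใƒชใซใ‚ใ‚Šใ€`onnxruntime` ใŒใ‚คใƒณใ‚นใƒˆใƒผใƒซๆธˆใฟใงใ‚ใ‚‹ใ“ใจใ‚’ไปฎๅฎšใ—ใฆใ„ใพใ™ใ€‚
+
+```python
+from ultralytics import YOLO
+
+# ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆๆธˆใฟONNXใƒขใƒ‡ใƒซใ‚’ใƒญใƒผใƒ‰(ไบ‹ๅ‰ใซ model.export(format='onnx') ใ‚’ๅฎŸ่กŒๆธˆใฟใจไปฎๅฎš)
+model = YOLO('yolov8n-seg.onnx')
+
+# ็”ปๅƒใงไบˆๆธฌใ‚’ๅฎŸ่กŒ
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# ็ตๆžœใ‹ใ‚‰ใƒžใ‚นใ‚ฏใจใƒœใƒƒใ‚ฏใ‚นใ‚’ๅ–ใ‚Šๅ‡บใ™
+for r in results:
+    if r.masks is not None:
+        print(len(r.masks))       # ๆคœๅ‡บใ•ใ‚ŒใŸใ‚คใƒณใ‚นใ‚ฟใƒณใ‚นๆ•ฐ
+        print(r.masks.xy[0][:3])  # ๆœ€ๅˆใฎใƒžใ‚นใ‚ฏใฎใƒใƒชใ‚ดใƒณๅบงๆจ™(ๅ…ˆ้ ญ3็‚น)
+    print(r.boxes.xyxy)           # ใƒใ‚ฆใƒณใƒ‡ใ‚£ใƒณใ‚ฐใƒœใƒƒใ‚ฏใ‚น(x1, y1, x2, y2)
+```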
diff --git a/ultralytics/docs/ja/tasks/segment.md:Zone.Identifier b/ultralytics/docs/ja/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ja/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/index.md b/ultralytics/docs/ko/index.md new file mode 100755 index 0000000..67d82a7 --- /dev/null +++ b/ultralytics/docs/ko/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: Ultralytics YOLOv8์„ ์™„๋ฒฝํ•˜๊ฒŒ ํƒ๊ตฌํ•˜๋Š” ๊ฐ€์ด๋“œ๋กœ, ๊ณ ์† ๋ฐ ์ •ํ™•์„ฑ์ด ํŠน์ง•์ธ ๊ฐ์ฒด ํƒ์ง€ ๋ฐ ์ด๋ฏธ์ง€ ๋ถ„ํ•  ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. ์„ค์น˜, ์˜ˆ์ธก, ํ›ˆ๋ จ ํŠœํ† ๋ฆฌ์–ผ ๋“ฑ์ด ํฌํ•จ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: Ultralytics, YOLOv8, ๊ฐ์ฒด ํƒ์ง€, ์ด๋ฏธ์ง€ ๋ถ„ํ• , ๊ธฐ๊ณ„ ํ•™์Šต, ๋”ฅ๋Ÿฌ๋‹, ์ปดํ“จํ„ฐ ๋น„์ „, YOLOv8 ์„ค์น˜, YOLOv8 ์˜ˆ์ธก, YOLOv8 ํ›ˆ๋ จ, YOLO ์—ญ์‚ฌ, YOLO ๋ผ์ด์„ผ์Šค +--- + +
+[๋ฐฐ๋„ˆ ๋ฐ ๋ฐฐ์ง€: Ultralytics YOLO ๋ฐฐ๋„ˆ, ์†Œ์…œ ๋งํฌ(GitHub, LinkedIn, Twitter, YouTube, TikTok, Instagram, Discord), ๋ฐฐ์ง€(Ultralytics CI, ์ฝ”๋“œ ์ปค๋ฒ„๋ฆฌ์ง€, YOLOv8 ์ธ์šฉ, Docker Pulls, Discord, Run on Gradient, Open In Colab, Open In Kaggle)]
+
+Ultralytics์˜ ์ตœ์‹  ๋ฒ„์ „์ธ [YOLOv8](https://github.com/ultralytics/ultralytics)์„ ์†Œ๊ฐœํ•ฉ๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ ๋”ฅ๋Ÿฌ๋‹๊ณผ ์ปดํ“จํ„ฐ ๋น„์ „์˜ ์ตœ์‹  ๋ฐœ์ „์„ ๋ฐ”ํƒ•์œผ๋กœ ๊ตฌ์ถ•๋˜์—ˆ์œผ๋ฉฐ, ์†๋„์™€ ์ •ํ™•์„ฑ ๋ฉด์—์„œ ๋›ฐ์–ด๋‚œ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ๊ฐ„๊ฒฐํ•œ ์„ค๊ณ„ ๋•๋ถ„์— ๋‹ค์–‘ํ•œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์— ์ ํ•ฉํ•˜๋ฉฐ, ์—ฃ์ง€ ๋””๋ฐ”์ด์Šค์—์„œ ํด๋ผ์šฐ๋“œ API์— ์ด๋ฅด๊ธฐ๊นŒ์ง€ ๋‹ค์–‘ํ•œ ํ•˜๋“œ์›จ์–ด ํ”Œ๋žซํผ์— ์‰ฝ๊ฒŒ ์ ์‘ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
+
+YOLOv8 ๋ฌธ์„œ๋Š” ๊ทธ ๊ธฐ๋Šฅ๊ณผ ๋Šฅ๋ ฅ์„ ์ดํ•ดํ•˜๊ณ  ํ™œ์šฉํ•  ์ˆ˜ ์žˆ๋„๋ก ๋•๋Š” ์ข…ํ•ฉ์ ์ธ ์ž์›์ž…๋‹ˆ๋‹ค. ๊ธฐ๊ณ„ ํ•™์Šต ๋ถ„์•ผ์—์„œ ๊ฒฝํ—˜์ด ๋งŽ๋“  ์ƒˆ๋กญ๊ฒŒ ์‹œ์ž‘ํ•˜๋“ , ์ด ํ—ˆ๋ธŒ๋Š” YOLOv8์˜ ์ž ์žฌ๋ ฅ์„ ๊ทน๋Œ€ํ™”ํ•  ์ˆ˜ ์žˆ๋„๋ก ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค.
+
+!!! Note "๋…ธํŠธ"
+
+    🚧 ๋‹ค๊ตญ์–ด ๋ฌธ์„œ๋Š” ํ˜„์žฌ ์ œ์ž‘ ์ค‘์ด๋ฉฐ, ์ด๋ฅผ ๊ฐœ์„ ํ•˜๊ธฐ ์œ„ํ•ด ๋…ธ๋ ฅํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. ์ธ๋‚ดํ•ด ์ฃผ์…”์„œ ๊ฐ์‚ฌํ•ฉ๋‹ˆ๋‹ค! 🙏
+
+## ์‹œ์ž‘ํ•˜๊ธฐ
+
+- **์„ค์น˜** `ultralytics`๋ฅผ pip์œผ๋กœ ์„ค์น˜ํ•˜๊ณ  ๋ช‡ ๋ถ„ ๋งŒ์— ์‹œ์ž‘ํ•˜์„ธ์š”   [:material-clock-fast: ์‹œ์ž‘ํ•˜๊ธฐ](quickstart.md){ .md-button }
+- **์˜ˆ์ธก** YOLOv8๋กœ ์ƒˆ๋กœ์šด ์ด๋ฏธ์ง€์™€ ๋น„๋””์˜ค๋ฅผ ๊ฐ์ง€ํ•˜์„ธ์š”   [:octicons-image-16: ์ด๋ฏธ์ง€์—์„œ ์˜ˆ์ธกํ•˜๊ธฐ](modes/predict.md){ .md-button }
+- **ํ›ˆ๋ จ** ์ƒˆ๋กœ์šด YOLOv8 ๋ชจ๋ธ์„ ์‚ฌ์šฉ์ž์˜ ๋งž์ถค ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํ›ˆ๋ จํ•˜์„ธ์š”   [:fontawesome-solid-brain: ๋ชจ๋ธ ํ›ˆ๋ จํ•˜๊ธฐ](modes/train.md){ .md-button }
+- **ํƒํ—˜** ์„ธ๋ถ„ํ™”, ๋ถ„๋ฅ˜, ์ž์„ธ ์ถ”์ •, ์ถ”์ ๊ณผ ๊ฐ™์€ YOLOv8 ์ž‘์—…์„ ํƒํ—˜ํ•˜์„ธ์š”   [:material-magnify-expand: ์ž‘์—… ํƒํ—˜ํ•˜๊ธฐ](tasks/index.md){ .md-button }
+

+์‹œ์ฒญํ•˜๊ธฐ: ์‚ฌ์šฉ์ž์˜ ๋งž์ถค ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ YOLOv8 ๋ชจ๋ธ์„ ํ›ˆ๋ จํ•˜๋Š” ๋ฐฉ๋ฒ•์„ Google Colab์—์„œ ์•Œ์•„๋ณด์„ธ์š”. (๋™์˜์ƒ ์ž„๋ฒ ๋“œ)

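+์ฐธ๊ณ ๋กœ, ์œ„ ์‹œ์ž‘ํ•˜๊ธฐ ๋‹จ๊ณ„(์„ค์น˜, ์˜ˆ์ธก, ํ›ˆ๋ จ)๋ฅผ ํ•œ๋ˆˆ์— ๋ณด์—ฌ์ฃผ๋Š” ์ตœ์†Œํ•œ์˜ ์Šค์ผ€์น˜์ž…๋‹ˆ๋‹ค. `pip install ultralytics`๋กœ ์„ค์น˜๊ฐ€ ๋๋‚ฌ๋‹ค๊ณ  ๊ฐ€์ •ํ•˜๋ฉฐ, ์•„๋ž˜ ๊ฐ€์ค‘์น˜์™€ ์˜ˆ์‹œ ์ด๋ฏธ์ง€, ๋ฐ์ดํ„ฐ์…‹์€ ์ฒซ ์‹คํ–‰ ์‹œ ์ž๋™์œผ๋กœ ๋‹ค์šด๋กœ๋“œ๋œ๋‹ค๊ณ  ๊ฐ€์ •ํ•ฉ๋‹ˆ๋‹ค.
+
+```python
+from ultralytics import YOLO
+
+# COCO๋กœ ์‚ฌ์ „ ํ•™์Šต๋œ YOLOv8n ๋ชจ๋ธ ๋กœ๋“œ(์ฒซ ์‹คํ–‰ ์‹œ ์ž๋™ ๋‹ค์šด๋กœ๋“œ)
+model = YOLO('yolov8n.pt')
+
+# ์ƒˆ ์ด๋ฏธ์ง€์—์„œ ์˜ˆ์ธก ์‹คํ–‰
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# ๋งž์ถค ๋ฐ์ดํ„ฐ์…‹(์˜ˆ์‹œ๋กœ COCO8)์œผ๋กœ ์งง๊ฒŒ ํ›ˆ๋ จ
+model.train(data='coco8.yaml', epochs=3, imgsz=640)
+```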
+
+## YOLO: ๊ฐ„๋‹จํ•œ ์—ญ์‚ฌ
+
+[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once, ํ•œ ๋ฒˆ๋งŒ ๋ณธ๋‹ค)๋Š” ์›Œ์‹ฑํ„ด ๋Œ€ํ•™๊ต์˜ Joseph Redmon๊ณผ Ali Farhadi๊ฐ€ ๊ฐœ๋ฐœํ•œ ์ธ๊ธฐ ์žˆ๋Š” ๊ฐ์ฒด ํƒ์ง€ ๋ฐ ์ด๋ฏธ์ง€ ๋ถ„ํ•  ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. 2015๋…„์— ๊ณต๊ฐœ๋œ YOLO๋Š” ๋น ๋ฅธ ์†๋„์™€ ๋†’์€ ์ •ํ™•์„ฑ ๋•๋ถ„์— ๋น ๋ฅด๊ฒŒ ์ธ๊ธฐ๋ฅผ ์–ป์—ˆ์Šต๋‹ˆ๋‹ค.
+
+- [YOLOv2](https://arxiv.org/abs/1612.08242)๋Š” 2016๋…„์— ๊ณต๊ฐœ๋˜์—ˆ์œผ๋ฉฐ ๋ฐฐ์น˜ ์ •๊ทœํ™”, ์•ต์ปค ๋ฐ•์Šค, ์ฐจ์› ํด๋Ÿฌ์Šคํ„ฐ๋ฅผ ํ†ตํ•ฉํ•˜์—ฌ ์›๋ณธ ๋ชจ๋ธ์„ ๊ฐœ์„ ํ–ˆ์Šต๋‹ˆ๋‹ค.
+- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf)๋Š” 2018๋…„์— ๊ณต๊ฐœ๋˜์–ด ๋” ํšจ์œจ์ ์ธ ๋ฐฑ๋ณธ ๋„คํŠธ์›Œํฌ, ๋ณต์ˆ˜ ์•ต์ปค, ๊ณต๊ฐ„ ํ”ผ๋ผ๋ฏธ๋“œ ํ’€๋ง์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ๋”์šฑ ํ–ฅ์ƒ์‹œ์ผฐ์Šต๋‹ˆ๋‹ค.
+- [YOLOv4](https://arxiv.org/abs/2004.10934)๋Š” 2020๋…„์— ๊ณต๊ฐœ๋˜์—ˆ์œผ๋ฉฐ ๋ชจ์ž์ดํฌ ๋ฐ์ดํ„ฐ ์ฆ๊ฐ•, ์ƒˆ๋กœ์šด ์•ต์ปค ํ”„๋ฆฌ(anchor-free) ํƒ์ง€ ํ—ค๋“œ, ์ƒˆ๋กœ์šด ์†์‹ค ํ•จ์ˆ˜์™€ ๊ฐ™์€ ํ˜์‹ ์„ ๋„์ž…ํ–ˆ์Šต๋‹ˆ๋‹ค.
+- [YOLOv5](https://github.com/ultralytics/yolov5)๋Š” ๋ชจ๋ธ ์„ฑ๋Šฅ์„ ๋”์šฑ ํ–ฅ์ƒ์‹œํ‚ค๊ณ  ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ์ตœ์ ํ™”, ํ†ตํ•ฉ ์‹คํ—˜ ์ถ”์ , ์ธ๊ธฐ ์žˆ๋Š” ํ˜•์‹์œผ๋กœ์˜ ์ž๋™ ๋‚ด๋ณด๋‚ด๊ธฐ์™€ ๊ฐ™์€ ์ƒˆ๋กœ์šด ๊ธฐ๋Šฅ์„ ์ถ”๊ฐ€ํ–ˆ์Šต๋‹ˆ๋‹ค.
+- [YOLOv6](https://github.com/meituan/YOLOv6)๋Š” 2022๋…„์— [Meituan](https://about.meituan.com/)์ด ์˜คํ”ˆ ์†Œ์Šค๋กœ ๊ณต๊ฐœํ–ˆ์œผ๋ฉฐ, ์ด ํšŒ์‚ฌ์˜ ์ž์œจ ๋ฐฐ๋‹ฌ ๋กœ๋ด‡์—์„œ ์‚ฌ์šฉ๋˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค.
+- [YOLOv7](https://github.com/WongKinYiu/yolov7)๋Š” COCO ํ‚คํฌ์ธํŠธ ๋ฐ์ดํ„ฐ์…‹์—์„œ์˜ ์ž์„ธ ์ถ”์ •๊ณผ ๊ฐ™์€ ์ถ”๊ฐ€ ์ž‘์—… ์ง€์›์„ ๋„์ž…ํ–ˆ์Šต๋‹ˆ๋‹ค.
+- [YOLOv8](https://github.com/ultralytics/ultralytics)์€ Ultralytics์—์„œ ๊ณต๊ฐœํ•œ YOLO์˜ ์ตœ์‹  ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. ์ตœ์ฒจ๋‹จ(SOTA) ๋ชจ๋ธ๋กœ์„œ YOLOv8์€ ์ด์ „ ๋ฒ„์ „๋“ค์˜ ์„ฑ๊ณต์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์ƒˆ๋กœ์šด ๊ธฐ๋Šฅ๊ณผ ๊ฐœ์„  ์‚ฌํ•ญ์„ ๋„์ž…ํ•˜์—ฌ ์„ฑ๋Šฅ, ์œ ์—ฐ์„ฑ, ํšจ์œจ์„ฑ์„ ๋†’์˜€์Šต๋‹ˆ๋‹ค. YOLOv8์€ [ํƒ์ง€](tasks/detect.md), [๋ถ„ํ• ](tasks/segment.md), [์ž์„ธ ์ถ”์ •](tasks/pose.md), [์ถ”์ ](modes/track.md), [๋ถ„๋ฅ˜](tasks/classify.md)๋ฅผ ํฌํ•จํ•œ ๋‹ค์–‘ํ•œ ๋น„์ „ AI ์ž‘์—…์„ ์ง€์›ํ•˜๋ฏ€๋กœ, ์‚ฌ์šฉ์ž๋Š” ๋‹ค์–‘ํ•œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜๊ณผ ๋„๋ฉ”์ธ ์ „๋ฐ˜์—์„œ YOLOv8์˜ ๋Šฅ๋ ฅ์„ ํ™œ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
+
+## YOLO ๋ผ์ด์„ผ์Šค: Ultralytics YOLO๋Š” ์–ด๋–ป๊ฒŒ ๋ผ์ด์„ผ์Šค๊ฐ€ ๋ถ€์—ฌ๋˜๋‚˜์š”?
+
+Ultralytics๋Š” ๋‹ค์–‘ํ•œ ์‚ฌ์šฉ ์‚ฌ๋ก€์— ๋งž์ถฐ ๋‘ ๊ฐ€์ง€ ๋ผ์ด์„ผ์Šค ์˜ต์…˜์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค:
+
+- **AGPL-3.0 ๋ผ์ด์„ผ์Šค**: ์ด [OSI ์Šน์ธ](https://opensource.org/licenses/) ์˜คํ”ˆ ์†Œ์Šค ๋ผ์ด์„ผ์Šค๋Š” ํ•™์ƒ๊ณผ ์• ํ˜ธ๊ฐ€์—๊ฒŒ ์ด์ƒ์ ์ด๋ฉฐ, ์—ด๋ฆฐ ํ˜‘๋ ฅ๊ณผ ์ง€์‹ ๊ณต์œ ๋ฅผ ์ด‰์ง„ํ•ฉ๋‹ˆ๋‹ค. ์ž์„ธํ•œ ๋‚ด์šฉ์€ [๋ผ์ด์„ผ์Šค](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ํŒŒ์ผ์„ ์ฐธ์กฐํ•˜์„ธ์š”.
+- **๊ธฐ์—… ๋ผ์ด์„ผ์Šค**: ์ƒ์—…์  ์‚ฌ์šฉ์„ ์œ„ํ•ด ์„ค๊ณ„๋œ ์ด ๋ผ์ด์„ผ์Šค๋Š” Ultralytics ์†Œํ”„ํŠธ์›จ์–ด์™€ AI ๋ชจ๋ธ์„ ์ƒ์—…์  ์ œํ’ˆ ๋ฐ ์„œ๋น„์Šค์— ์›ํ™œํ•˜๊ฒŒ ํ†ตํ•ฉํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•˜์—ฌ AGPL-3.0์˜ ์˜คํ”ˆ ์†Œ์Šค ์š”๊ฑด์„ ์šฐํšŒํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•ฉ๋‹ˆ๋‹ค. ์ƒ์—…์  ์ œํ’ˆ์— ์†”๋ฃจ์…˜์„ ๋‚ด์žฅํ•˜๋Š” ๊ฒฝ์šฐ [Ultralytics ๋ผ์ด์„ผ์‹ฑ](https://ultralytics.com/license)์„ ํ†ตํ•ด ๋ฌธ์˜ํ•˜์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค.
+
+์šฐ๋ฆฌ์˜ ๋ผ์ด์„ผ์Šค ์ „๋žต์€ ์˜คํ”ˆ ์†Œ์Šค ํ”„๋กœ์ ํŠธ์— ๋Œ€ํ•œ ๊ฐœ์„  ์‚ฌํ•ญ์ด ์ปค๋ฎค๋‹ˆํ‹ฐ์— ๋˜๋Œ์•„๊ฐ€๋„๋ก ๋ณด์žฅํ•˜๋ ค๋Š” ๊ฒƒ์ž…๋‹ˆ๋‹ค.
์šฐ๋ฆฌ๋Š” ์˜คํ”ˆ ์†Œ์Šค์˜ ์›์น™์„ ๊ฐ€์Šด ๊นŠ์ด ์ƒˆ๊ธฐ๊ณ  ์žˆ์œผ๋ฉฐ, ์šฐ๋ฆฌ์˜ ๊ธฐ์—ฌ๊ฐ€ ๋ชจ๋‘์—๊ฒŒ ์œ ์šฉํ•œ ๋ฐฉ์‹์œผ๋กœ ํ™œ์šฉ๋˜๊ณ  ํ™•์žฅ๋  ์ˆ˜ ์žˆ๋„๋ก ๋ณด์žฅํ•˜๋Š” ๊ฒƒ์ด ์šฐ๋ฆฌ์˜ ์‚ฌ๋ช…์ž…๋‹ˆ๋‹ค.โค๏ธ diff --git a/ultralytics/docs/ko/index.md:Zone.Identifier b/ultralytics/docs/ko/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/fast-sam.md b/ultralytics/docs/ko/models/fast-sam.md new file mode 100755 index 0000000..4480421 --- /dev/null +++ b/ultralytics/docs/ko/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: FastSAM์€ ์ด๋ฏธ์ง€์—์„œ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๋ถ„ํ• ์„ ์œ„ํ•œ CNN ๊ธฐ๋ฐ˜ ์†”๋ฃจ์…˜์œผ๋กœ, ํ–ฅ์ƒ๋œ ์‚ฌ์šฉ์ž ์ƒํ˜ธ์ž‘์šฉ, ๊ณ„์‚ฐ ํšจ์œจ์„ฑ, ๋‹ค์–‘ํ•œ ๋น„์ „ ์ž‘์—…์— ๋Œ€์‘ํ•  ์ˆ˜ ์žˆ๋Š” ํŠน์ง•์„ ๊ฐ–๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: FastSAM, ๋จธ์‹ ๋Ÿฌ๋‹, CNN ๊ธฐ๋ฐ˜ ์†”๋ฃจ์…˜, ๊ฐ์ฒด ๋ถ„ํ• , ์‹ค์‹œ๊ฐ„ ์†”๋ฃจ์…˜, Ultralytics, ๋น„์ „ ์ž‘์—…, ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ, ์‚ฐ์—… ์‘์šฉ, ์‚ฌ์šฉ์ž ์ƒํ˜ธ์ž‘์šฉ +--- + +# Fast Segment Anything Model (FastSAM) + +Fast Segment Anything Model (FastSAM)์€ Segment Anything ์ž‘์—…์„ ์œ„ํ•œ ์ƒˆ๋กœ์šด ์‹ค์‹œ๊ฐ„ CNN ๊ธฐ๋ฐ˜ ์†”๋ฃจ์…˜์ž…๋‹ˆ๋‹ค. ์ด ์ž‘์—…์€ ๋‹ค์–‘ํ•œ ์‚ฌ์šฉ์ž ์ƒํ˜ธ์ž‘์šฉ ํ”„๋กฌํ”„ํŠธ์— ๋”ฐ๋ผ ์ด๋ฏธ์ง€ ๋‚ด์˜ ๋ชจ๋“  ๊ฐ์ฒด๋ฅผ ๋ถ„ํ• ํ•˜๋Š” ๊ฒƒ์„ ๋ชฉํ‘œ๋กœ ํ•ฉ๋‹ˆ๋‹ค. FastSAM์€ ๊ณ„์‚ฐ ์š”๊ตฌ ์‚ฌํ•ญ์„ ํฌ๊ฒŒ ์ค„์ด๋ฉด์„œ ๊ฒฝ์Ÿ๋ ฅ ์žˆ๋Š” ์„ฑ๋Šฅ์„ ์œ ์ง€ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋‹ค์–‘ํ•œ ๋น„์ „ ์ž‘์—…์— ์‹ค์šฉ์ ์ธ ์„ ํƒ์ง€๊ฐ€ ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +![Fast Segment Anything Model (FastSAM) ์•„ํ‚คํ…์ฒ˜ ๊ฐœ์š”](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## ๊ฐœ์š” + +FastSAM์€ ๊ณ„์‚ฐ ๋ฆฌ์†Œ์Šค ์š”๊ตฌ ์‚ฌํ•ญ์ด ํฐ Transformer ๋ชจ๋ธ์ธ Segment Anything Model (SAM)์˜ ํ•œ๊ณ„๋ฅผ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. FastSAM์€ Segment Anything ์ž‘์—…์„ ๋‘ ๋‹จ๊ณ„๋กœ ๋ถ„๋ฆฌํ•œ ๋ฐฉ์‹์„ ์ฑ„ํƒํ•ฉ๋‹ˆ๋‹ค: ๋ชจ๋“  ์ธ์Šคํ„ด์Šค ๋ถ„ํ• ๊ณผ ํ”„๋กฌํ”„ํŠธ๋กœ ์ธํ•œ ์˜์—ญ ์„ ํƒ. ์ฒซ ๋ฒˆ์งธ ๋‹จ๊ณ„์—์„œ๋Š” [YOLOv8-seg](../tasks/segment.md)๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์˜ ๋ชจ๋“  ์ธ์Šคํ„ด์Šค์˜ ๋ถ„ํ•  ๋งˆ์Šคํฌ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค. ๋‘ ๋ฒˆ์งธ ๋‹จ๊ณ„์—์„œ๋Š” ํ”„๋กฌํ”„ํŠธ์— ํ•ด๋‹นํ•˜๋Š” ๊ด€์‹ฌ ์˜์—ญ์„ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค. + +## ์ฃผ์š” ํŠน์ง• + +1. **์‹ค์‹œ๊ฐ„ ์†”๋ฃจ์…˜**: CNN์˜ ๊ณ„์‚ฐ ํšจ์œจ์„ฑ์„ ํ™œ์šฉํ•˜์—ฌ FastSAM์€ Segment Anything ์ž‘์—…์— ๋Œ€ํ•œ ์‹ค์‹œ๊ฐ„ ์†”๋ฃจ์…˜์„ ์ œ๊ณตํ•˜๋ฉฐ, ๋น ๋ฅธ ๊ฒฐ๊ณผ๊ฐ€ ํ•„์š”ํ•œ ์‚ฐ์—… ์‘์šฉ์— ๊ฐ€์น˜๊ฐ€ ์žˆ์Šต๋‹ˆ๋‹ค. + +2. **ํšจ์œจ์„ฑ๊ณผ ์„ฑ๋Šฅ**: FastSAM์€ ์„ฑ๋Šฅ ํ’ˆ์งˆ์„ ํฌ์ƒํ•˜์ง€ ์•Š๊ณ  ๊ณ„์‚ฐ๊ณผ ๋ฆฌ์†Œ์Šค ์š”๊ตฌ ์‚ฌํ•ญ์„ ํฌ๊ฒŒ ์ค„์ž…๋‹ˆ๋‹ค. SAM๊ณผ ๋น„๊ตํ•ด ์œ ์‚ฌํ•œ ์„ฑ๋Šฅ์„ ๋‹ฌ์„ฑํ•˜๋ฉด์„œ ๊ณ„์‚ฐ ๋ฆฌ์†Œ์Šค๋ฅผ ํฌ๊ฒŒ ์ค„์—ฌ ์‹ค์‹œ๊ฐ„ ์‘์šฉ์ด ๊ฐ€๋Šฅํ•ด์ง‘๋‹ˆ๋‹ค. + +3. **ํ”„๋กฌํ”„ํŠธ ์•ˆ๋‚ด ๋ถ„ํ• **: FastSAM์€ ๋‹ค์–‘ํ•œ ์‚ฌ์šฉ์ž ์ƒํ˜ธ์ž‘์šฉ ํ”„๋กฌํ”„ํŠธ์— ๋”ฐ๋ผ ์ด๋ฏธ์ง€ ๋‚ด์˜ ๋ชจ๋“  ๊ฐ์ฒด๋ฅผ ๋ถ„ํ• ํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ ๋‹ค์–‘ํ•œ ์‹œ๋‚˜๋ฆฌ์˜ค์—์„œ ์œ ์—ฐ์„ฑ๊ณผ ์ ์‘์„ฑ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +4. **YOLOv8-seg ๊ธฐ๋ฐ˜**: FastSAM์€ [YOLOv8-seg](../tasks/segment.md)๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ํ•œ ๊ฒƒ์œผ๋กœ, ์ธ์Šคํ„ด์Šค ๋ถ„ํ•  ๋ธŒ๋žœ์น˜๊ฐ€ ์žฅ์ฐฉ๋œ ๊ฐ์ฒด ๊ฐ์ง€๊ธฐ์ž…๋‹ˆ๋‹ค. ์ด๋ฅผ ํ†ตํ•ด ์ด๋ฏธ์ง€์˜ ๋ชจ๋“  ์ธ์Šคํ„ด์Šค์˜ ๋ถ„ํ•  ๋งˆ์Šคํฌ๋ฅผ ํšจ๊ณผ์ ์œผ๋กœ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +5. 
**๋ฒค์น˜๋งˆํฌ์—์„œ ๊ฒฝ์Ÿ ๊ฒฐ๊ณผ**: MS COCO์—์„œ์˜ ๊ฐ์ฒด ์ œ์•ˆ ์ž‘์—…์—์„œ FastSAM์€ [SAM](sam.md)์— ๋น„ํ•ด ๋‹จ์ผ NVIDIA RTX 3090์—์„œ ํ›จ์”ฌ ๋” ๋น ๋ฅธ ์†๋„๋กœ ๋†’์€ ์ ์ˆ˜๋ฅผ ๋‹ฌ์„ฑํ•˜์—ฌ ํšจ์œจ์„ฑ๊ณผ ๋Šฅ๋ ฅ์„ ์ž…์ฆํ–ˆ์Šต๋‹ˆ๋‹ค. + +6. **์‹ค์šฉ์ ์ธ ์‘์šฉ**: FastSAM์€ ํ˜„์žฌ ๋ฐฉ๋ฒ•๋ณด๋‹ค ์ˆ˜์‹ญ ๋ฐฐ ๋˜๋Š” ์ˆ˜๋ฐฑ ๋ฐฐ ๋” ๋น ๋ฅธ ์†๋„๋กœ ์—ฌ๋Ÿฌ ๋น„์ „ ์ž‘์—…์˜ ์‹ ์†ํ•œ ์†”๋ฃจ์…˜์„ ์ œ๊ณตํ•˜์—ฌ ์‹ค์งˆ์ ์ธ ์ ์šฉ ๊ฐ€๋Šฅ์„ฑ์„ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค. + +7. **๋ชจ๋ธ ์••์ถ• ๊ฐ€๋Šฅ์„ฑ**: FastSAM์€ ๊ตฌ์กฐ์— ์ธ๊ณต ์‚ฌ์ „์„ ๋„์ž…ํ•˜์—ฌ ๊ณ„์‚ฐ ๋น„์šฉ์„ ํฌ๊ฒŒ ์ค„์ผ ์ˆ˜ ์žˆ๋Š” ๊ฒฝ๋กœ๋ฅผ ๋ณด์—ฌ์ฃผ์–ด ์ผ๋ฐ˜ ๋น„์ „ ์ž‘์—…์— ๋Œ€ํ•œ ๋Œ€ํ˜• ๋ชจ๋ธ ์•„ํ‚คํ…์ฒ˜์— ๋Œ€ํ•œ ์ƒˆ๋กœ์šด ๊ฐ€๋Šฅ์„ฑ์„ ์—ด์–ด์ค๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ, ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ์šด์˜ ๋ชจ๋“œ + +์ด ํ‘œ๋Š” ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ๊ณผ ํ•ด๋‹นํ•˜๋Š” ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜, ์ง€์›ํ•˜๋Š” ์ž‘์—… ๋ฐ [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md), [Export](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค๋ฅธ ์šด์˜ ๋ชจ๋“œ์— ๋Œ€ํ•œ ํ˜ธํ™˜์„ฑ์„ ๋‚˜ํƒ€๋‚ด๋ฉฐ, ์ง€์›๋˜๋Š” ๋ชจ๋“œ๋Š” โœ… ์ด๋ชจ์ง€๋กœ, ์ง€์›๋˜์ง€ ์•Š๋Š” ๋ชจ๋“œ๋Š” โŒ ์ด๋ชจ์ง€๋กœ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ ์œ ํ˜• | ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜ | ์ง€์›๋˜๋Š” ์ž‘์—… | Inference | Validation | Training | Export | +|-----------|----------------|--------------------------------|-----------|------------|----------|--------| +| FastSAM-s | `FastSAM-s.pt` | [์ธ์Šคํ„ด์Šค ๋ถ„ํ• ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [์ธ์Šคํ„ด์Šค ๋ถ„ํ• ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ์‚ฌ์šฉ ์˜ˆ์‹œ + +FastSAM ๋ชจ๋ธ์„ Python ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์— ์‰ฝ๊ฒŒ ํ†ตํ•ฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. Ultralytics๋Š” ๊ฐœ๋ฐœ์„ ๊ฐ„์†Œํ™”ํ•˜๊ธฐ ์œ„ํ•ด ์‚ฌ์šฉ์ž ์นœํ™”์ ์ธ Python API ๋ฐ CLI ๋ช…๋ น์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +### ์˜ˆ์ธก ์‚ฌ์šฉ๋ฒ• + +์ด๋ฏธ์ง€์—์„œ ๊ฐ์ฒด ๊ฒ€์ถœ์„ ์ˆ˜ํ–‰ํ•˜๋ ค๋ฉด ๋‹ค์Œ๊ณผ ๊ฐ™์ด `predict` ๋ฉ”์„œ๋“œ๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค: + +!!! Example "์˜ˆ์ œ" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # ์ถ”๋ก  ์†Œ์Šค ์ •์˜ + source = 'path/to/bus.jpg' + + # FastSAM ๋ชจ๋ธ ์ƒ์„ฑ + model = FastSAM('FastSAM-s.pt') # ๋˜๋Š” FastSAM-x.pt + + # ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์ถ”๋ก  ์‹คํ–‰ + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # Prompt Process ๊ฐ์ฒด ์ค€๋น„ + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # ๋ชจ๋“  ํ”„๋กฌํ”„ํŠธ + ann = prompt_process.everything_prompt() + + # ๋ฐ”์šด๋”ฉ ๋ฐ•์Šค์˜ ๊ธฐ๋ณธ ๋ชจ์–‘์€ [0,0,0,0]์—์„œ [x1,y1,x2,y2]๋กœ ๋ณ€๊ฒฝ + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # ํ…์ŠคํŠธ ํ”„๋กฌํ”„ํŠธ + ann = prompt_process.text_prompt(text='a photo of a dog') + + # ํฌ์ธํŠธ ํ”„๋กฌํ”„ํŠธ + # ๊ธฐ๋ณธ ํฌ์ธํŠธ [[0,0]] [[x1,y1],[x2,y2]] + # ๊ธฐ๋ณธ ํฌ์ธํŠธ ๋ ˆ์ด๋ธ” [0] [1,0] 0:๋ฐฐ๊ฒฝ, 1:์ „๊ฒฝ + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # FastSAM ๋ชจ๋ธ ๋กœ๋“œ ๋ฐ ๋ชจ๋“  ๊ฒƒ์„ ์„ธ๋ถ„ํ™”ํ•˜์—ฌ ์ถ”์ถœ + yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640 + ``` + +์ด ์ฝ”๋“œ ์กฐ๊ฐ์€ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ๋กœ๋“œํ•˜๊ณ  ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก์„ ์‹คํ–‰ํ•˜๋Š” ๊ฐ„ํŽธํ•จ์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. + +### ๊ฒ€์ฆ ์‚ฌ์šฉ๋ฒ• + +๋ฐ์ดํ„ฐ์…‹์—์„œ ๋ชจ๋ธ์„ ๊ฒ€์ฆํ•˜๋Š” ๋ฐฉ๋ฒ•์€ ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค: + +!!! 
Example "์˜ˆ์ œ" + + === "Python" + ```python + from ultralytics import FastSAM + + # FastSAM ๋ชจ๋ธ ์ƒ์„ฑ + model = FastSAM('FastSAM-s.pt') # ๋˜๋Š” FastSAM-x.pt + + # ๋ชจ๋ธ ๊ฒ€์ฆ + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # FastSAM ๋ชจ๋ธ ๋กœ๋“œ ๋ฐ ์ด๋ฏธ์ง€ ํฌ๊ธฐ 640์—์„œ COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์— ๋Œ€ํ•ด ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +FastSAM์€ ๋‹จ์ผ ํด๋ž˜์Šค ๊ฐ์ฒด์˜ ๊ฐ์ง€์™€ ๋ถ„ํ• ๋งŒ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. ์ด๋Š” ๋ชจ๋“  ๊ฐ์ฒด๋ฅผ ๋™์ผํ•œ ํด๋ž˜์Šค๋กœ ์ธ์‹ํ•˜๊ณ  ๋ถ„ํ• ํ•œ๋‹ค๋Š” ์˜๋ฏธ์ž…๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ ๋ฐ์ดํ„ฐ์…‹์„ ์ค€๋น„ํ•  ๋•Œ ๋ชจ๋“  ๊ฐ์ฒด ์นดํ…Œ๊ณ ๋ฆฌ ID๋ฅผ 0์œผ๋กœ ๋ณ€ํ™˜ํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. + +## FastSAM ๊ณต์‹ ์‚ฌ์šฉ๋ฒ• + +FastSAM์€ [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM) ์ €์žฅ์†Œ์—์„œ ์ง์ ‘ ์‚ฌ์šฉํ•  ์ˆ˜๋„ ์žˆ์Šต๋‹ˆ๋‹ค. FastSAM์„ ์‚ฌ์šฉํ•˜๊ธฐ ์œ„ํ•ด ์ˆ˜ํ–‰ํ•  ์ผ๋ฐ˜์ ์ธ ๋‹จ๊ณ„๋ฅผ ๊ฐ„๋‹จํžˆ ์†Œ๊ฐœํ•ฉ๋‹ˆ๋‹ค: + +### ์„ค์น˜ + +1. FastSAM ์ €์žฅ์†Œ๋ฅผ ๋ณต์ œํ•ฉ๋‹ˆ๋‹ค: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Python 3.9๋กœ Conda ํ™˜๊ฒฝ์„ ์ƒ์„ฑํ•˜๊ณ  ํ™œ์„ฑํ™”ํ•ฉ๋‹ˆ๋‹ค: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. ๋ณต์ œํ•œ ์ €์žฅ์†Œ๋กœ ์ด๋™ํ•˜์—ฌ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋ฅผ ์„ค์น˜ํ•ฉ๋‹ˆ๋‹ค: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. CLIP ๋ชจ๋ธ์„ ์„ค์น˜ํ•ฉ๋‹ˆ๋‹ค: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### ์˜ˆ์‹œ ์‚ฌ์šฉ๋ฒ• + +1. [๋ชจ๋ธ ์ฒดํฌํฌ์ธํŠธ](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing)๋ฅผ ๋‹ค์šด๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + +2. FastSAM์„ ์ถ”๋ก ํ•˜๊ธฐ ์œ„ํ•ด ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์˜ˆ์‹œ ๋ช…๋ น์–ด: + + - ์ด๋ฏธ์ง€์—์„œ ๋ชจ๋“  ๊ฒƒ์„ ์„ธ๋ถ„ํ™”: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - ํ…์ŠคํŠธ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํŠน์ • ๊ฐ์ฒด๋ฅผ ์„ธ๋ถ„ํ™”: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog" + ``` + + - ๋ฐ”์šด๋”ฉ ๋ฐ•์Šค ๋‚ด์˜ ๊ฐ์ฒด๋ฅผ ์„ธ๋ถ„ํ™” (xywh ํ˜•์‹์œผ๋กœ ์ƒ์ž ์ขŒํ‘œ ์ œ๊ณต): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - ํŠน์ • ์ง€์  ๊ทผ์ฒ˜์˜ ๊ฐ์ฒด๋ฅผ ์„ธ๋ถ„ํ™”: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +๋˜ํ•œ, FastSAM์„ [Colab ๋ฐ๋ชจ](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) ๋˜๋Š” [HuggingFace ์›น ๋ฐ๋ชจ](https://huggingface.co/spaces/An-619/FastSAM)์—์„œ ์‹œ๊ฐ์ ์ธ ๊ฒฝํ—˜์œผ๋กœ ์‹œ๋„ํ•ด ๋ณผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๋ง์”€ + +FastSAM์˜ ์‹ค์‹œ๊ฐ„ ์ธ์Šคํ„ด์Šค ๋ถ„ํ•  ๋ถ„์•ผ์— ๋Œ€ํ•œ ํ˜์‹ ์ ์ธ ๊ธฐ์—ฌ๋ฅผ ์œ„ํ•ด FastSAM ์ €์ž๋“ค์—๊ฒŒ ๊ฐ์‚ฌ์˜ ๋ง์”€์„ ์ „ํ•ฉ๋‹ˆ๋‹ค: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +FastSAM ์›๋ณธ ๋…ผ๋ฌธ์€ [arXiv](https://arxiv.org/abs/2306.12156)์—์„œ ์ฐพ์„ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
์ €์ž๋“ค์€ ์ž์‹ ๋“ค์˜ ์ž‘์—…์„ ๊ณต๊ฐœ์ ์œผ๋กœ ์ œ๊ณตํ•˜์˜€์œผ๋ฉฐ, ์ฝ”๋“œ๋ฒ ์ด์Šค๋Š” [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM)์—์„œ ์ด์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ €์ž๋“ค์˜ ๋…ธ๋ ฅ์— ๊ฐ์‚ฌ๋“œ๋ฆฌ๋ฉฐ ์ €์ž‘๋ฌผ์„ ๋” ํญ๋„“์€ ์ปค๋ฎค๋‹ˆํ‹ฐ์— ์•Œ๋ฆฌ๊ธฐ ์œ„ํ•œ ๊ธฐ์—ฌ๋ฅผ ๊ธฐ๋Œ€ํ•ฉ๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/ko/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/index.md b/ultralytics/docs/ko/models/index.md new file mode 100755 index 0000000..78f43ac --- /dev/null +++ b/ultralytics/docs/ko/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Ultralytics๊ฐ€ ์ง€์›ํ•˜๋Š” ๋‹ค์–‘ํ•œ YOLO ๊ณ„์—ด ๋ชจ๋ธ, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR์— ๋Œ€ํ•ด ์•Œ์•„๋ณด๊ณ  CLI์™€ Python ์‚ฌ์šฉ ์˜ˆ์ œ๋ฅผ ํ†ตํ•ด ์‹œ์ž‘ํ•˜์„ธ์š”. +keywords: Ultralytics, ๋ฌธ์„œํ™”, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, ๋ชจ๋ธ, ์•„ํ‚คํ…์ฒ˜, Python, CLI +--- + +# Ultralytics๊ฐ€ ์ง€์›ํ•˜๋Š” ๋ชจ๋ธ๋“ค + +Ultralytics ๋ชจ๋ธ ๋ฌธ์„œ์— ์˜ค์‹  ๊ฒƒ์„ ํ™˜์˜ํ•ฉ๋‹ˆ๋‹ค! ์šฐ๋ฆฌ๋Š” [๊ฐ์ฒด ๊ฐ์ง€](../tasks/detect.md), [์ธ์Šคํ„ด์Šค ๋ถ„ํ• ](../tasks/segment.md), [์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜](../tasks/classify.md), [์ž์„ธ ์ถ”์ •](../tasks/pose.md), [๋‹ค์ค‘ ๊ฐ์ฒด ์ถ”์ ](../modes/track.md)๊ณผ ๊ฐ™์€ ํŠน์ • ์ž‘์—…์— ๋งž์ถฐ์ง„ ๋‹ค์–‘ํ•œ ๋ฒ”์œ„์˜ ๋ชจ๋ธ์„ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. Ultralytics์— ๋ชจ๋ธ ์•„ํ‚คํ…์ฒ˜๋ฅผ ๊ธฐ์—ฌํ•˜๊ณ  ์‹ถ๋‹ค๋ฉด, [๊ธฐ์—ฌ ๊ฐ€์ด๋“œ](../../help/contributing.md)๋ฅผ ํ™•์ธํ•ด ๋ณด์„ธ์š”. + +!!! Note "์ฃผ์˜์‚ฌํ•ญ" + + ๐Ÿšง ํ˜„์žฌ ๋‹ค์–‘ํ•œ ์–ธ์–ด๋กœ ๋œ ๋ฌธ์„œ ์ž‘์—…์ด ์ง„ํ–‰ ์ค‘์ด๋ฉฐ, ์ด๋ฅผ ๊ฐœ์„ ํ•˜๊ธฐ ์œ„ํ•ด ์—ด์‹ฌํžˆ ๋…ธ๋ ฅํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. ์ธ๋‚ดํ•ด ์ฃผ์…”์„œ ๊ฐ์‚ฌํ•ฉ๋‹ˆ๋‹ค! ๐Ÿ™ + +## ์ฃผ์š” ๋ชจ๋ธ๋“ค + +๋‹ค์Œ์€ ์ง€์›๋˜๋Š” ํ•ต์‹ฌ ๋ชจ๋ธ ๋ชฉ๋ก์ž…๋‹ˆ๋‹ค: + +1. **[YOLOv3](yolov3.md)**: Joseph Redmon์— ์˜ํ•ด ์ตœ์ดˆ๋กœ ๋งŒ๋“ค์–ด์ง„ YOLO ๋ชจ๋ธ ํŒจ๋ฐ€๋ฆฌ์˜ ์„ธ ๋ฒˆ์งธ ๋ฒ„์ „์œผ๋กœ, ํšจ์œจ์ ์ธ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฐ์ง€ ๋Šฅ๋ ฅ์œผ๋กœ ์•Œ๋ ค์ ธ ์žˆ์Šต๋‹ˆ๋‹ค. +2. **[YOLOv4](yolov4.md)**: 2020๋…„ Alexey Bochkovskiy๊ฐ€ ๋ฐœํ‘œํ•œ YOLOv3์˜ ๋‹คํฌ๋„ท ๊ธฐ๋ฐ˜ ์—…๋ฐ์ดํŠธ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. +3. **[YOLOv5](yolov5.md)**: Ultralytics์— ์˜ํ•ด ํ–ฅ์ƒ๋œ YOLO ์•„ํ‚คํ…์ฒ˜๋กœ, ์ด์ „ ๋ฒ„์ „๋“ค์— ๋น„ํ•ด ๋” ๋‚˜์€ ์„ฑ๋Šฅ๊ณผ ์†๋„ ํŠธ๋ ˆ์ด๋“œ์˜คํ”„๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +4. **[YOLOv6](yolov6.md)**: [๋ฏธํˆฌ์•ˆ](https://about.meituan.com/)์—์„œ 2022๋…„์— ๋ฐœํ‘œํ•˜์—ฌ, ํšŒ์‚ฌ์˜ ์ž์œจ ์ฃผํ–‰ ๋ฐฐ๋‹ฌ ๋กœ๋ด‡์—์„œ ๋งŽ์ด ์‚ฌ์šฉ๋˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. +5. **[YOLOv7](yolov7.md)**: YOLOv4์˜ ์ €์ž๋“ค์— ์˜ํ•ด 2022๋…„์— ์—…๋ฐ์ดํŠธ๋œ YOLO ๋ชจ๋ธ๋“ค์ž…๋‹ˆ๋‹ค. +6. **[YOLOv8](yolov8.md) ์ƒˆ๋กœ์šด ๐Ÿš€**: YOLO ํŒจ๋ฐ€๋ฆฌ์˜ ์ตœ์‹  ๋ฒ„์ „์œผ๋กœ, ์ธ์Šคํ„ด์Šค ๋ถ„ํ• , ์ž์„ธ/ํ‚คํฌ์ธํŠธ ์ถ”์ •, ๋ถ„๋ฅ˜ ๋“ฑ ํ–ฅ์ƒ๋œ ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +7. **[Segment Anything Model (SAM)](sam.md)**: ๋ฉ”ํƒ€์˜ Segment Anything Model (SAM)์ž…๋‹ˆ๋‹ค. +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: ๊ฒฝํฌ๋Œ€ํ•™๊ต์—์„œ ๋ชจ๋ฐ”์ผ ์–ดํ”Œ๋ฆฌ์ผ€์ด์…˜์„ ์œ„ํ•ด ๊ฐœ๋ฐœํ•œ MobileSAM์ž…๋‹ˆ๋‹ค. +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: ์ค‘๊ตญ ๊ณผํ•™์› ์ž๋™ํ™” ์—ฐ๊ตฌ์†Œ์˜ ์ด๋ฏธ์ง€ ๋ฐ ๋น„๋””์˜ค ๋ถ„์„ ๊ทธ๋ฃน์— ์˜ํ•ด ๊ฐœ๋ฐœ๋œ FastSAM์ž…๋‹ˆ๋‹ค. +10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) ๋ชจ๋ธ๋“ค์ž…๋‹ˆ๋‹ค. +11. 
**[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: ๋ฐ”์ด๋‘์˜ PaddlePaddle Realtime Detection Transformer (RT-DETR) ๋ชจ๋ธ๋“ค์ž…๋‹ˆ๋‹ค. + +

+์‹œ์ฒญํ•˜๊ธฐ: ๋ช‡ ์ค„์˜ ์ฝ”๋“œ๋กœ Ultralytics YOLO ๋ชจ๋ธ์„ ์‹คํ–‰ํ•˜์„ธ์š”. (๋™์˜์ƒ ์ž„๋ฒ ๋“œ)

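+์ฐธ๊ณ ๋กœ, ์œ„ ๋ชฉ๋ก์˜ ๋ชจ๋ธ๊ตฐ์€ ๊ฐ๊ฐ ์ „์šฉ ํด๋ž˜์Šค(`YOLO`, `RTDETR`, `SAM`, `FastSAM` ๋“ฑ)๋กœ ๋กœ๋“œํ•˜์ง€๋งŒ ๋™์ผํ•œ ์˜ˆ์ธก ์ธํ„ฐํŽ˜์ด์Šค๋ฅผ ๊ณต์œ ํ•ฉ๋‹ˆ๋‹ค. ์•„๋ž˜๋Š” ์ด๋ฅผ ๋ณด์—ฌ์ฃผ๋Š” ์ตœ์†Œํ•œ์˜ ์Šค์ผ€์น˜์ด๋ฉฐ, ๊ฐ ๊ฐ€์ค‘์น˜ ํŒŒ์ผ์€ ์ฒซ ์‹คํ–‰ ์‹œ ๋‹ค์šด๋กœ๋“œ ๊ฐ€๋Šฅํ•˜๋‹ค๊ณ  ๊ฐ€์ •ํ•ฉ๋‹ˆ๋‹ค.
+
+```python
+from ultralytics import YOLO, RTDETR, SAM, FastSAM
+
+# ๋ชจ๋ธ๊ตฐ๋ณ„ ์ „์šฉ ํด๋ž˜์Šค๋กœ ๊ฐ€์ค‘์น˜ ๋กœ๋“œ
+yolo = YOLO('yolov8n.pt')          # YOLO ๊ณ„์—ด
+rtdetr = RTDETR('rtdetr-l.pt')     # RT-DETR
+fastsam = FastSAM('FastSAM-s.pt')  # FastSAM
+sam = SAM('sam_b.pt')              # SAM(ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜ ๋ถ„ํ• )
+
+# ๋ชจ๋“  ๋ชจ๋ธ์€ ๋™์ผํ•œ ํ˜ธ์ถœ ๋ฐฉ์‹์œผ๋กœ ์˜ˆ์ธก์„ ์‹คํ–‰
+for model in (yolo, rtdetr, fastsam):
+    model('https://ultralytics.com/images/bus.jpg')
+
+# SAM์€ ๋ฐ•์Šค/ํฌ์ธํŠธ ํ”„๋กฌํ”„ํŠธ๋ฅผ ํ•จ๊ป˜ ์ „๋‹ฌํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค
+sam('https://ultralytics.com/images/bus.jpg', bboxes=[439, 437, 524, 709])
+```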
+ +## ์‹œ์ž‘ํ•˜๊ธฐ: ์‚ฌ์šฉ ์˜ˆ์ œ + +์ด ์˜ˆ์ œ๋Š” YOLO ํ•™์Šต๊ณผ ์ถ”๋ก ์— ๋Œ€ํ•œ ๊ฐ„๋‹จํ•œ ์˜ˆ์ œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด์— ๋Œ€ํ•œ ์ „์ฒด ๋ฌธ์„œ๋Š” [์˜ˆ์ธก](../modes/predict.md), [ํ•™์Šต](../modes/train.md), [๊ฒ€์ฆ](../modes/val.md), [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md) ๋ฌธ์„œ ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +์•„๋ž˜ ์˜ˆ์ œ๋Š” ๊ฐ์ฒด ๊ฐ์ง€๋ฅผ ์œ„ํ•œ YOLOv8 [๊ฐ์ง€](../tasks/detect.md) ๋ชจ๋ธ์— ๋Œ€ํ•œ ๊ฒƒ์ž…๋‹ˆ๋‹ค. ์ถ”๊ฐ€์ ์œผ๋กœ ์ง€์›๋˜๋Š” ์ž‘์—…๋“ค์€ [๋ถ„ํ• ](../tasks/segment.md), [๋ถ„๋ฅ˜](../tasks/classify.md), [์ž์„ธ](../tasks/pose.md) ๋ฌธ์„œ๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + PyTorch๋กœ ์‚ฌ์ „ ํ•™์Šต๋œ `*.pt` ๋ชจ๋ธ๋“ค๊ณผ ๊ตฌ์„ฑ `*.yaml` ํŒŒ์ผ๋“ค์€ `YOLO()`, `SAM()`, `NAS()`, `RTDETR()` ํด๋ž˜์Šค์— ์ „๋‹ฌํ•˜์—ฌ ํŒŒ์ด์ฌ์—์„œ ๋ชจ๋ธ ์ธ์Šคํ„ด์Šค๋ฅผ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค: + + ```python + from ultralytics import YOLO + + # COCO๋กœ ์‚ฌ์ „ ํ•™์Šต๋œ YOLOv8n ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ์‚ฌํ•ญ) + model.info() + + # COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์— ๋Œ€ํ•ด 100 ์—ํฌํฌ ๋™์•ˆ ๋ชจ๋ธ ํ•™์Šต + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # 'bus.jpg' ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ YOLOv8n ๋ชจ๋ธ ์ถ”๋ก  ์‹คํ–‰ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ๋ชจ๋ธ์„ ์ง์ ‘ ์‹คํ–‰ํ•˜๊ธฐ ์œ„ํ•œ CLI ๋ช…๋ น์–ด๊ฐ€ ์ œ๊ณต๋ฉ๋‹ˆ๋‹ค: + + ```bash + # COCO๋กœ ์‚ฌ์ „ ํ•™์Šต๋œ YOLOv8n ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์™€ COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์—์„œ 100 ์—ํฌํฌ ๋™์•ˆ ํ•™์Šต + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO๋กœ ์‚ฌ์ „ ํ•™์Šต๋œ YOLOv8n ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์™€ 'bus.jpg' ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์ถ”๋ก  ์‹คํ–‰ + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ์ƒˆ๋กœ์šด ๋ชจ๋ธ ๊ธฐ์—ฌํ•˜๊ธฐ + +Ultralytics์— ์—ฌ๋Ÿฌ๋ถ„์˜ ๋ชจ๋ธ์„ ๊ธฐ์—ฌํ•˜๊ณ  ์‹ถ์œผ์‹ ๊ฐ€์š”? ํ›Œ๋ฅญํ•ฉ๋‹ˆ๋‹ค! ์šฐ๋ฆฌ๋Š” ํ•ญ์ƒ ๋ชจ๋ธ ํฌํŠธํด๋ฆฌ์˜ค๋ฅผ ํ™•์žฅํ•˜๋Š” ๊ฒƒ์— ์—ด๋ ค ์žˆ์Šต๋‹ˆ๋‹ค. + +1. **์ €์žฅ์†Œ ํฌํฌํ•˜๊ธฐ**: [Ultralytics GitHub ์ €์žฅ์†Œ](https://github.com/ultralytics/ultralytics)๋ฅผ ํฌํฌํ•˜์—ฌ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค. + +2. **ํฌํฌ ๋ณต์ œํ•˜๊ธฐ**: ํฌํฌํ•œ ์ €์žฅ์†Œ๋ฅผ ๋กœ์ปฌ ๊ธฐ๊ณ„์— ๋ณต์ œํ•˜๊ณ  ์ƒˆ๋กœ์šด ๋ธŒ๋žœ์น˜๋ฅผ ์ƒ์„ฑํ•˜์—ฌ ์ž‘์—…ํ•ฉ๋‹ˆ๋‹ค. + +3. **๋ชจ๋ธ ๊ตฌํ˜„ํ•˜๊ธฐ**: ์šฐ๋ฆฌ์˜ [๊ธฐ์—ฌ ๊ฐ€์ด๋“œ](../../help/contributing.md)์— ์ œ๊ณต๋œ ์ฝ”๋”ฉ ํ‘œ์ค€ ๋ฐ ๊ฐ€์ด๋“œ๋ผ์ธ์„ ๋”ฐ๋ผ ๋ชจ๋ธ์„ ์ถ”๊ฐ€ํ•ฉ๋‹ˆ๋‹ค. + +4. **์ฒ ์ €ํžˆ ํ…Œ์ŠคํŠธํ•˜๊ธฐ**: ๋…๋ฆฝ์ ์œผ๋กœ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ ํŒŒ์ดํ”„๋ผ์ธ์˜ ์ผ๋ถ€๋กœ๋„ ๋ชจ๋ธ์„ ์ฒ ์ €ํžˆ ํ…Œ์ŠคํŠธํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. + +5. **ํ’€ ๋ฆฌํ€˜์ŠคํŠธ ์ƒ์„ฑํ•˜๊ธฐ**: ๋ชจ๋ธ์— ๋งŒ์กฑํ•˜๊ฒŒ ๋˜๋ฉด, ๋ฆฌ๋ทฐ๋ฅผ ์œ„ํ•ด ๋ฉ”์ธ ์ €์žฅ์†Œ์— ํ’€ ๋ฆฌํ€˜์ŠคํŠธ๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค. + +6. **์ฝ”๋“œ ๋ฆฌ๋ทฐ & ๋ณ‘ํ•ฉ**: ๋ฆฌ๋ทฐ ํ›„, ์—ฌ๋Ÿฌ๋ถ„์˜ ๋ชจ๋ธ์ด ์šฐ๋ฆฌ ๊ธฐ์ค€์— ๋ถ€ํ•ฉํ•œ๋‹ค๋ฉด ๋ฉ”์ธ ์ €์žฅ์†Œ์— ๋ณ‘ํ•ฉ๋ฉ๋‹ˆ๋‹ค. + +์ž์„ธํ•œ ๋‹จ๊ณ„๋Š” [๊ธฐ์—ฌ ๊ฐ€์ด๋“œ](../../help/contributing.md)๋ฅผ ์ฐธ์กฐํ•ด์ฃผ์‹ญ์‹œ์˜ค. 
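+์œ„ 4๋‹จ๊ณ„(์ฒ ์ €ํžˆ ํ…Œ์ŠคํŠธํ•˜๊ธฐ)๋ฅผ ๋•๋Š” ์ฐธ๊ณ ์šฉ ์Šค๋ชจํฌ ํ…Œ์ŠคํŠธ ์Šค์ผ€์น˜์ž…๋‹ˆ๋‹ค. ๊ณต์‹ ํ…Œ์ŠคํŠธ ์ ˆ์ฐจ๋Š” ์•„๋‹ˆ๋ฉฐ, ํ›ˆ๋ จ, ๊ฒ€์ฆ, ์˜ˆ์ธก, ๋‚ด๋ณด๋‚ด๊ธฐ ์ „ ๊ณผ์ •์ด ์˜ค๋ฅ˜ ์—†์ด ๋™์ž‘ํ•˜๋Š”์ง€ ๋น ๋ฅด๊ฒŒ ํ™•์ธํ•˜๋Š” ์˜ˆ์‹œ์ž…๋‹ˆ๋‹ค. `yolov8n.pt`์™€ `coco8.yaml`์€ ์„ค๋ช…์„ ์œ„ํ•ด ๊ณ ๋ฅธ ๊ฐ’์ž…๋‹ˆ๋‹ค.
+
+```python
+from ultralytics import YOLO
+
+def smoke_test(weights='yolov8n.pt'):
+    """ํ›ˆ๋ จ, ๊ฒ€์ฆ, ์˜ˆ์ธก, ๋‚ด๋ณด๋‚ด๊ธฐ ์ „ ๊ณผ์ •์ด ์˜ค๋ฅ˜ ์—†์ด ๋„๋Š”์ง€ ๋น ๋ฅด๊ฒŒ ํ™•์ธํ•ฉ๋‹ˆ๋‹ค."""
+    model = YOLO(weights)
+    model.train(data='coco8.yaml', epochs=1, imgsz=320)  # ์ดˆ์†Œํ˜• ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ 1 ์—ํฌํฌ๋งŒ ํ›ˆ๋ จ
+    metrics = model.val(data='coco8.yaml', imgsz=320)    # ๊ฒ€์ฆ ์ง€ํ‘œ ๊ณ„์‚ฐ
+    model('https://ultralytics.com/images/bus.jpg')      # ๋‹จ์ผ ์ด๋ฏธ์ง€ ์ถ”๋ก 
+    model.export(format='onnx')                          # ONNX ๋‚ด๋ณด๋‚ด๊ธฐ ํ™•์ธ
+    return metrics
+
+if __name__ == '__main__':
+    smoke_test()
+```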
diff --git a/ultralytics/docs/ko/models/index.md:Zone.Identifier b/ultralytics/docs/ko/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/mobile-sam.md b/ultralytics/docs/ko/models/mobile-sam.md new file mode 100755 index 0000000..4e9169b --- /dev/null +++ b/ultralytics/docs/ko/models/mobile-sam.md @@ -0,0 +1,116 @@ +--- +comments: true +description: Ultralytics ํ”„๋ ˆ์ž„์›Œํฌ์—์„œ MobileSAM์„ ๋‹ค์šด๋กœ๋“œํ•˜๊ณ  ํ…Œ์ŠคํŠธํ•˜๋Š” ๋ฐฉ๋ฒ•, MobileSAM์˜ ๊ตฌํ˜„ ๋ฐฉ์‹, ์›๋ณธ SAM๊ณผ์˜ ๋น„๊ต, ๋ชจ๋ฐ”์ผ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ํ–ฅ์ƒ ๋“ฑ์— ๋Œ€ํ•ด ์ž์„ธํžˆ ์•Œ์•„๋ณด์„ธ์š”. ์˜ค๋Š˜๋ถ€ํ„ฐ ๋ชจ๋ฐ”์ผ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์„ ๊ฐœ์„ ํ•˜์„ธ์š”. +keywords: MobileSAM, Ultralytics, SAM, ๋ชจ๋ฐ”์ผ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜, Arxiv, GPU, API, ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”, ๋งˆ์Šคํฌ ๋””์ฝ”๋”, ๋ชจ๋ธ ๋‹ค์šด๋กœ๋“œ, ํ…Œ์ŠคํŠธ ๋ฐฉ๋ฒ• +--- + +![MobileSAM ๋กœ๊ณ ](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# Mobile Segment Anything (MobileSAM) + +MobileSAM ๋…ผ๋ฌธ์€ ์ด์ œ [arXiv](https://arxiv.org/pdf/2306.14289.pdf)์—์„œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +MobileSAM์„ CPU์—์„œ ์‹คํ–‰ํ•˜๋Š” ๋ฐ๋ชจ๋Š” ์ด [๋ฐ๋ชจ ๋งํฌ](https://huggingface.co/spaces/dhkim2810/MobileSAM)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. Mac i5 CPU์—์„œ์˜ ์„ฑ๋Šฅ์€ ์•ฝ 3์ดˆ์ž…๋‹ˆ๋‹ค. Hugging Face ๋ฐ๋ชจ์—์„œ๋Š” ์ธํ„ฐํŽ˜์ด์Šค์™€ ๋‚ฎ์€ ์„ฑ๋Šฅ์˜ CPU๊ฐ€ ๋А๋ฆฐ ์‘๋‹ต์œผ๋กœ ์ด์–ด์ง€์ง€๋งŒ, ์—ฌ์ „ํžˆ ํšจ๊ณผ์ ์œผ๋กœ ์ž‘๋™ํ•ฉ๋‹ˆ๋‹ค. + +MobileSAM์€ [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling) ๋ฐ [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D)๋ฅผ ๋น„๋กฏํ•œ ์—ฌ๋Ÿฌ ํ”„๋กœ์ ํŠธ์—์„œ ๊ตฌํ˜„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +MobileSAM์€ 1%์˜ ์›๋ณธ ์ด๋ฏธ์ง€๋กœ ๊ตฌ์„ฑ๋œ 100k ๋ฐ์ดํ„ฐ์…‹์—์„œ ํ•˜๋ฃจ ์ด๋‚ด์— ๋‹จ์ผ GPU๋กœ ํ•™์Šต๋ฉ๋‹ˆ๋‹ค. ์ด ํ•™์Šต์„ ์œ„ํ•œ ์ฝ”๋“œ๋Š” ๋‚˜์ค‘์— ๊ณต๊ฐœ๋  ์˜ˆ์ •์ž…๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ, ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ์ž‘๋™ ๋ชจ๋“œ + +์ด ํ‘œ์—์„œ๋Š” ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ๊ณผ ํ•ด๋‹น ๋ชจ๋ธ์— ๋Œ€ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜, ์ง€์›๋˜๋Š” ์ž‘์—…, [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) ๋ฐ [Export](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ์ž‘๋™ ๋ชจ๋“œ์˜ ํ˜ธํ™˜์„ฑ์„ ๋‚˜ํƒ€๋ƒ…๋‹ˆ๋‹ค. ์ง€์›๋˜๋Š” ๋ชจ๋“œ๋Š” โœ… ์ด๋ชจ์ง€๋กœ ํ‘œ์‹œ๋˜๊ณ , ์ง€์›๋˜์ง€ ์•Š๋Š” ๋ชจ๋“œ๋Š” โŒ ์ด๋ชจ์ง€๋กœ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ ์œ ํ˜• | ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜ | ์ง€์›๋˜๋Š” ์ž‘์—… | Inference | Validation | Training | Export | +|-----------|-----------------|------------------------------------|-----------|------------|----------|--------| +| MobileSAM | `mobile_sam.pt` | [์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## SAM์—์„œ MobileSAM์œผ๋กœ์˜ ์ ์‘ + +MobileSAM์€ ์›๋ณธ SAM๊ณผ ๋™์ผํ•œ ํŒŒ์ดํ”„๋ผ์ธ์„ ์œ ์ง€ํ•˜๋ฏ€๋กœ, ์›๋ณธ์˜ ์ „์ฒ˜๋ฆฌ, ํ›„์ฒ˜๋ฆฌ ๋ฐ ๋ชจ๋“  ๋‹ค๋ฅธ ์ธํ„ฐํŽ˜์ด์Šค๋ฅผ ํ†ตํ•ฉํ–ˆ์Šต๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ ํ˜„์žฌ ์›๋ณธ SAM์„ ์‚ฌ์šฉ ์ค‘์ธ ๊ฒฝ์šฐ, MobileSAM์œผ๋กœ ์ „ํ™˜ํ•˜๋Š” ๋ฐ ์ตœ์†Œํ•œ์˜ ๋…ธ๋ ฅ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค. + +MobileSAM์€ ์›๋ณธ SAM๊ณผ ๋น„๊ต ๊ฐ€๋Šฅํ•œ ์„ฑ๋Šฅ์„ ๋ฐœํœ˜ํ•˜๋ฉฐ, ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”๋งŒ ๋ณ€๊ฒฝ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ๊ตฌ์ฒด์ ์œผ๋กœ, ์›๋ณธ์˜ ๋ฌด๊ฑฐ์šด ViT-H ์ธ์ฝ”๋” (632M)๋ฅผ ๋” ์ž‘์€ Tiny-ViT (5M)๋กœ ๋Œ€์ฒดํ–ˆ์Šต๋‹ˆ๋‹ค. 
๋‹จ์ผ GPU์—์„œ MobileSAM์€ ์ด๋ฏธ์ง€ ๋‹น ์•ฝ 12ms์˜ ์ž‘์—… ์‹œ๊ฐ„์ด ์†Œ์š”๋ฉ๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”์—๋Š” 8ms๊ฐ€ ์†Œ์š”๋˜๊ณ , ๋งˆ์Šคํฌ ๋””์ฝ”๋”์—๋Š” 4ms๊ฐ€ ์†Œ์š”๋ฉ๋‹ˆ๋‹ค. + +๋‹ค์Œ ํ‘œ๋Š” ViT ๊ธฐ๋ฐ˜ ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”๋ฅผ ๋น„๊ตํ•ฉ๋‹ˆ๋‹ค: + +| ์ด๋ฏธ์ง€ ์ธ์ฝ”๋” | ์›๋ณธ SAM | MobileSAM | +|---------|--------|-----------| +| ๋งค๊ฐœ๋ณ€์ˆ˜ | 611M | 5M | +| ์†๋„ | 452ms | 8ms | + +์›๋ณธ SAM๊ณผ MobileSAM์€ ๋™์ผํ•œ ํ”„๋กฌํ”„ํŠธ ๊ฐ€์ด๋“œ ๋งˆ์Šคํฌ ๋””์ฝ”๋”๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค: + +| ๋งˆ์Šคํฌ ๋””์ฝ”๋” | ์›๋ณธ SAM | MobileSAM | +|---------|--------|-----------| +| ๋งค๊ฐœ๋ณ€์ˆ˜ | 3.876M | 3.876M | +| ์†๋„ | 4ms | 4ms | + +์ „์ฒด ํŒŒ์ดํ”„๋ผ์ธ์˜ ๋น„๊ต๋Š” ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค: + +| ์ „์ฒด ํŒŒ์ดํ”„๋ผ์ธ (์ธ์ฝ”๋”+๋””์ฝ”๋”) | ์›๋ณธ SAM | MobileSAM | +|--------------------|--------|-----------| +| ๋งค๊ฐœ๋ณ€์ˆ˜ | 615M | 9.66M | +| ์†๋„ | 456ms | 12ms | + +MobileSAM๊ณผ ์›๋ณธ SAM์˜ ์„ฑ๋Šฅ์€ ํฌ์ธํŠธ ๋ฐ ๋ฐ•์Šค๋ฅผ ์‚ฌ์šฉํ•œ ํ”„๋กฌํ”„ํŠธ๋ฅผ ํ†ตํ•ด ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +![ํฌ์ธํŠธ ํ”„๋กฌํ”„ํŠธ๊ฐ€ ์žˆ๋Š” ์ด๋ฏธ์ง€](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![๋ฐ•์Šค ํ”„๋กฌํ”„ํŠธ๊ฐ€ ์žˆ๋Š” ์ด๋ฏธ์ง€](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +MobileSAM์€ ์šฐ์ˆ˜ํ•œ ์„ฑ๋Šฅ์„ ์ž๋ž‘ํ•˜๋ฉฐ, ํ˜„์žฌ์˜ FastSAM๋ณด๋‹ค ์•ฝ 5๋ฐฐ ์ž‘๊ณ  7๋ฐฐ ๋น ๋ฆ…๋‹ˆ๋‹ค. ์ž์„ธํ•œ ๋‚ด์šฉ์€ [MobileSAM ํ”„๋กœ์ ํŠธ ํŽ˜์ด์ง€](https://github.com/ChaoningZhang/MobileSAM)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## Ultralytics์—์„œ MobileSAM ํ…Œ์ŠคํŠธ + +์›๋ณธ SAM๊ณผ ๋งˆ์ฐฌ๊ฐ€์ง€๋กœ, ํฌ์ธํŠธ ๋ฐ ๋ฐ•์Šค ํ”„๋กฌํ”„ํŠธ ๋ชจ๋“œ๋ฅผ ํฌํ•จํ•œ Ultralytics์—์„œ ๊ฐ„๋‹จํ•œ ํ…Œ์ŠคํŠธ ๋ฐฉ๋ฒ•์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +### ๋ชจ๋ธ ๋‹ค์šด๋กœ๋“œ + +๋ชจ๋ธ์„ [์—ฌ๊ธฐ](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt)์—์„œ ๋‹ค์šด๋กœ๋“œํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +### ํฌ์ธํŠธ ํ”„๋กฌํ”„ํŠธ + +!!! Example "์˜ˆ์ œ" + + === "Python" + ```python + from ultralytics import SAM + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = SAM('mobile_sam.pt') + + # ํฌ์ธํŠธ ํ”„๋กฌํ”„ํŠธ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์„ธ๊ทธ๋จผํŠธ ์˜ˆ์ธก + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### ๋ฐ•์Šค ํ”„๋กฌํ”„ํŠธ + +!!! Example "์˜ˆ์ œ" + + === "Python" + ```python + from ultralytics import SAM + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = SAM('mobile_sam.pt') + + # ๋ฐ•์Šค ํ”„๋กฌํ”„ํŠธ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์„ธ๊ทธ๋จผํŠธ ์˜ˆ์ธก + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +`MobileSAM`๊ณผ `SAM`์€ ๋™์ผํ•œ API๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๊ตฌํ˜„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ๋” ๋งŽ์€ ์‚ฌ์šฉ๋ฒ•์— ๋Œ€ํ•ด์„œ๋Š” [SAM ํŽ˜์ด์ง€](sam.md)๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๊ธ€ + +MobileSAM์ด ์—ฐ๊ตฌ ๋˜๋Š” ๊ฐœ๋ฐœ์— ์œ ์šฉํ•˜๊ฒŒ ์‚ฌ์šฉ๋œ ๊ฒฝ์šฐ, ๋‹ค์Œ์˜ ๋…ผ๋ฌธ์„ ์ธ์šฉํ•ด ์ฃผ์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } diff --git a/ultralytics/docs/ko/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/ko/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/rtdetr.md b/ultralytics/docs/ko/models/rtdetr.md new file mode 100755 index 0000000..904fc73 --- /dev/null +++ b/ultralytics/docs/ko/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: ๋น„๋‘˜๊ธฐ(Baidu)๊ฐ€ ๊ฐœ๋ฐœํ•œ RT-DETR์€ ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ(Vision Transformers)๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ํ•œ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๋กœ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์‹œ๊ฐ„์ง€์—ฐ์ด ์—†๋Š” ๊ณ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +keywords: RT-DETR, ๋น„๋‘˜๊ธฐ, ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ, ๊ฐ์ฒด ๊ฒ€์ถœ, ์‹ค์‹œ๊ฐ„ ์„ฑ๋Šฅ, CUDA, TensorRT, IoU-aware query selection, Ultralytics, ํŒŒ์ด์ฌ API, PaddlePaddle +--- + +# ๋น„๋‘˜๊ธฐ์˜ RT-DETR: ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ ๊ธฐ๋ฐ˜ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ + +## ๊ฐœ์š” + +๋น„๋‘˜๊ธฐ(Baidu)๊ฐ€ ๊ฐœ๋ฐœํ•œ Real-Time Detection Transformer(RT-DETR)์€ ๊ณ ์ •๋ฐ€๋„๋ฅผ ์œ ์ง€ํ•˜๋ฉด์„œ ์‹ค์‹œ๊ฐ„ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•˜๋Š” ์ฒจ๋‹จ ์—”๋“œ ํˆฌ ์—”๋“œ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ์ž…๋‹ˆ๋‹ค. ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ(Vision Transformers, ViT)์˜ ์„ฑ๋Šฅ์„ ํ™œ์šฉํ•˜์—ฌ, ๋‹ค์ค‘ ์Šค์ผ€์ผ ํŠน์ง•์„ ํšจ์œจ์ ์œผ๋กœ ์ฒ˜๋ฆฌํ•  ์ˆ˜ ์žˆ๋„๋ก ์ธํŠธ๋ผ ์Šค์ผ€์ผ ์ƒํ˜ธ ์ž‘์šฉ๊ณผ ํฌ๋กœ์Šค ์Šค์ผ€์ผ ํ“จ์ „์„ ๋ถ„๋ฆฌํ•ฉ๋‹ˆ๋‹ค. RT-DETR์€ ๋‹ค์–‘ํ•œ ๋””์ฝ”๋” ๋ ˆ์ด์–ด๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์ถ”๋ก  ์†๋„๋ฅผ ์œ ์—ฐํ•˜๊ฒŒ ์กฐ์ •ํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ ์žฌํ›ˆ๋ จ ์—†์ด ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ์— ์ ์šฉํ•˜๊ธฐ์— ๋งค์šฐ ์ ํ•ฉํ•ฉ๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ CUDA์™€ TensorRT์™€ ๊ฐ™์€ ๊ฐ€์†ํ™”๋œ ๋ฐฑ์—”๋“œ์—์„œ ๋งŽ์€ ๋‹ค๋ฅธ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๋ณด๋‹ค ๋›ฐ์–ด๋‚œ ์„ฑ๋Šฅ์„ ๋ฐœํœ˜ํ•ฉ๋‹ˆ๋‹ค. + +![๋ชจ๋ธ ์˜ˆ์‹œ ์ด๋ฏธ์ง€](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png) +**๋น„๋‘˜๊ธฐ์˜ RT-DETR ๊ฐœ์š”** ๋น„๋‘˜๊ธฐ์˜ RT-DETR ๋ชจ๋ธ ๊ตฌ์กฐ ๋‹ค์ด์–ด๊ทธ๋žจ์€ ๋ฐฑ๋ณธ ๋„คํŠธ์›Œํฌ์˜ ๋งˆ์ง€๋ง‰ ์„ธ ๋‹จ๊ณ„ {S3, S4, S5}๋ฅผ ์ธ์ฝ”๋”์˜ ์ž…๋ ฅ์œผ๋กœ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. ํšจ์œจ์ ์ธ ํ•˜์ด๋ธŒ๋ฆฌ๋“œ ์ธ์ฝ”๋”๋Š” ์ธํŠธ๋ผ์Šค์ผ€์ผ ํŠน์ง• ์ƒํ˜ธ ์ž‘์šฉ(AIFI, intrascale feature interaction)๊ณผ ํฌ๋กœ์Šค ์Šค์ผ€์ผ ํŠน์ง• ํ“จ์ „ ๋ชจ๋“ˆ(CCFM, cross-scale feature-fusion module)์„ ํ†ตํ•ด ๋‹ค์ค‘ ์Šค์ผ€์ผ ํŠน์ง•์„ ์ด๋ฏธ์ง€ ํŠน์ง•์˜ ์‹œํ€€์Šค๋กœ ๋ณ€ํ™˜ํ•ฉ๋‹ˆ๋‹ค. IoU-aware query selection์€ ๋””์ฝ”๋”์— ๋Œ€ํ•œ ์ดˆ๊ธฐ ๊ฐ์ฒด ์ฟผ๋ฆฌ๋กœ ์ž‘๋™ํ•˜๊ธฐ ์œ„ํ•ด ์ผ์ •ํ•œ ์ˆ˜์˜ ์ด๋ฏธ์ง€ ํŠน์ง•์„ ์„ ํƒํ•˜๋Š” ๋ฐ ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. ๋งˆ์ง€๋ง‰์œผ๋กœ, ๋ณด์กฐ ์˜ˆ์ธก ํ—ค๋“œ์™€ ํ•จ๊ป˜ ๋””์ฝ”๋”๋Š” ๊ฐ์ฒด ์ฟผ๋ฆฌ๋ฅผ ๋ฐ˜๋ณตํ•˜์—ฌ ๋ฐ•์Šค์™€ ์‹ ๋ขฐ๋„ ์ ์ˆ˜๋ฅผ ์ตœ์ ํ™”ํ•ฉ๋‹ˆ๋‹ค. ([์›๋ฌธ](https://arxiv.org/pdf/2304.08069.pdf) ์ฐธ์กฐ). + +### ์ฃผ์š” ๊ธฐ๋Šฅ + +- **ํšจ์œจ์ ์ธ ํ•˜์ด๋ธŒ๋ฆฌ๋“œ ์ธ์ฝ”๋”:** ๋น„๋‘˜๊ธฐ์˜ RT-DETR์€ ๋‹ค์ค‘ ์Šค์ผ€์ผ ํŠน์ง•์„ ์ธํŠธ๋ผ ์Šค์ผ€์ผ ์ƒํ˜ธ ์ž‘์šฉ๊ณผ ํฌ๋กœ์Šค ์Šค์ผ€์ผ ํ“จ์ „์„ ๋ถ„๋ฆฌํ•˜์—ฌ ์ฒ˜๋ฆฌํ•˜๋Š” ํšจ์œจ์ ์ธ ํ•˜์ด๋ธŒ๋ฆฌ๋“œ ์ธ์ฝ”๋”๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์ด ๋…ํŠนํ•œ ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ ๊ธฐ๋ฐ˜ ๋””์ž์ธ์€ ๊ณ„์‚ฐ ๋น„์šฉ์„ ์ค„์ด๊ณ  ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ์ด ๊ฐ€๋Šฅํ•˜๋„๋ก ํ•ฉ๋‹ˆ๋‹ค. 
+- **IoU-aware ์ฟผ๋ฆฌ ์„ ํƒ:** ๋น„๋‘˜๊ธฐ์˜ RT-DETR์€ IoU-aware ์ฟผ๋ฆฌ ์„ ํƒ์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ฐ์ฒด ์ฟผ๋ฆฌ ์ดˆ๊ธฐํ™”๋ฅผ ๊ฐœ์„ ํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฅผ ํ†ตํ•ด ๋ชจ๋ธ์€ ์žฅ๋ฉด์—์„œ ๊ฐ€์žฅ ๊ด€๋ จ์„ฑ ์žˆ๋Š” ๊ฐ์ฒด์— ์ง‘์ค‘ํ•˜์—ฌ ๊ฒ€์ถœ ์ •ํ™•๋„๋ฅผ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค.
+- **์œ ์—ฐํ•œ ์ถ”๋ก  ์†๋„ ์กฐ์ •:** ๋น„๋‘˜๊ธฐ์˜ RT-DETR์€ ์žฌํ›ˆ๋ จ ์—†์ด ๋””์ฝ”๋” ๋ ˆ์ด์–ด ์ˆ˜๋ฅผ ๋ฐ”๊ฟ” ์ถ”๋ก  ์†๋„๋ฅผ ์œ ์—ฐํ•˜๊ฒŒ ์กฐ์ •ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ์ ์‘์„ฑ ๋•๋ถ„์— ๋‹ค์–‘ํ•œ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ ์‹œ๋‚˜๋ฆฌ์˜ค์— ์‹ค์šฉ์ ์œผ๋กœ ์ ์šฉํ•˜๊ธฐ ์‰ฝ์Šต๋‹ˆ๋‹ค.
+
+## ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ
+
+Ultralytics์˜ ํŒŒ์ด์ฌ API๋Š” ๋‹ค์–‘ํ•œ ์Šค์ผ€์ผ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ PaddlePaddle RT-DETR ๋ชจ๋ธ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค:
+
+- RT-DETR-L: COCO val2017์—์„œ 53.0% AP, T4 GPU์—์„œ 114 FPS
+- RT-DETR-X: COCO val2017์—์„œ 54.8% AP, T4 GPU์—์„œ 74 FPS
+
+## ์‚ฌ์šฉ ์˜ˆ์‹œ
+
+์ด ์˜ˆ์‹œ๋Š” ๊ฐ„๋‹จํ•œ RT-DETR ํ›ˆ๋ จ ๋ฐ ์ถ”๋ก  ์˜ˆ์‹œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด๋“ค๊ณผ ๋‹ค๋ฅธ ๋ชจ๋“œ์— ๋Œ€ํ•œ ์ž์„ธํ•œ ๋‚ด์šฉ์€ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), [Export](../modes/export.md) ๋ฌธ์„œ ํŽ˜์ด์ง€๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค.
+
+!!! Example "์˜ˆ์‹œ"
+
+    === "ํŒŒ์ด์ฌ"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ RT-DETR-l ๋ชจ๋ธ ๋กœ๋“œ
+        model = RTDETR('rtdetr-l.pt')
+
+        # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ ์‚ฌํ•ญ)
+        model.info()
+
+        # COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์— ๋Œ€ํ•ด 100 epoch ๋™์•ˆ ๋ชจ๋ธ ํ›ˆ๋ จ
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # 'bus.jpg' ์ด๋ฏธ์ง€์—์„œ RT-DETR-l ๋ชจ๋ธ๋กœ ์ถ”๋ก  ์‹คํ–‰
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ RT-DETR-l ๋ชจ๋ธ์„ ๋กœ๋“œํ•˜๊ณ  COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์—์„œ 100 epoch ๋™์•ˆ ํ›ˆ๋ จ
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ RT-DETR-l ๋ชจ๋ธ์„ ๋กœ๋“œํ•˜๊ณ  'bus.jpg' ์ด๋ฏธ์ง€์—์„œ ์ถ”๋ก  ์‹คํ–‰
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ๋ชจ๋“œ
+
+์ด ํ‘œ๋Š” ๊ฐ ๋ชจ๋ธ ์œ ํ˜•๊ณผ ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜, ์ง€์›ํ•˜๋Š” ์ž‘์—…, ๊ทธ๋ฆฌ๊ณ  [Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ๋ชจ๋“œ์˜ ์ง€์› ์—ฌ๋ถ€๋ฅผ ๋‚˜ํƒ€๋ƒ…๋‹ˆ๋‹ค. ์ง€์›๋˜๋Š” ๋ชจ๋“œ๋Š” ✅ ์ด๋ชจ์ง€๋กœ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.
+
+| ๋ชจ๋ธ ์œ ํ˜•             | ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜     | ์ง€์›๋˜๋Š” ์ž‘์—…                  | ์ถ”๋ก  | ๊ฒ€์ฆ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ |
+|---------------------|---------------|-----------------------------|----|----|----|------|
+| RT-DETR Large       | `rtdetr-l.pt` | [๊ฐ์ฒด ๊ฒ€์ถœ](../tasks/detect.md) | ✅  | ✅  | ✅  | ✅    |
+| RT-DETR Extra-Large | `rtdetr-x.pt` | [๊ฐ์ฒด ๊ฒ€์ถœ](../tasks/detect.md) | ✅  | ✅  | ✅  | ✅    |
+
+## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๋ง
+
+๋งŒ์•ฝ ์—ฐ๊ตฌ๋‚˜ ๊ฐœ๋ฐœ ์ž‘์—…์—์„œ ๋น„๋‘˜๊ธฐ(Baidu)์˜ RT-DETR์„ ์‚ฌ์šฉํ•œ๋‹ค๋ฉด, [์›๋ž˜ ๋…ผ๋ฌธ์„](https://arxiv.org/abs/2304.08069) ์ธ์šฉํ•ด์ฃผ์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค:
+
+!!!
Quote "" + + === "BibTeX" + + ```bibtex + @misc{lv2023detrs, + title={DETRs Beat YOLOs on Real-time Object Detection}, + author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu}, + year={2023}, + eprint={2304.08069}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +์ปดํ“จํ„ฐ ๋น„์ „ ์ปค๋ฎค๋‹ˆํ‹ฐ์—๊ฒŒ ๊ท€์ค‘ํ•œ ์ž๋ฃŒ์ธ ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ ๊ธฐ๋ฐ˜ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ์ธ ๋น„๋‘˜๊ธฐ(Baidu)์˜ RT-DETR์„ ๋งŒ๋“ค๊ณ  ์œ ์ง€ํ•˜๊ธฐ ์œ„ํ•ด ๋น„๋‘˜๊ธฐ์™€ [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) ํŒ€์—๊ฒŒ ๊ฐ์‚ฌ์˜ ์ธ์‚ฌ๋ฅผ ์ „ํ•ฉ๋‹ˆ๋‹ค. + +*Keywords: RT-DETR, Transformer, ViT, ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ, ๋น„๋‘˜๊ธฐ RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ, ๋น„์ „ ํŠธ๋žœ์Šคํฌ๋จธ ๊ธฐ๋ฐ˜ ๊ฐ์ฒด ๊ฒ€์ถœ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ PaddlePaddle RT-DETR ๋ชจ๋ธ, ๋น„๋‘˜๊ธฐ RT-DETR ์‚ฌ์šฉ๋ฒ•, Ultralytics ํŒŒ์ด์ฌ API* diff --git a/ultralytics/docs/ko/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/ko/models/rtdetr.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/rtdetr.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/sam.md b/ultralytics/docs/ko/models/sam.md new file mode 100755 index 0000000..55bfe93 --- /dev/null +++ b/ultralytics/docs/ko/models/sam.md @@ -0,0 +1,226 @@ +--- +comments: true +description: ์–ผํŠธ๋ผ๋ฆฌ์–ผ๋ฆฌํ‹ฑ์Šค(Ultralytics)์˜ ์ตœ์ฒจ๋‹จ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™” ๋ชจ๋ธ์ธ Segment Anything Model(SAM)์— ๋Œ€ํ•ด ์•Œ์•„๋ณด์„ธ์š”. ํ•ด๋‹น ๋ชจ๋ธ์€ ์‹ค์‹œ๊ฐ„ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”๋ฅผ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•˜๋ฉฐ, ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ด์šฉํ•œ ์„ธ๋ถ„ํ™”, ์ œ๋กœ์ƒท ์„ฑ๋Šฅ ๋ฐ ์‚ฌ์šฉ๋ฒ•์— ๋Œ€ํ•ด ์•Œ์•„๋ด…๋‹ˆ๋‹ค. +keywords: ์–ผํŠธ๋ผ๋ฆฌ์–ผ๋ฆฌํ‹ฑ์Šค, ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”, Segment Anything Model, SAM, SA-1B ๋ฐ์ดํ„ฐ์…‹, ์‹ค์‹œ๊ฐ„ ์„ฑ๋Šฅ, ์ œ๋กœ์ƒท ์ „์ด, ๊ฐ์ฒด ๊ฐ์ง€, ์ด๋ฏธ์ง€ ๋ถ„์„, ๋จธ์‹  ๋Ÿฌ๋‹ +--- + +# Segment Anything Model (SAM) + +Segment Anything Model(SAM) ์„ ์–ด์„œ ์˜ค์„ธ์š”. ์ด ํ˜์‹ ์ ์ธ ๋ชจ๋ธ์€ ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜์˜ ์‹ค์‹œ๊ฐ„ ์„ธ๋ถ„ํ™”๋ฅผ ํ†ตํ•ด ์„ธ๋ถ„ํ™” ๋ถ„์•ผ์—์„œ ์ƒˆ๋กœ์šด ๊ธฐ์ค€์„ ์„ธ์› ์Šต๋‹ˆ๋‹ค. + +## SAM ์†Œ๊ฐœ: Segment Anything Model์˜ ์†Œ๊ฐœ + +Segment Anything Model(SAM)์€ ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜์˜ ์„ธ๋ถ„ํ™”๋ฅผ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•˜๋Š” ๋›ฐ์–ด๋‚œ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™” ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. SAM์€ ์ด๋ฏธ์ง€ ์„ธ๋ถ„์„ ์ž‘์—…์—์„œ ๋…์ฐฝ์„ฑ์„ ๋ณด์—ฌ์ฃผ๋Š” Segment Anything ์ด๋‹ˆ์…”ํ‹ฐ๋ธŒ์˜ ํ•ต์‹ฌ์„ ํ˜•์„ฑํ•˜๊ณ  ์žˆ์œผ๋ฉฐ, ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”๋ฅผ ์œ„ํ•œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ, ์ž‘์—… ๋ฐ ๋ฐ์ดํ„ฐ์…‹์„ ์†Œ๊ฐœํ•˜๋Š” ํ˜์‹ ์ ์ธ ํ”„๋กœ์ ํŠธ์ž…๋‹ˆ๋‹ค. + +SAM์˜ ๊ณ ๊ธ‰์„ค๊ณ„๋Š” ๋ชจ๋ธ์ด ๊ธฐ์กด ์ง€์‹ ์—†์ด๋„ ์ƒˆ๋กœ์šด ์ด๋ฏธ์ง€ ๋ถ„ํฌ ๋ฐ ์ž‘์—…์— ๋Œ€์‘ํ•  ์ˆ˜ ์žˆ๋Š” ๊ธฐ๋Šฅ์ธ ์ œ๋กœ์ƒท ์ „์ด๋ฅผ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. 1,100๋งŒ ๊ฐœ์˜ ์ •๊ตํ•˜๊ฒŒ ์„ ๋ณ„๋œ ์ด๋ฏธ์ง€์— ๋ถ„ํฌ๋œ 10์–ต ๊ฐœ ์ด์ƒ์˜ ๋งˆ์Šคํฌ๋ฅผ ํฌํ•จํ•œ SA-1B ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํ•™์Šต๋œ SAM์€ ๋งŽ์€ ๊ฒฝ์šฐ์— ์ „์ ์œผ๋กœ ๊ฐ๋…๋œ ํ•™์Šต ๊ฒฐ๊ณผ๋ฅผ ๋Šฅ๊ฐ€ํ•˜๋Š” ์ธ์ƒ์ ์ธ ์ œ๋กœ์ƒท ์„ฑ๋Šฅ์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. + +![๋ฐ์ดํ„ฐ์…‹ ์ƒ˜ํ”Œ ์ด๋ฏธ์ง€](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +์ƒˆ๋กญ๊ฒŒ ๋„์ž…๋œ SA-1B ๋ฐ์ดํ„ฐ์…‹์—์„œ ์˜ค๋ฒ„๋ ˆ์ด๋œ ๋งˆ์Šคํฌ๋ฅผ ํฌํ•จํ•œ ์˜ˆ์‹œ ์ด๋ฏธ์ง€์ž…๋‹ˆ๋‹ค. SA-1B๋Š” ๋‹ค์–‘ํ•œ ๊ณ ํ•ด์ƒ๋„์˜ ์ด๋ฏธ์ง€๋ฅผ ๋ผ์ด์„ ์Šค ๋ณดํ˜ธํ•˜๋ฉฐ ์‚ฌ์ƒํ™œ์„ ๋ณดํ˜ธํ•˜๊ณ  ์žˆ์œผ๋ฉฐ, 1,100๋งŒ ๊ฐœ์˜ ๊ณ ํ’ˆ์งˆ ์„ธ๋ถ„ํ™” ๋งˆ์Šคํฌ๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. 
์ด๋Ÿฌํ•œ ๋งˆ์Šคํฌ๋Š” SAM์— ์˜ํ•ด ์ž๋™์œผ๋กœ ์ฃผ์„์ด ๋‹ฌ๋ ธ์œผ๋ฉฐ, ์ธ๊ฐ„ ํ‰๊ฐ€ ๋ฐ ๋‹ค์–‘ํ•œ ์‹คํ—˜์„ ํ†ตํ•ด ๋†’์€ ํ’ˆ์งˆ๊ณผ ๋‹ค์–‘์„ฑ์„ ๊ฐ–์ถ”์—ˆ์Œ์ด ๊ฒ€์ฆ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์‹œ๊ฐํ™”๋ฅผ ์œ„ํ•ด ์ด๋ฏธ์ง€๋Š” ์ด๋ฏธ์ง€ ๋‹น ํ‰๊ท  100๊ฐœ์˜ ๋งˆ์Šคํฌ๋กœ ๊ทธ๋ฃนํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +## Segment Anything Model (SAM)์˜ ์ฃผ์š” ๊ธฐ๋Šฅ + +- **ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜ ์„ธ๋ถ„ํ™” ์ž‘์—…:** SAM์€ ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜์˜ ์„ธ๋ถ„ํ™” ์ž‘์—…์„ ์œ„ํ•ด ์„ค๊ณ„๋˜์–ด, ๊ณต๊ฐ„ ๋˜๋Š” ํ…์ŠคํŠธ ๋‹จ์„œ๋ฅผ ์ด์šฉํ•˜์—ฌ ๊ฐœ์ฒด๋ฅผ ์‹๋ณ„ํ•ฉ๋‹ˆ๋‹ค. +- **๊ณ ๊ธ‰์„ค๊ณ„:** Segment Anything Model์€ ๊ฐ•๋ ฅํ•œ ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”, ํ”„๋กฌํ”„ํŠธ ์ธ์ฝ”๋” ๋ฐ ๊ฐ€๋ฒผ์šด ๋งˆ์Šคํฌ ๋””์ฝ”๋”๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์ด ๋…ํŠนํ•œ ์•„ํ‚คํ…์ฒ˜๋Š” ์œ ์—ฐํ•œ ํ”„๋กฌํ”„ํŒ…, ์‹ค์‹œ๊ฐ„ ๋งˆ์Šคํฌ ๊ณ„์‚ฐ ๋ฐ ์„ธ๋ถ„ํ™” ์ž‘์—…์—์„œ์˜ ๋ชจํ˜ธ์„ฑ ์ธ์‹์„ ๊ฐ€๋Šฅ์ผ€ ํ•ฉ๋‹ˆ๋‹ค. +- **SA-1B ๋ฐ์ดํ„ฐ์…‹:** Segment Anything ํ”„๋กœ์ ํŠธ์—์„œ ์†Œ๊ฐœ๋œ SA-1B ๋ฐ์ดํ„ฐ์…‹์€ 1,100๋งŒ ๊ฐœ์˜ ์ด๋ฏธ์ง€์— 10์–ต ๊ฐœ ์ด์ƒ์˜ ์„ธ๋ถ„ํ™” ๋งˆ์Šคํฌ๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” ์ง€๊ธˆ๊นŒ์ง€ ๊ฐ€์žฅ ํฐ ์„ธ๋ถ„ํ™” ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ, SAM์—๊ฒŒ ๋‹ค์–‘ํ•˜๊ณ  ๋Œ€๊ทœ๋ชจ์˜ ํ•™์Šต ๋ฐ์ดํ„ฐ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +- **์ œ๋กœ์ƒท ์„ฑ๋Šฅ:** SAM์€ ๋‹ค์–‘ํ•œ ์„ธ๋ถ„ํ™” ์ž‘์—…์—์„œ ๋›ฐ์–ด๋‚œ ์ œ๋กœ์ƒท ์„ฑ๋Šฅ์„ ๋ณด์—ฌ์ฃผ๋ฏ€๋กœ, ํ”„๋กฌํ”„ํŠธ ์—”์ง€๋‹ˆ์–ด๋ง์˜ ํ•„์š”์„ฑ์„ ์ตœ์†Œํ™”ํ•˜๊ณ  ๋‹ค์–‘ํ•œ ์‘์šฉ ํ”„๋กœ๊ทธ๋žจ์— ์ฆ‰์‹œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ๋„๊ตฌ์ž…๋‹ˆ๋‹ค. + +Segment Anything Model ๋ฐ SA-1B ๋ฐ์ดํ„ฐ์…‹์— ๋Œ€ํ•œ ์ž์„ธํ•œ ๋‚ด์šฉ์€ [Segment Anything ์›น์‚ฌ์ดํŠธ](https://segment-anything.com)์™€ ์—ฐ๊ตฌ ๋…ผ๋ฌธ [Segment Anything](https://arxiv.org/abs/2304.02643)์„ ์ฐธ์กฐํ•ด ์ฃผ์„ธ์š”. + +## ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ, ์ง€์›ํ•˜๋Š” ์ž‘์—… ๋ฐ ์šด์˜ ๋ชจ๋“œ + +์•„๋ž˜ ํ‘œ๋Š” ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ๊ณผ ํ•ด๋‹น ๋ชจ๋ธ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜, ์ง€์›ํ•˜๋Š” ์ž‘์—… ๋ฐ [์ถ”๋ก ](../modes/predict.md), [๊ฒ€์ฆ](../modes/val.md), [ํ›ˆ๋ จ](../modes/train.md) ๋ฐ [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค๋ฅธ ์šด์˜ ๋ชจ๋“œ์™€์˜ ํ˜ธํ™˜์„ฑ์„ ๋‚˜ํƒ€๋ƒ…๋‹ˆ๋‹ค. ์ง€์›๋˜๋Š” ๋ชจ๋“œ๋Š” โœ… ์ด๋ชจ์ง€๋กœ, ์ง€์›๋˜์ง€ ์•Š๋Š” ๋ชจ๋“œ๋Š” โŒ ์ด๋ชจ์ง€๋กœ ํ‘œ์‹œ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ ์œ ํ˜• | ์‚ฌ์ „ ํ›ˆ๋ จ ๊ฐ€์ค‘์น˜ | ์ง€์› ์ž‘์—… | ์ถ”๋ก  | ๊ฒ€์ฆ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ | +|-----------|------------|---------------------------------|----|----|----|------| +| SAM base | `sam_b.pt` | [์ธ์Šคํ„ด์Šค ์„ธ๋ถ„ํ™”](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| SAM large | `sam_l.pt` | [์ธ์Šคํ„ด์Šค ์„ธ๋ถ„ํ™”](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## SAM ์‚ฌ์šฉ ๋ฐฉ๋ฒ•: ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”์—์„œ์˜ ๋‹ค์žฌ๋‹ค๋Šฅํ•จ๊ณผ ๊ฐ•๋ ฅํ•จ + +Segment Anything Model์€ ํ›ˆ๋ จ ๋ฐ์ดํ„ฐ๋ฅผ ์ดˆ์›”ํ•˜๋Š” ๋‹ค์–‘ํ•œ ํ•˜์œ„ ์ž‘์—…์— ๋Œ€ํ•ด์„œ๋„ ์‚ฌ์šฉ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด์—๋Š” ๊ฐ€์žฅ์ž๋ฆฌ ๊ฒ€์ถœ, ๊ฐ์ฒด ์ œ์•ˆ ์ƒ์„ฑ, ์ธ์Šคํ„ด์Šค ์„ธ๋ถ„์žฅ ๋ฐ ์ดˆ๊ธฐ ํ…์ŠคํŠธ-๋งˆ์Šคํฌ ์˜ˆ์ธก ๋“ฑ์ด ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. SAM์€ ํ”„๋กฌํ”„ํŒ… ์—”์ง€๋‹ˆ์–ด๋ง์„ ํ†ตํ•ด ์ƒˆ๋กœ์šด ์ž‘์—… ๋ฐ ๋ฐ์ดํ„ฐ ๋ถ„ํฌ์— ๋น ๋ฅด๊ฒŒ ์ ์‘ํ•  ์ˆ˜ ์žˆ์œผ๋ฏ€๋กœ, ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”์— ๋Œ€ํ•œ ๋‹ค์žฌ๋‹ค๋Šฅํ•˜๊ณ  ๊ฐ•๋ ฅํ•œ ๋„๊ตฌ๋กœ ์‚ฌ์šฉ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +### SAM ์˜ˆ์ธก ์˜ˆ์ œ + +!!! Example "ํ”„๋กฌํ”„ํŠธ๋ฅผ ์ด์šฉํ•œ ์„ธ๋ถ„ํ™”" + + ์ฃผ์–ด์ง„ ํ”„๋กฌํ”„ํŠธ๋กœ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”๋ฅผ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค. 
+ + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import SAM + + # ๋ชจ๋ธ ๋กœ๋“œ + model = SAM('sam_b.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ ์‚ฌํ•ญ) + model.info() + + # bboxes ํ”„๋กฌํ”„ํŠธ๋กœ ์ถ”๋ก  ์‹คํ–‰ + model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # points ํ”„๋กฌํ”„ํŠธ๋กœ ์ถ”๋ก  ์‹คํ–‰ + model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "์ „์ฒด ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”" + + ์ „์ฒด ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”๋ฅผ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค. + + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import SAM + + # ๋ชจ๋ธ ๋กœ๋“œ + model = SAM('sam_b.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ ์‚ฌํ•ญ) + model.info() + + # ์ถ”๋ก  ์‹คํ–‰ + model('path/to/image.jpg') + ``` + + === "CLI" + + ```bash + # SAM ๋ชจ๋ธ๋กœ ์ถ”๋ก  ์‹คํ–‰ + yolo predict model=sam_b.pt source=path/to/image.jpg + ``` + +- ์—ฌ๊ธฐ์„œ ์ „์ฒด ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”๋Š” ํ”„๋กฌํ”„ํŠธ(bboxes/points/masks)๋ฅผ ์ „๋‹ฌํ•˜์ง€ ์•Š์œผ๋ฉด ์‹คํ–‰๋ฉ๋‹ˆ๋‹ค. + +!!! Example "SAMPredictor ์˜ˆ์ œ" + + ์ด๋ฏธ์ง€๋ฅผ ์„ค์ •ํ•˜๊ณ  ์ด๋ฏธ์ง€ ์ธ์ฝ”๋”๋ฅผ ์—ฌ๋Ÿฌ๋ฒˆ ์‹คํ–‰ํ•˜์ง€ ์•Š๊ณ  ์—ฌ๋Ÿฌ๋ฒˆ ํ”„๋กฌํ”„ํŠธ ์ถ”๋ก ์„ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + + === "ํ”„๋กฌํ”„ํŠธ ์ถ”๋ก " + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # SAMPredictor ์ƒ์„ฑ + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # ์ด๋ฏธ์ง€ ์„ค์ • + predictor.set_image("ultralytics/assets/zidane.jpg") # ์ด๋ฏธ์ง€ ํŒŒ์ผ๋กœ ์„ค์ • + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # np.ndarray๋กœ ์„ค์ • + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # ์ด๋ฏธ์ง€ ๋ฆฌ์…‹ + predictor.reset_image() + ``` + + ์ถ”๊ฐ€ ์ธ์ˆ˜๋กœ ์ „์ฒด ์ด๋ฏธ์ง€๋ฅผ ์„ธ๋ถ„ํ™”ํ•ฉ๋‹ˆ๋‹ค. + + === "์ „์ฒด ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # SAMPredictor ์ƒ์„ฑ + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # ์ถ”๊ฐ€ ์ธ์ˆ˜๋กœ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™” + results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64) + ``` + +- `์ „์ฒด ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”`์— ๋Œ€ํ•œ ์ž์„ธํ•œ ์ถ”๊ฐ€ ์ธ์ˆ˜๋Š” [`Predictor/generate` ์ฐธ์กฐ](../../../reference/models/sam/predict.md)๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. + +## YOLOv8๊ณผ์˜ SAM ๋น„๊ต + +์—ฌ๊ธฐ์„œ๋Š” Meta์˜ ๊ฐ€์žฅ ์ž‘์€ SAM ๋ชจ๋ธ์ธ SAM-b๋ฅผ ์–ผํŠธ๋ผ๋ฆฌ์–ผ๋ฆฌํ‹ฑ์Šค์˜ ๊ฐ€์žฅ ์ž‘์€ ์„ธ๋ถ„ํ™” ๋ชจ๋ธ, [YOLOv8n-seg](../tasks/segment.md),๊ณผ ๋น„๊ตํ•ฉ๋‹ˆ๋‹ค: + +| ๋ชจ๋ธ | ํฌ๊ธฐ | ํŒŒ๋ผ๋ฏธํ„ฐ | ์†๋„ (CPU) | +|------------------------------------------------|-----------------------|----------------------|------------------------| +| Meta's SAM-b | 358 MB | 94.7 M | 51096 ms/im | +| [MobileSAM](mobile-sam.md) | 40.7 MB | 10.1 M | 46122 ms/im | +| [FastSAM-s](fast-sam.md) with YOLOv8 backbone | 23.7 MB | 11.8 M | 115 ms/im | +| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4๋ฐฐ ์ž‘์Œ) | **3.4 M** (27.9๋ฐฐ ์ ์Œ) | **59 ms/im** (866๋ฐฐ ๋น ๋ฆ„) | + +์ด ๋น„๊ต๋Š” ๋ชจ๋ธ ํฌ๊ธฐ ๋ฐ ์†๋„์— ๋Œ€ํ•œ ์ƒ๋‹นํ•œ ์ฐจ์ด๋ฅผ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. SAM์€ ์ž๋™์œผ๋กœ ์„ธ๋ถ„ํ™”ํ•˜๋Š” ๋…ํŠนํ•œ ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•˜์ง€๋งŒ, ์ž‘์€ ํฌ๊ธฐ์™€ ๋†’์€ ์ฒ˜๋ฆฌ ์†๋„๋กœ ์ธํ•ด YOLOv8 ์„ธ๋ถ„ํ™” ๋ชจ๋ธ๊ณผ ์ง์ ‘ ๊ฒฝ์Ÿํ•˜์ง€๋Š” ์•Š์Šต๋‹ˆ๋‹ค. + +์ด ํ…Œ์ŠคํŠธ๋Š” 2023๋…„ ์• ํ”Œ M2 ๋งฅ๋ถ(16GB RAM)์—์„œ ์ˆ˜ํ–‰๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์ด ํ…Œ์ŠคํŠธ๋ฅผ ์žฌํ˜„ํ•˜๋ ค๋ฉด: + +!!! 
Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # SAM-b ํ”„๋กœํŒŒ์ผ๋ง + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # MobileSAM ํ”„๋กœํŒŒ์ผ๋ง + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # FastSAM-s ํ”„๋กœํŒŒ์ผ๋ง + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # YOLOv8n-seg ํ”„๋กœํŒŒ์ผ๋ง + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## ์ž๋™ ์ฃผ์„: ์„ธ๋ถ„ํ™” ๋ฐ์ดํ„ฐ์…‹์„ ์œ„ํ•œ ์‹ ์†ํ•œ ๊ฒฝ๋กœ + +์ž๋™ ์ฃผ์„์€ SAM์˜ ํ•ต์‹ฌ ๊ธฐ๋Šฅ์œผ๋กœ, ๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ ํƒ์ง€ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ [์„ธ๋ถ„ํ™” ๋ฐ์ดํ„ฐ์…‹](https://docs.ultralytics.com/datasets/segment)์„ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด ๊ธฐ๋Šฅ์„ ์‚ฌ์šฉํ•˜๋ฉด ๋ฒˆ๊ฑฐ๋กญ๊ณ  ์‹œ๊ฐ„์ด ์˜ค๋ž˜ ๊ฑธ๋ฆฌ๋Š” ์ˆ˜์ž‘์—… ์ฃผ์„ ์ž‘์—…์„ ๊ฑด๋„ˆ๋›ฐ๊ณ  ๋Œ€๋Ÿ‰์˜ ์ด๋ฏธ์ง€๋ฅผ ์‹ ์†ํ•˜๊ฒŒ ์ •ํ™•ํ•˜๊ฒŒ ์ฃผ์„์„ ๋‹ฌ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +### ํƒ์ง€ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์„ธ๋ถ„ํ™” ๋ฐ์ดํ„ฐ์…‹ ์ƒ์„ฑํ•˜๊ธฐ + +Ultralytics ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ ํƒ์ง€ ๋ฐ SAM ์„ธ๋ถ„ํ™” ๋ชจ๋ธ๊ณผ ํ•จ๊ป˜ ๋ฐ์ดํ„ฐ์…‹์„ ์ž๋™์œผ๋กœ ์ฃผ์„ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์•„๋ž˜์™€ ๊ฐ™์ด `auto_annotate` ํ•จ์ˆ˜๋ฅผ ์‚ฌ์šฉํ•˜์„ธ์š”: + +!!! Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| ์ธ์ˆ˜ | ์œ ํ˜• | ์„ค๋ช… | ๊ธฐ๋ณธ๊ฐ’ | +|------------|-----------------|-------------------------------------------------------------------|--------------| +| data | ๋ฌธ์ž์—ด | ์ฃผ์„์„ ๋‹ฌ ์ด๋ฏธ์ง€๊ฐ€ ํฌํ•จ๋œ ํด๋” ๊ฒฝ๋กœ. | | +| det_model | ๋ฌธ์ž์—ด, ์„ ํƒ์‚ฌํ•ญ | ๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ YOLO ํƒ์ง€ ๋ชจ๋ธ. ๊ธฐ๋ณธ๊ฐ’์€ 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | ๋ฌธ์ž์—ด, ์„ ํƒ์‚ฌํ•ญ | ๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ SAM ์„ธ๋ถ„ํ™” ๋ชจ๋ธ. ๊ธฐ๋ณธ๊ฐ’์€ 'sam_b.pt'. | 'sam_b.pt' | +| device | ๋ฌธ์ž์—ด, ์„ ํƒ์‚ฌํ•ญ | ๋ชจ๋ธ์„ ์‹คํ–‰ํ•  ๋””๋ฐ”์ด์Šค. ๊ธฐ๋ณธ๊ฐ’์€ ๋นˆ ๋ฌธ์ž์—ด (CPU ๋˜๋Š” ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ GPU ์‚ฌ์šฉ). | | +| output_dir | ๋ฌธ์ž์—ด, None, ์„ ํƒ์‚ฌํ•ญ | ์ฃผ์„์ด ํฌํ•จ๋œ ๊ฒฐ๊ณผ๋ฅผ ์ €์žฅํ•  ๋””๋ ‰ํ† ๋ฆฌ ๊ฒฝ๋กœ. ๊ธฐ๋ณธ๊ฐ’์€ 'data'์™€ ๊ฐ™์€ ๋””๋ ‰ํ† ๋ฆฌ ๋‚ด๋ถ€์˜ 'labels' ํด๋”์ž…๋‹ˆ๋‹ค. | None | + +`auto_annotate` ํ•จ์ˆ˜๋Š” ์ด๋ฏธ์ง€ ๊ฒฝ๋กœ๋ฅผ ์ž…๋ ฅ์œผ๋กœ ๋ฐ›์•„, ์ž…๋ ฅํ•œ ๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ ํƒ์ง€์™€ SAM ์„ธ๋ถ„ํ™” ๋ชจ๋ธ, ์ด ํ•จ์ˆ˜๋ฅผ ์‹คํ–‰ํ•  ๋””๋ฐ”์ด์Šค ๋ฐ ์ฃผ์„์ด ํฌํ•จ๋œ ๊ฒฐ๊ณผ๋ฅผ ์ €์žฅํ•  ๋””๋ ‰ํ† ๋ฆฌ ๊ฒฝ๋กœ๋ฅผ ์„ ํƒ์ ์œผ๋กœ ์ง€์ •ํ•  ์ˆ˜ ์žˆ๋Š” ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +๋ฏธ๋ฆฌ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์ž๋™ ์ฃผ์„ ๊ธฐ๋Šฅ์„ ํ™œ์šฉํ•˜๋ฉด ๋†’์€ ํ’ˆ์งˆ์˜ ์„ธ๋ถ„ํ™” ๋ฐ์ดํ„ฐ์…‹์„ ์ƒ์„ฑํ•˜๋Š” ๋ฐ ์†Œ์š”๋˜๋Š” ์‹œ๊ฐ„๊ณผ ๋…ธ๋ ฅ์„ ํฌ๊ฒŒ ์ค„์ผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด ๊ธฐ๋Šฅ์€ ํŠนํžˆ ๋Œ€๋Ÿ‰์˜ ์ด๋ฏธ์ง€ ์ปฌ๋ ‰์…˜์„ ๋‹ค๋ฃจ๋Š” ์—ฐ๊ตฌ์›๊ณผ ๊ฐœ๋ฐœ์ž์—๊ฒŒ ์œ ์šฉํ•˜๋ฉฐ, ์ˆ˜์ž‘์—… ์ฃผ์„ ๋Œ€์‹  ๋ชจ๋ธ ๊ฐœ๋ฐœ๊ณผ ํ‰๊ฐ€์— ์ง‘์ค‘ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๋ง + +๊ท€ํ•˜์˜ ์—ฐ๊ตฌ ๋˜๋Š” ๊ฐœ๋ฐœ ์ž‘์—…์— SAM์ด ์œ ์šฉํ•˜๊ฒŒ ์‚ฌ์šฉ๋œ ๊ฒฝ์šฐ, ์ €ํฌ ๋…ผ๋ฌธ์„ ์ธ์šฉํ•ด ์ฃผ์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. 
Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +๋ชจ๋ธ ๊ฐœ๋ฐœ๊ณผ ์•Œ๊ณ ๋ฆฌ์ฆ˜ ๊ฐœ๋ฐœ์„ ์œ„ํ•œ ๊ท€์ค‘ํ•œ ๋ฆฌ์†Œ์Šค๋ฅผ ๋งŒ๋“ค๊ณ  ์œ ์ง€ ๊ด€๋ฆฌํ•˜๋Š” Meta AI์—๊ฒŒ ๊ฐ์‚ฌ์˜ ๋ง์”€์„ ๋“œ๋ฆฝ๋‹ˆ๋‹ค. + +*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”, ํ”„๋กฌํ”„ํŠธ ๊ธฐ๋ฐ˜ ์„ธ๋ถ„ํ™”, ์ œ๋กœ์ƒท ์„ฑ๋Šฅ, SA-1B ๋ฐ์ดํ„ฐ์…‹, ๊ณ ๊ธ‰์„ค๊ณ„, ์ž๋™ ์ฃผ์„, ์–ผํŠธ๋ผ๋ฆฌ์–ผ๋ฆฌํ‹ฑ์Šค, ์‚ฌ์ „ ํ›ˆ๋ จ ๋ชจ๋ธ, SAM base, SAM large, ์ธ์Šคํ„ด์Šค ์„ธ๋ถ„ํ™”, ์ปดํ“จํ„ฐ ๋น„์ „, ์ธ๊ณต ์ง€๋Šฅ, ๋จธ์‹  ๋Ÿฌ๋‹, ๋ฐ์ดํ„ฐ ์ฃผ์„, ์„ธ๋ถ„ํ™” ๋งˆ์Šคํฌ, ํƒ์ง€ ๋ชจ๋ธ, YOLO ํƒ์ง€ ๋ชจ๋ธ, bibtex, Meta AI.* diff --git a/ultralytics/docs/ko/models/sam.md:Zone.Identifier b/ultralytics/docs/ko/models/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolo-nas.md b/ultralytics/docs/ko/models/yolo-nas.md new file mode 100755 index 0000000..04d8765 --- /dev/null +++ b/ultralytics/docs/ko/models/yolo-nas.md @@ -0,0 +1,119 @@ +--- +comments: true +description: YOLO-NAS๋Š” ์šฐ์ˆ˜ํ•œ ๋ฌผ์ฒด ๊ฐ์ง€ ๋ชจ๋ธ๋กœ์„œ ์ž์„ธํ•œ ์„ค๋ช…์„œ๋ฅผ ํƒ์ƒ‰ํ•ด๋ณด์„ธ์š”. Ultralytics Python API๋ฅผ ์‚ฌ์šฉํ•œ ๊ธฐ๋Šฅ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ, ์‚ฌ์šฉ๋ฒ• ๋“ฑ์„ ์ž์„ธํžˆ ์•Œ์•„๋ณด์„ธ์š”. +keywords: YOLO-NAS, Deci AI, ๋ฌผ์ฒด ๊ฐ์ง€, ๋”ฅ๋Ÿฌ๋‹, ์‹ ๊ฒฝ ์•„ํ‚คํ…์ฒ˜ ๊ฒ€์ƒ‰, Ultralytics Python API, YOLO ๋ชจ๋ธ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ, ์–‘์žํ™”, ์ตœ์ ํ™”, COCO, Objects365, Roboflow 100 +--- + +# YOLO-NAS + +## ๊ฐœ์š” + +Deci AI์—์„œ ๊ฐœ๋ฐœํ•œ YOLO-NAS๋Š” ์›๋ž˜์˜ YOLO ๋ชจ๋ธ์˜ ํ•œ๊ณ„๋ฅผ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด ๊ณ ๋„์˜ ์‹ ๊ฒฝ ์•„ํ‚คํ…์ฒ˜ ๊ฒ€์ƒ‰(Neural Architecture Search) ๊ธฐ์ˆ ๋กœ ๋งŒ๋“ค์–ด์ง„ ํ˜์‹ ์ ์ธ ๋ฌผ์ฒด ๊ฐ์ง€ ๊ธฐ๋ฐ˜ ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. ์–‘์žํ™” ์ง€์›๊ณผ ์ •ํ™•์„ฑ-์ง€์—ฐ ํŠธ๋ ˆ์ด๋“œ์˜คํ”„์˜ ์ค‘์š”ํ•œ ๊ฐœ์„ ์„ ํ†ตํ•ด YOLO-NAS๋Š” ๋ฌผ์ฒด ๊ฐ์ง€ ๋ถ„์•ผ์—์„œ ์ฃผ๋ชฉํ•  ๋งŒํ•œ ์„ฑ๋Šฅ ํ–ฅ์ƒ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +![๋ชจ๋ธ ์˜ˆ์‹œ ์ด๋ฏธ์ง€](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**YOLO-NAS ๊ฐœ์š”.** YOLO-NAS๋Š” ์–‘์žํ™” ๊ด€๋ จ ๋ธ”๋ก๊ณผ ์„ ํƒ์  ์–‘์žํ™”๋ฅผ ์ ์šฉํ•˜์—ฌ ์ตœ์ ์˜ ์„ฑ๋Šฅ์„ ๋‹ฌ์„ฑํ•ฉ๋‹ˆ๋‹ค. ๋ชจ๋ธ์€ INT8 ์–‘์žํ™” ๋ฒ„์ „์œผ๋กœ ๋ณ€ํ™˜๋  ๋•Œ ์ตœ์†Œํ•œ์˜ ์ •ํ™•๋„ ๊ฐ์†Œ๋ฅผ ๊ฒฝํ—˜ํ•˜๋ฏ€๋กœ ๋‹ค๋ฅธ ๋ชจ๋ธ๋“ค๊ณผ ๋น„๊ตํ–ˆ์„ ๋•Œ ์ƒ๋‹นํ•œ ๊ฐœ์„ ์„ ์ด๋Œ์–ด๋ƒ…๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ํ˜์‹ ์€ ์˜ˆ์ธกํ•  ์ˆ˜ ์—†๋Š” ๋ฌผ์ฒด ๊ฐ์ง€ ๋Šฅ๋ ฅ๊ณผ ๋†’์€ ์„ฑ๋Šฅ์„ ๊ฐ€์ง„ ์šฐ์ˆ˜ํ•œ ์•„ํ‚คํ…์ฒ˜๋กœ ์ด์–ด์ง‘๋‹ˆ๋‹ค. + +### ์ฃผ์š” ๊ธฐ๋Šฅ + +- **์–‘์žํ™” ์นœํ™”์ ์ธ ๊ธฐ๋ณธ ๋ธ”๋ก**: YOLO-NAS๋Š” ์ด์ „ YOLO ๋ชจ๋ธ์˜ ํ•œ๊ณ„ ์ค‘ ํ•˜๋‚˜์ธ ์–‘์žํ™”์— ์ ํ•ฉํ•œ ์ƒˆ๋กœ์šด ๊ธฐ๋ณธ ๋ธ”๋ก์„ ๋„์ž…ํ•ฉ๋‹ˆ๋‹ค. +- **์ •๊ตํ•œ ํ›ˆ๋ จ๊ณผ ์–‘์žํ™”**: YOLO-NAS๋Š” ๊ณ ๊ธ‰ ํ›ˆ๋ จ ๋ฐฉ์‹๊ณผ ํ›ˆ๋ จ ํ›„ ์–‘์žํ™”๋ฅผ ํ™œ์šฉํ•˜์—ฌ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค. +- **AutoNAC ์ตœ์ ํ™”์™€ ์‚ฌ์ „ ํ›ˆ๋ จ**: YOLO-NAS๋Š” AutoNAC ์ตœ์ ํ™”๋ฅผ ํ™œ์šฉํ•˜๋ฉฐ COCO, Objects365, Roboflow 100๊ณผ ๊ฐ™์€ ์œ ๋ช…ํ•œ ๋ฐ์ดํ„ฐ์…‹์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋ฉ๋‹ˆ๋‹ค. ์ด๋ฅผ ํ†ตํ•ด YOLO-NAS๋Š” ๋ณธ๊ฒฉ์ ์ธ ํ”„๋กœ๋•์…˜ ํ™˜๊ฒฝ์—์„œ์˜ ๋ฌผ์ฒด ๊ฐ์ง€ ์ž‘์—…์— ๋งค์šฐ ์ ํ•ฉํ•ฉ๋‹ˆ๋‹ค. + +## ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ + +Ultralytics๊ฐ€ ์ œ๊ณตํ•˜๋Š” ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLO-NAS ๋ชจ๋ธ๋กœ ๋‹ค์Œ ์„ธ๋Œ€์˜ ๋ฌผ์ฒด ๊ฐ์ง€ ๊ธฐ์ˆ ์˜ ํž˜์„ ์ฒดํ—˜ํ•ด ๋ณด์„ธ์š”. 
์ด๋Ÿฌํ•œ ๋ชจ๋ธ์€ ์†๋„์™€ ์ •ํ™•์„ฑ ์ธก๋ฉด์—์„œ ์ตœ๊ณ ์˜ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•˜๊ธฐ ์œ„ํ•ด ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ํŠน์ • ์š”๊ตฌ์— ๋งž๊ฒŒ ๋‹ค์–‘ํ•œ ์˜ต์…˜ ์ค‘ ์„ ํƒํ•˜์„ธ์š”: + +| ๋ชจ๋ธ | mAP | ์ง€์—ฐ ์‹œ๊ฐ„ (๋ฐ€๋ฆฌ์ดˆ) | +|------------------|-------|-------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +๊ฐ ๋ชจ๋ธ ๋ณ€ํ˜•์€ ํ‰๊ท  ํ‰๊ท  ์ •๋ฐ€๋„(mAP)์™€ ์ง€์—ฐ ์‹œ๊ฐ„ ๊ฐ„์˜ ๊ท ํ˜•์„ ์ œ๊ณตํ•˜์—ฌ ๋ฌผ์ฒด ๊ฐ์ง€ ์ž‘์—…์„ ์„ฑ๋Šฅ๊ณผ ์†๋„ ๋ชจ๋‘ ์ตœ์ ํ™”ํ•  ์ˆ˜ ์žˆ๋„๋ก ํ•ฉ๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์‹œ + +Ultralytics๋Š” YOLO-NAS ๋ชจ๋ธ์„ `ultralytics` Python ํŒจํ‚ค์ง€๋ฅผ ํ†ตํ•ด Python ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์— ์‰ฝ๊ฒŒ ํ†ตํ•ฉํ•  ์ˆ˜ ์žˆ๋„๋ก ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. ์ด ํŒจํ‚ค์ง€๋Š” ํ”„๋กœ์„ธ์Šค๋ฅผ ๊ฐ„์†Œํ™”ํ•˜๊ธฐ ์œ„ํ•œ ์‚ฌ์šฉ์ž ์นœํ™”์ ์ธ Python API๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +๋‹ค์Œ ์˜ˆ์‹œ์—์„œ๋Š” ์ถ”๋ก ๊ณผ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ๋ฅผ ์œ„ํ•ด `ultralytics` ํŒจํ‚ค์ง€์™€ ํ•จ๊ป˜ YOLO-NAS ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค: + +### ์ถ”๋ก ๊ณผ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์˜ˆ์‹œ + +์ด ์˜ˆ์‹œ์—์„œ๋Š” COCO8 ๋ฐ์ดํ„ฐ์…‹์—์„œ YOLO-NAS-s ๋ชจ๋ธ์„ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌํ•ฉ๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + ์ด ์˜ˆ์‹œ์—์„œ๋Š” YOLO-NAS๋ฅผ ์œ„ํ•œ ๊ฐ„๋‹จํ•œ ์ถ”๋ก  ๋ฐ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ ์ฝ”๋“œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ถ”๋ก  ๊ฒฐ๊ณผ๋ฅผ ์ฒ˜๋ฆฌํ•˜๊ธฐ ์œ„ํ•œ ๋ฐฉ๋ฒ•์€ [์˜ˆ์ธก](../modes/predict.md) ๋ชจ๋“œ๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. ์ถ”๊ฐ€ ๋ชจ๋“œ์—์„œ YOLO-NAS๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•์€ [Val](../modes/val.md) ๋ฐ [Export](../modes/export.md)๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. `ultralytics` ํŒจํ‚ค์ง€์—์„œ YOLO-NAS์˜ ํ›ˆ๋ จ์€ ์ง€์›ํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค. + + === "Python" + + PyTorch ์‚ฌ์ „ ํ›ˆ๋ จ๋œ `*.pt` ๋ชจ๋ธ ํŒŒ์ผ์„ `NAS()` ํด๋ž˜์Šค์— ์ „๋‹ฌํ•˜์—ฌ Python์—์„œ ๋ชจ๋ธ ์ธ์Šคํ„ด์Šค๋ฅผ ์ƒ์„ฑํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค: + + ```python + from ultralytics import NAS + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLO-NAS-s ๋ชจ๋ธ ๋กœ๋“œ + model = NAS('yolo_nas_s.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ ์‚ฌํ•ญ) + model.info() + + # COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์—์„œ ๋ชจ๋ธ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ + results = model.val(data='coco8.yaml') + + # YOLO-NAS-s ๋ชจ๋ธ๋กœ 'bus.jpg' ์ด๋ฏธ์ง€์— ์ถ”๋ก  ์‹คํ–‰ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI ๋ช…๋ น์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ ์ง์ ‘ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค: + + ```bash + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLO-NAS-s ๋ชจ๋ธ๋กœ COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์˜ ์„ฑ๋Šฅ ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ + yolo val model=yolo_nas_s.pt data=coco8.yaml + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLO-NAS-s ๋ชจ๋ธ๋กœ 'bus.jpg' ์ด๋ฏธ์ง€์— ์ถ”๋ก  ์‹คํ–‰ + yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg + ``` + +## ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ๋ชจ๋“œ + +YOLO-NAS ๋ชจ๋ธ์€ Small (s), Medium (m) ๋ฐ Large (l) ์„ธ ๊ฐ€์ง€ ๋ณ€ํ˜•์ด ์žˆ์Šต๋‹ˆ๋‹ค. ๊ฐ ๋ณ€ํ˜•์€ ๋‹ค๋ฅธ ๊ณ„์‚ฐ ๋ฐ ์„ฑ๋Šฅ ์š”๊ตฌ ์‚ฌํ•ญ์„ ์ถฉ์กฑ์‹œํ‚ค๊ธฐ ์œ„ํ•ด ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค: + +- **YOLO-NAS-s**: ๊ณ„์‚ฐ ์ž์›์ด ์ œํ•œ๋˜๊ณ  ํšจ์œจ์„ฑ์ด ์ค‘์š”ํ•œ ํ™˜๊ฒฝ์— ์ตœ์ ํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค. +- **YOLO-NAS-m**: ๋” ๋†’์€ ์ •ํ™•์„ฑ์„ ๊ฐ€์ง€๋Š” ์ผ๋ฐ˜์ ์ธ ๋ฌผ์ฒด ๊ฐ์ง€ ์ž‘์—…์— ์ ํ•ฉํ•œ ๊ท ํ˜•์žกํžŒ ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. +- **YOLO-NAS-l**: ๊ณ„์‚ฐ ์ž์›์ด ์ œํ•œ๋˜์ง€ ์•Š๋Š” ํ™˜๊ฒฝ์—์„œ ๊ฐ€์žฅ ๋†’์€ ์ •ํ™•์„ฑ์ด ํ•„์š”ํ•œ ์‹œ๋‚˜๋ฆฌ์˜ค์— ๋งž๊ฒŒ ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +์•„๋ž˜๋Š” ๊ฐ ๋ชจ๋ธ์— ๋Œ€ํ•œ ์ž์„ธํ•œ ๊ฐœ์š”๋กœ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜, ์ง€์›ํ•˜๋Š” ์ž‘์—…, ๋‹ค์–‘ํ•œ ์ž‘๋™ ๋ชจ๋“œ์™€์˜ ํ˜ธํ™˜์„ฑ์— ๋Œ€ํ•œ ๋งํฌ๊ฐ€ ์ œ๊ณต๋ฉ๋‹ˆ๋‹ค. 
+ +| ๋ชจ๋ธ ์œ ํ˜• | ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜ | ์ง€์›๋˜๋Š” ์ž‘์—… | ์ถ”๋ก  | ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ | +|------------|-----------------------------------------------------------------------------------------------|-----------------------------|----|--------|----|------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [๋ฌผ์ฒด ๊ฐ์ง€](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [๋ฌผ์ฒด ๊ฐ์ง€](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [๋ฌผ์ฒด ๊ฐ์ง€](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๋ง์”€ + +YOLO-NAS๋ฅผ ์—ฐ๊ตฌ ๋˜๋Š” ๊ฐœ๋ฐœ ์ž‘์—…์— ํ™œ์šฉํ•œ ๊ฒฝ์šฐ SuperGradients๋ฅผ ์ธ์šฉํ•ด ์ฃผ์„ธ์š”. + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +Deci AI์˜ [SuperGradients](https://github.com/Deci-AI/super-gradients/) ํŒ€์—๊ฒŒ ์ปดํ“จํ„ฐ ๋น„์ „ ์ปค๋ฎค๋‹ˆํ‹ฐ๋ฅผ ์œ„ํ•ด ์ด ๊ฐ€์น˜ ์žˆ๋Š” ์ž๋ฃŒ๋ฅผ ๋งŒ๋“ค๊ณ  ์œ ์ง€ ๊ด€๋ฆฌํ•œ ๋ฐ ๋Œ€ํ•ด ๊ฐ์‚ฌ์˜ ๋ง์”€์„ ์ „ํ•ฉ๋‹ˆ๋‹ค. ํ˜์‹ ์ ์ธ ์•„ํ‚คํ…์ฒ˜์™€ ์šฐ์ˆ˜ํ•œ ๋ฌผ์ฒด ๊ฐ์ง€ ๋Šฅ๋ ฅ์„ ๊ฐ–์ถ˜ YOLO-NAS๊ฐ€ ๊ฐœ๋ฐœ์ž์™€ ์—ฐ๊ตฌ์ž์—๊ฒŒ ์ค‘์š”ํ•œ ๋„๊ตฌ๊ฐ€ ๋  ๊ฒƒ์œผ๋กœ ๊ธฐ๋Œ€ํ•ฉ๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/ko/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov3.md b/ultralytics/docs/ko/models/yolov3.md new file mode 100755 index 0000000..103896f --- /dev/null +++ b/ultralytics/docs/ko/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: YOLOv3, YOLOv3-Ultralytics ๋ฐ YOLOv3u์— ๋Œ€ํ•œ ๊ฐœ์š”๋ฅผ ์–ป์œผ์„ธ์š”. ๋ฌผ์ฒด ํƒ์ง€๋ฅผ ์œ„ํ•œ ์ฃผ์š” ๊ธฐ๋Šฅ, ์‚ฌ์šฉ๋ฒ• ๋ฐ ์ง€์› ์ž‘์—…์— ๋Œ€ํ•ด ์•Œ์•„๋ณด์„ธ์š”. +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, ๋ฌผ์ฒด ํƒ์ง€, ์ถ”๋ก , ํ›ˆ๋ จ, Ultralytics +--- + +# YOLOv3, YOLOv3-Ultralytics ๋ฐ YOLOv3u + +## ๊ฐœ์š” + +์ด ๋ฌธ์„œ๋Š” ์„ธ ๊ฐ€์ง€ ๋ฐ€์ ‘ํ•˜๊ฒŒ ๊ด€๋ จ๋œ ๋ฌผ์ฒด ํƒ์ง€ ๋ชจ๋ธ์ธ [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) ๋ฐ [YOLOv3u](https://github.com/ultralytics/ultralytics)์— ๋Œ€ํ•œ ๊ฐœ์š”๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +1. **YOLOv3:** ์ด๊ฒƒ์€ You Only Look Once (YOLO) ๋ฌผ์ฒด ํƒ์ง€ ์•Œ๊ณ ๋ฆฌ์ฆ˜์˜ ์„ธ ๋ฒˆ์งธ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. Joseph Redmon์ด ์ฒ˜์Œ ๊ฐœ๋ฐœํ•œ YOLOv3๋Š” ๋‹ค์ค‘ ์Šค์ผ€์ผ ์˜ˆ์ธก ๋ฐ ์„ธ ๊ฐ€์ง€ ๋‹ค๋ฅธ ํฌ๊ธฐ์˜ ํƒ์ง€ ์ปค๋„๊ณผ ๊ฐ™์€ ๊ธฐ๋Šฅ์„ ๋„์ž…ํ•˜์—ฌ ์ด์ „ ๋ชจ๋ธ๋ณด๋‹ค ํ–ฅ์ƒ๋์Šต๋‹ˆ๋‹ค. + +2. **YOLOv3-Ultralytics:** ์ด๊ฒƒ์€ Ultralytics์˜ YOLOv3 ๋ชจ๋ธ ๊ตฌํ˜„์ž…๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ ์›๋ณธ YOLOv3 ์•„ํ‚คํ…์ฒ˜๋ฅผ ๋ณต์ œํ•˜๋ฉฐ ๋” ๋งŽ์€ ์‚ฌ์ „ ํ›ˆ๋ จ ๋ชจ๋ธ ๋ฐ ์‰ฌ์šด ์‚ฌ์šฉ์ž ์ •์˜ ์˜ต์…˜๊ณผ ๊ฐ™์€ ์ถ”๊ฐ€ ๊ธฐ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +3. 
**YOLOv3u:** ์ด๊ฒƒ์€ YOLOv8 ๋ชจ๋ธ์—์„œ ์‚ฌ์šฉ๋˜๋Š” ์•ต์ปค ์—†์ด ๋ฌผ์ฒด ์—†์Œ ๋ถ„๋ฆฌ ํ—ค๋“œ๋ฅผ ํ†ตํ•ฉํ•œ YOLOv3-Ultralytics์˜ ์—…๋ฐ์ดํŠธ๋œ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. YOLOv3u๋Š” YOLOv3์™€ ๋™์ผํ•œ ๋ฐฑ๋ณธ ๋ฐ ๋„คํฌ ์•„ํ‚คํ…์ฒ˜๋ฅผ ์œ ์ง€ํ•˜์ง€๋งŒ YOLOv8์—์„œ ์—…๋ฐ์ดํŠธ๋œ ํƒ์ง€ ํ—ค๋“œ๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. + +![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png) + +## ์ฃผ์š” ๊ธฐ๋Šฅ + +- **YOLOv3:** ์ด ๋ชจ๋ธ์€ ํƒ์ง€๋ฅผ ์œ„ํ•ด 13x13, 26x26 ๋ฐ 52x52์˜ ์„ธ ๊ฐ€์ง€ ๋‹ค๋ฅธ ํฌ๊ธฐ์˜ ํƒ์ง€ ์ปค๋„์„ ํ™œ์šฉํ•˜๋Š” ์„ธ ๊ฐ€์ง€ ๋‹ค๋ฅธ ์Šค์ผ€์ผ์„ ๋„์ž…ํ–ˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” ๋‹ค์–‘ํ•œ ํฌ๊ธฐ์˜ ๊ฐ์ฒด์— ๋Œ€ํ•œ ํƒ์ง€ ์ •ํ™•๋„๋ฅผ ํฌ๊ฒŒ ํ–ฅ์ƒ์‹œ์ผฐ์Šต๋‹ˆ๋‹ค. ๋˜ํ•œ YOLOv3์€ ๊ฐ ๊ฒฝ๊ณ„ ์ƒ์ž์— ๋Œ€ํ•œ ๋‹ค์ค‘ ๋ ˆ์ด๋ธ” ์˜ˆ์ธก๊ณผ ๋” ๋‚˜์€ ํŠน์ง• ์ถ”์ถœ๊ธฐ ๋„คํŠธ์›Œํฌ์™€ ๊ฐ™์€ ๊ธฐ๋Šฅ์„ ์ถ”๊ฐ€ํ–ˆ์Šต๋‹ˆ๋‹ค. + +- **YOLOv3-Ultralytics:** Ultralytics์˜ YOLOv3 ๊ตฌํ˜„์€ ์›๋ณธ ๋ชจ๋ธ๊ณผ ๋™์ผํ•œ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•˜์ง€๋งŒ ๋” ๋งŽ์€ ์‚ฌ์ „ ํ›ˆ๋ จ ๋ชจ๋ธ, ์ถ”๊ฐ€์ ์ธ ํ›ˆ๋ จ ๋ฐฉ๋ฒ• ๋ฐ ์‰ฌ์šด ์‚ฌ์šฉ์ž ์ •์˜ ์˜ต์…˜์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด๋กœ์จ ์‹ค์ œ ์‘์šฉ ๋ถ„์•ผ์— ๋Œ€ํ•ด ๋” ๋‹ค์–‘ํ•˜๊ณ  ์‚ฌ์šฉ์ž ์นœํ™”์ ์ธ ๋ชจ๋ธ์ด ๋ฉ๋‹ˆ๋‹ค. + +- **YOLOv3u:** ์ด ์—…๋ฐ์ดํŠธ๋œ ๋ชจ๋ธ์€ YOLOv8์˜ ์•ต์ปค ์—†์Œ, ๋ฌผ์ฒด ์—†๋Š” ๋ถ„๋ฆฌ ํ—ค๋“œ๋ฅผ ํ†ตํ•ฉํ•ฉ๋‹ˆ๋‹ค. ๋ฏธ๋ฆฌ ์ •์˜๋œ ์•ต์ปค ๋ฐ•์Šค ๋ฐ ๋ฌผ์ฒด ์ ์ˆ˜๊ฐ€ ํ•„์š” ์—†์–ด์ง„ ์ด ํƒ์ง€ ํ—ค๋“œ ์„ค๊ณ„๋Š” ๋‹ค์–‘ํ•œ ํฌ๊ธฐ์™€ ๋ชจ์–‘์˜ ๊ฐ์ฒด๋ฅผ ํƒ์ง€ํ•˜๋Š” ๋Šฅ๋ ฅ์„ ํ–ฅ์ƒ์‹œํ‚ฌ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋กœ์จ YOLOv3u๋Š” ๋ฌผ์ฒด ํƒ์ง€ ์ž‘์—…์— ๋Œ€ํ•ด ๋” ๊ฒฌ๊ณ ํ•˜๊ณ  ์ •ํ™•ํ•œ ๋ชจ๋ธ์ด ๋ฉ๋‹ˆ๋‹ค. + +## ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ๋ชจ๋“œ + +YOLOv3, YOLOv3-Ultralytics ๋ฐ YOLOv3u ์‹œ๋ฆฌ์ฆˆ๋Š” ๋ฌผ์ฒด ํƒ์ง€ ์ž‘์—…์„ ์œ„ํ•ด ํŠน๋ณ„ํžˆ ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์€ ์ •ํ™•์„ฑ๊ณผ ์†๋„๋ฅผ ๊ท ํ˜•์žˆ๊ฒŒ ์œ ์ง€ํ•˜์—ฌ ๋‹ค์–‘ํ•œ ์‹ค์ œ ์‹œ๋‚˜๋ฆฌ์˜ค์—์„œ ํšจ๊ณผ์ ์œผ๋กœ ์‚ฌ์šฉ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๊ฐ ๋ฒ„์ „์€ ๋…ํŠนํ•œ ๊ธฐ๋Šฅ๊ณผ ์ตœ์ ํ™”๋ฅผ ์ œ๊ณตํ•˜์—ฌ ๋‹ค์–‘ํ•œ ์‘์šฉ ๋ถ„์•ผ์— ์ ํ•ฉํ•ฉ๋‹ˆ๋‹ค. + +์„ธ ๊ฐ€์ง€ ๋ชจ๋ธ์€ [์ถ”๋ก ](../modes/predict.md), [์œ ํšจ์„ฑ ๊ฒ€์‚ฌ](../modes/val.md), [ํ›ˆ๋ จ](../modes/train.md) ๋ฐ [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md)์™€ ๊ฐ™์€ ํฌ๊ด„์ ์ธ ๋ชจ๋“œ๋ฅผ ์ง€์›ํ•˜์—ฌ ํšจ๊ณผ์ ์ธ ๋ฌผ์ฒด ํƒ์ง€๋ฅผ ์œ„ํ•œ ์™„๋ฒฝํ•œ ๋„๊ตฌ ์„ธํŠธ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ ์œ ํ˜• | ์ง€์›๋˜๋Š” ์ž‘์—… | ์ถ”๋ก  | ์œ ํšจ์„ฑ ๊ฒ€์‚ฌ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ | +|--------------------|-----------------------------|----|--------|----|------| +| YOLOv3 | [๋ฌผ์ฒด ํƒ์ง€](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3-Ultralytics | [๋ฌผ์ฒด ํƒ์ง€](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3u | [๋ฌผ์ฒด ํƒ์ง€](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +์ด ํ‘œ๋Š” ๊ฐ YOLOv3 ๋ฒ„์ „์˜ ๊ธฐ๋Šฅ์„ ํ•œ ๋ˆˆ์— ๋ณด์—ฌ์ฃผ๋ฉฐ, ๋ฌผ์ฒด ํƒ์ง€ ์›Œํฌํ”Œ๋กœ์šฐ์˜ ๋‹ค์–‘ํ•œ ์ž‘์—… ๋ฐ ์šด์˜ ๋ชจ๋“œ์— ๋Œ€ํ•ด ๋‹ค์–‘์„ฑ๊ณผ ์ ํ•ฉ์„ฑ์„ ๊ฐ•์กฐํ•ฉ๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +๋‹ค์Œ ์˜ˆ์ œ๋Š” ๊ฐ„๋‹จํ•œ YOLOv3 ํ›ˆ๋ จ ๋ฐ ์ถ”๋ก  ์˜ˆ์ œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด์™€ ๋‹ค๋ฅธ [๋ชจ๋“œ](../modes/index.md)์˜ ์ „์ฒด ์„ค๋ช…์€ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) ๋ฐ [Export](../modes/export.md) ๋ฌธ์„œ ํŽ˜์ด์ง€๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + Python์—์„œ PyTorch ์‚ฌ์ „ ํ›ˆ๋ จ๋œ `*.pt` ๋ชจ๋ธ ๋ฐ ์„ค์ • `*.yaml` ํŒŒ์ผ์„ YOLO() ํด๋ž˜์Šค์— ์ „๋‹ฌํ•˜์—ฌ ๋ชจ๋ธ ์ธ์Šคํ„ด์Šค๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
+ + ```python + from ultralytics import YOLO + + # COCO 사전 훈련된 YOLOv3n 모델 로드 + model = YOLO('yolov3n.pt') + + # 모델 정보 표시 (선택 사항) + model.info() + + # COCO8 예제 데이터셋에서 100 epoch 동안 모델 훈련 + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv3n 모델로 'bus.jpg' 이미지에 추론 실행 + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI 명령어를 사용하여 모델을 직접 실행할 수 있습니다. + + ```bash + # COCO 사전 훈련된 YOLOv3n 모델 로드하고 COCO8 예제 데이터셋에서 100 epoch 동안 훈련 + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO 사전 훈련된 YOLOv3n 모델 로드하고 'bus.jpg' 이미지에 추론 실행 + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## 인용 및 감사의 글 + +본인의 연구에서 YOLOv3를 사용한다면, 원본 YOLO 논문과 Ultralytics YOLOv3 저장소를 인용해 주십시오. + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Joseph Redmon과 Ali Farhadi에게 원본 YOLOv3 개발에 대한 감사의 글을 전합니다. diff --git a/ultralytics/docs/ko/models/yolov3.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov4.md b/ultralytics/docs/ko/models/yolov4.md new file mode 100755 index 0000000..568d2c1 --- /dev/null +++ b/ultralytics/docs/ko/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: YOLOv4에 대한 상세 가이드를 살펴보세요. 최신 실시간 객체 감지기의 아키텍처 하이라이트, 혁신적인 기능 및 응용 예제를 이해하세요. +keywords: ultralytics, YOLOv4, 객체 감지, 신경망, 실시간 감지, 객체 감지기, 기계 학습 +--- + +# YOLOv4: 높은 속도와 정밀도를 갖는 객체 감지 + +Ultralytics YOLOv4 문서 페이지에 오신 것을 환영합니다. YOLOv4는 아키텍처 및 알고리즘 개선으로 실시간 객체 감지의 최적 속도와 정확도를 제공하는 최신 객체 감지기입니다. 2020년에 Alexey Bochkovskiy에 의해 [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet)에서 공개되었습니다. YOLOv4는 많은 응용 분야에서 우수한 선택입니다. + +![YOLOv4 아키텍처 다이어그램](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**YOLOv4 아키텍처 다이어그램**. YOLOv4의 복잡한 네트워크 설계를 보여줍니다. 최적의 실시간 객체 감지를 위해 백본, 넥 및 헤드 구성 요소와 이들의 상호 연결된 레이어가 포함되어 있습니다. + +## 소개 + +YOLOv4는 You Only Look Once의 4번째 버전을 의미합니다. 이전 YOLO 버전인 [YOLOv3](yolov3.md) 및 기타 객체 감지 모델의 한계를 극복하기 위해 개발된 실시간 객체 감지 모델입니다. 다른 합성곱 신경망(Convolutional Neural Network, CNN) 기반 객체 감지기와는 달리 YOLOv4는 추천 시스템뿐만 아니라 독립적인 프로세스 관리 및 인적 입력 감소에도 적용할 수 있습니다.
์ด๋Š” ์ผ๋ฐ˜์ ์ธ ๊ทธ๋ž˜ํ”ฝ ์ฒ˜๋ฆฌ ์žฅ์น˜(Graphics Processing Unit, GPU)์—์„œ ์ž‘๋™ํ•จ์œผ๋กœ์จ ์ €๋ ดํ•œ ๊ฐ€๊ฒฉ์— ๋Œ€๋Ÿ‰ ์‚ฌ์šฉ์„ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•ฉ๋‹ˆ๋‹ค. ๋˜ํ•œ, ํ›ˆ๋ จ์„ ์œ„ํ•ด ํ•˜๋‚˜์˜ GPU๋งŒ ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค. + +## ์•„ํ‚คํ…์ฒ˜ + +YOLOv4๋Š” ์„ฑ๋Šฅ์„ ์ตœ์ ํ™”ํ•˜๊ธฐ ์œ„ํ•ด ์—ฌ๋Ÿฌ ํ˜์‹ ์ ์ธ ๊ธฐ๋Šฅ์„ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์ด์—๋Š” Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish-activation, Mosaic data augmentation, DropBlock regularization ๋ฐ CIoU loss๊ฐ€ ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ๊ธฐ๋Šฅ๋“ค์€ ์ตœ์ฒจ๋‹จ ๊ฒฐ๊ณผ๋ฅผ ๋‹ฌ์„ฑํ•˜๊ธฐ ์œ„ํ•ด ๊ฒฐํ•ฉ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +์ผ๋ฐ˜์ ์ธ ๊ฐ์ฒด ๊ฐ์ง€๊ธฐ๋Š” ์ž…๋ ฅ, ๋ฐฑ๋ณธ, ๋„ฅ ๋ฐ ํ—ค๋“œ์™€ ๊ฐ™์€ ์—ฌ๋Ÿฌ ๋ถ€๋ถ„์œผ๋กœ ๊ตฌ์„ฑ๋ฉ๋‹ˆ๋‹ค. YOLOv4์˜ ๋ฐฑ๋ณธ์€ ImageNet์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋˜๋ฉฐ, ๊ฐ์ฒด์˜ ํด๋ž˜์Šค ๋ฐ ๊ฒฝ๊ณ„ ์ƒ์ž๋ฅผ ์˜ˆ์ธกํ•˜๋Š” ๋ฐ ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. ๋ฐฑ๋ณธ์€ VGG, ResNet, ResNeXt ๋˜๋Š” DenseNet๊ณผ ๊ฐ™์€ ์—ฌ๋Ÿฌ ๋ชจ๋ธ์—์„œ ๊ฐ€์ ธ์˜ฌ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๊ฐ์ฒด ๊ฐ์ง€๊ธฐ์˜ ๋„ฅ ๋ถ€๋ถ„์€ ๋‹ค์–‘ํ•œ ๋‹จ๊ณ„์—์„œ ํ”ผ์ฒ˜ ๋งต์„ ์ˆ˜์ง‘ํ•˜๋Š” ๋ฐ ์‚ฌ์šฉ๋˜๋ฉฐ, ์ผ๋ฐ˜์ ์œผ๋กœ ์—ฌ๋Ÿฌ ํ•˜ํ–ฅ ๊ฒฝ๋กœ ๋ฐ ์—ฌ๋Ÿฌ ์ƒํ–ฅ ๊ฒฝ๋กœ๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. ํ—ค๋“œ ๋ถ€๋ถ„์€ ์ตœ์ข… ๊ฐ์ฒด ๊ฐ์ง€ ๋ฐ ๋ถ„๋ฅ˜์— ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. + +## ๋ฒ ๊ณ  ์˜ค๋ธŒ ํ”„๋ฆฌ๋น„์Šค + +YOLOv4๋Š” ํ•™์Šต ์ค‘ ๋ชจ๋ธ์˜ ์ •ํ™•์„ฑ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๊ธฐ๋ฒ•์ธ "๋ฒ ๊ณ  ์˜ค๋ธŒ ํ”„๋ฆฌ๋น„์Šค"๋ฅผ ์‚ฌ์šฉํ•˜๊ธฐ๋„ ํ•ฉ๋‹ˆ๋‹ค. ๋ฐ์ดํ„ฐ ์ฆ๊ฐ•์€ ๊ฐ์ฒด ๊ฐ์ง€์—์„œ ์ฃผ๋กœ ์‚ฌ์šฉ๋˜๋Š” ๋ฒ ๊ณ  ์˜ค๋ธŒ ํ”„๋ฆฌ๋น„์Šค ๊ธฐ๋ฒ•์œผ๋กœ, ์ž…๋ ฅ ์ด๋ฏธ์ง€์˜ ๋‹ค์–‘์„ฑ์„ ๋†’์—ฌ ๋ชจ๋ธ์˜ ๊ฒฌ๊ณ ์„ฑ์„ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค. ๋ฐ์ดํ„ฐ ์ฆ๊ฐ•์˜ ๋ช‡ ๊ฐ€์ง€ ์˜ˆ๋Š” ํ™”์งˆ ์™œ๊ณก(์ด๋ฏธ์ง€์˜ ๋ฐ๊ธฐ, ๋Œ€์กฐ๋„, ์ƒ‰์ƒ, ์ฑ„๋„ ๋ฐ ๋…ธ์ด์ฆˆ ์กฐ์ •) ๋ฐ ๊ธฐํ•˜ํ•™์  ์™œ๊ณก(์ž„์˜์˜ ์Šค์ผ€์ผ๋ง, ํฌ๋กญ, ๋’ค์ง‘๊ธฐ, ํšŒ์ „ ์ถ”๊ฐ€)์ž…๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ๊ธฐ์ˆ ์€ ๋ชจ๋ธ์ด ๋‹ค์–‘ํ•œ ์œ ํ˜•์˜ ์ด๋ฏธ์ง€์— ๋Œ€ํ•ด ๋” ์ž˜ ์ผ๋ฐ˜ํ™”๋˜๋„๋ก ๋•์Šต๋‹ˆ๋‹ค. + +## ๊ธฐ๋Šฅ ๋ฐ ์„ฑ๋Šฅ + +YOLOv4๋Š” ๊ฐ์ฒด ๊ฐ์ง€์˜ ์ตœ์  ์†๋„์™€ ์ •ํ™•๋„๋ฅผ ์œ„ํ•ด ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. YOLOv4์˜ ์•„ํ‚คํ…์ฒ˜์—๋Š” ๋ฐฑ๋ณธ์œผ๋กœ CSPDarknet53, ๋„ฅ์œผ๋กœ PANet, ๊ฐ์ง€ ํ—ค๋“œ๋กœ YOLOv3๊ฐ€ ํฌํ•จ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค. ์ด ์„ค๊ณ„๋ฅผ ํ†ตํ•ด YOLOv4๋Š” ๋›ฐ์–ด๋‚œ ์†๋„๋กœ ๊ฐ์ฒด ๊ฐ์ง€๋ฅผ ์ˆ˜ํ–‰ํ•˜๋ฉฐ, ์‹ค์‹œ๊ฐ„ ์‘์šฉ ํ”„๋กœ๊ทธ๋žจ์— ์ ํ•ฉํ•ฉ๋‹ˆ๋‹ค. YOLOv4๋Š” ๊ฐ์ฒด ๊ฐ์ง€ ๋ฒค์น˜๋งˆํฌ์—์„œ ์ตœ์ฒจ๋‹จ ๊ฒฐ๊ณผ๋ฅผ ๋‹ฌ์„ฑํ•˜๊ณ  ์ •ํ™•๋„ ๋ฉด์—์„œ๋„ ๋›ฐ์–ด๋‚œ ์„ฑ๋Šฅ์„ ๋ณด์ž…๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +์ž‘์„ฑ ์‹œ์  ๊ธฐ์ค€์œผ๋กœ Ultralytics๋Š” ํ˜„์žฌ YOLOv4 ๋ชจ๋ธ์„ ์ง€์›ํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ YOLOv4๋ฅผ ์‚ฌ์šฉํ•˜๋ ค๋Š” ์‚ฌ์šฉ์ž๋Š” YOLOv4 GitHub ์ €์žฅ์†Œ์˜ ์„ค์น˜ ๋ฐ ์‚ฌ์šฉ ์ง€์นจ์„ ์ง์ ‘ ์ฐธ์กฐํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. + +๋‹ค์Œ์€ YOLOv4๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ์ผ๋ฐ˜์ ์ธ ๋‹จ๊ณ„์— ๋Œ€ํ•œ ๊ฐ„๋žตํ•œ ๊ฐœ์š”์ž…๋‹ˆ๋‹ค: + +1. YOLOv4 GitHub ์ €์žฅ์†Œ๋ฅผ ๋ฐฉ๋ฌธํ•˜์„ธ์š”: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. ์„ค์น˜์— ๋Œ€ํ•œ README ํŒŒ์ผ์— ์ œ๊ณต๋œ ์ง€์นจ์„ ๋”ฐ๋ฅด์„ธ์š”. ์ผ๋ฐ˜์ ์œผ๋กœ ์ €์žฅ์†Œ๋ฅผ ํด๋ก ํ•˜๊ณ  ํ•„์š”ํ•œ ์ข…์†์„ฑ์„ ์„ค์น˜ํ•˜๊ณ  ํ•„์š”ํ•œ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์„ค์ •ํ•˜๋Š” ๊ณผ์ •์„ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + +3. ์„ค์น˜๊ฐ€ ์™„๋ฃŒ๋˜๋ฉด, ์ €์žฅ์†Œ์—์„œ ์ œ๊ณตํ•˜๋Š” ์‚ฌ์šฉ ์ง€์นจ์— ๋”ฐ๋ผ ๋ชจ๋ธ์„ ํ›ˆ๋ จํ•˜๊ณ  ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” ์ผ๋ฐ˜์ ์œผ๋กœ ๋ฐ์ดํ„ฐ์…‹์„ ์ค€๋น„ํ•˜๊ณ  ๋ชจ๋ธ ๋งค๊ฐœ๋ณ€์ˆ˜๋ฅผ ์„ค์ •ํ•˜๊ณ  ๋ชจ๋ธ์„ ํ›ˆ๋ จํ•œ ๋‹ค์Œ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ฐ์ฒด ๊ฐ์ง€๋ฅผ ์ˆ˜ํ–‰ํ•˜๋Š” ๊ฒƒ์„ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. 
+ +ํŠน์ • ๋‹จ๊ณ„๋Š” ์‚ฌ์šฉ ์‚ฌ๋ก€์™€ YOLOv4 ์ €์žฅ์†Œ์˜ ํ˜„์žฌ ์ƒํƒœ์— ๋”ฐ๋ผ ๋‹ค๋ฅผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ YOLOv4 GitHub ์ €์žฅ์†Œ์—์„œ ์ œ๊ณต๋˜๋Š” ์ง€์นจ์„ ์ง์ ‘ ์ฐธ์กฐํ•˜๋Š” ๊ฒƒ์ด ๊ฐ•๋ ฅํžˆ ๊ถŒ์žฅ๋ฉ๋‹ˆ๋‹ค. + +YOLOv4์˜ ์ง€์›์ด ๊ตฌํ˜„๋˜๋ฉด Ultralytics๋ฅผ ์œ„ํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ๋กœ ์ด ๋ฌธ์„œ๋ฅผ ์—…๋ฐ์ดํŠธํ•˜๊ธฐ ์œ„ํ•ด ๋…ธ๋ ฅํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค. + +## ๊ฒฐ๋ก  + +YOLOv4๋Š” ์†๋„์™€ ์ •ํ™•๋„์˜ ๊ท ํ˜•์„ ์ด๋ฃจ๋Š” ๊ฐ•๋ ฅํ•˜๊ณ  ํšจ์œจ์ ์ธ ๊ฐ์ฒด ๊ฐ์ง€ ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. ํ•™์Šต ์ค‘ ํŠน์ • ๊ธฐ๋ฒ• ๋ฐ ๋ฒ ๊ณ  ์˜ค๋ธŒ ํ”„๋ฆฌ๋น„์Šค ๊ธฐ๋ฒ•์˜ ์‚ฌ์šฉ์œผ๋กœ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฐ์ง€ ์ž‘์—…์—์„œ ํƒ์›”ํ•œ ์„ฑ๋Šฅ์„ ๋ฐœํœ˜ํ•ฉ๋‹ˆ๋‹ค. ์ผ๋ฐ˜์ ์ธ GPU๋ฅผ ๊ฐ€์ง„ ์‚ฌ์šฉ์ž ๋ˆ„๊ตฌ๋‚˜ ์‚ฌ์šฉํ•˜๊ณ  ํ›ˆ๋ จํ•  ์ˆ˜ ์žˆ์–ด ๋‹ค์–‘ํ•œ ์‘์šฉ ๋ถ„์•ผ์— ์ ‘๊ทผ ๊ฐ€๋Šฅํ•˜๊ณ  ์‹ค์šฉ์ ์ž…๋‹ˆ๋‹ค. + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๊ธ€ + +์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฐ์ง€ ๋ถ„์•ผ์—์„œ ์ค‘์š”ํ•œ ๊ธฐ์—ฌ๋ฅผ ํ•œ YOLOv4 ์ €์ž๋“ค์—๊ฒŒ ๊ฐ์‚ฌ๋“œ๋ฆฝ๋‹ˆ๋‹ค: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +์›๋ณธ YOLOv4 ๋…ผ๋ฌธ์€ [arXiv](https://arxiv.org/abs/2004.10934)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ €์ž๋“ค์€ ์ž์‹ ๋“ค์˜ ์ž‘์—…์„ ์ผ๋ฐ˜์— ๊ณต๊ฐœํ•˜๊ณ  ์ฝ”๋“œ๋ฒ ์ด์Šค๋Š” [GitHub](https://github.com/AlexeyAB/darknet)์—์„œ ์•ก์„ธ์Šคํ•  ์ˆ˜ ์žˆ๋„๋ก ํ–ˆ์Šต๋‹ˆ๋‹ค. ์ €์ž๋“ค์˜ ๋…ธ๋ ฅ๊ณผ ๋„๋ฆฌ ์•Œ๋ ค์ง„ ์ปค๋ฎค๋‹ˆํ‹ฐ์— ์ž‘์—…์„ ์ œ๊ณตํ•ด ์ค€ ์‚ฌํ•ญ์„ ๊ฐ์‚ฌํžˆ ์—ฌ๊น๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/yolov4.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov5.md b/ultralytics/docs/ko/models/yolov5.md new file mode 100755 index 0000000..451c4ed --- /dev/null +++ b/ultralytics/docs/ko/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: YOLOv5u๋Š” YOLOv5 ๋ชจ๋ธ์˜ ๊ฐœ์„ ๋œ ์ •ํ™•๋„-์†๋„ ์ ˆ์ถฉ ๋ชจ๋ธ๋กœ, ๋‹ค์–‘ํ•œ ๊ฐ์ฒด ๊ฐ์ง€ ์ž‘์—…์— ๋Œ€ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +keywords: YOLOv5u, ๊ฐ์ฒด ๊ฐ์ง€, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ, Ultralytics, ์ถ”๋ก , ๊ฒ€์ฆ, YOLOv5, YOLOv8, ์•ต์ปค ์—†์Œ, ๊ฐ์ฒด ์—ฌ๋ถ€ ์—†์Œ, ์‹ค์‹œ๊ฐ„ ์‘์šฉ, ๋จธ์‹  ๋Ÿฌ๋‹ +--- + +# YOLOv5 + +## ๊ฐœ์š” + +YOLOv5u๋Š” ๊ฐ์ฒด ๊ฐ์ง€ ๊ธฐ๋ฒ•์—์„œ์˜ ์ง„๋ณด๋ฅผ ๋‚˜ํƒ€๋ƒ…๋‹ˆ๋‹ค. Ultralytics์—์„œ ๊ฐœ๋ฐœํ•œ [YOLOv5](https://github.com/ultralytics/yolov5) ๋ชจ๋ธ์˜ ๊ธฐ๋ณธ ์•„ํ‚คํ…์ฒ˜๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ํ•œ YOLOv5u๋Š” [YOLOv8](yolov8.md) ๋ชจ๋ธ์—์„œ ๋„์ž…๋œ ์•ต์ปค ์—†์Œ, ๊ฐ์ฒด ์—ฌ๋ถ€ ์—†์Œ ๋ถ„๋ฆฌ ํ—ค๋“œ(head) ๊ธฐ๋Šฅ์„ ํ†ตํ•ฉํ•ฉ๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ์ ์‘์œผ๋กœ ์ธํ•ด ๋ชจ๋ธ์˜ ์•„ํ‚คํ…์ฒ˜๊ฐ€ ๊ฐœ์„ ๋˜์–ด, ๊ฐ์ฒด ๊ฐ์ง€ ์ž‘์—…์˜ ์ •ํ™•๋„์™€ ์†๋„ ์ ˆ์ถฉ์„ ๋”์šฑ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค. ๊ฒฝํ—˜์  ๊ฒฐ๊ณผ์™€ ํ•ด๋‹น ๊ธฐ๋Šฅ์„ ๊ณ ๋ คํ•  ๋•Œ, YOLOv5u๋Š” ์—ฐ๊ตฌ ๋ฐ ์‹ค์ œ ์‘์šฉ ๋ชจ๋‘์—์„œ ๊ฒฌ๊ณ ํ•œ ์†”๋ฃจ์…˜์„ ์ฐพ๊ณ  ์žˆ๋Š” ์‚ฌ์šฉ์ž๋“ค์—๊ฒŒ ํšจ์œจ์ ์ธ ๋Œ€์•ˆ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## ์ฃผ์š” ๊ธฐ๋Šฅ + +- **์•ต์ปค ์—†๋Š” ๋ถ„๋ฆฌ Ultralytics ํ—ค๋“œ:** ๊ธฐ์กด์˜ ๊ฐ์ฒด ๊ฐ์ง€ ๋ชจ๋ธ์€ ์‚ฌ์ „ ์ •์˜๋œ ์•ต์ปค ๋ฐ•์Šค๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๊ฐ์ฒด์˜ ์œ„์น˜๋ฅผ ์˜ˆ์ธกํ•ฉ๋‹ˆ๋‹ค. ๊ทธ๋Ÿฌ๋‚˜ YOLOv5u๋Š” ์ด ๋ฐฉ์‹์„ ํ˜„๋Œ€ํ™”ํ•ฉ๋‹ˆ๋‹ค. 
์•ต์ปค ์—†๋Š” ๋ถ„๋ฆฌ Ultralytics ํ—ค๋“œ๋ฅผ ๋„์ž…ํ•จ์œผ๋กœ์จ ๋”์šฑ ์œ ์—ฐํ•˜๊ณ  ์ ์‘์ ์ธ ๊ฐ์ง€ ๋ฉ”์ปค๋‹ˆ์ฆ˜์„ ๋ณด์žฅํ•˜์—ฌ ๋‹ค์–‘ํ•œ ์‹œ๋‚˜๋ฆฌ์˜ค์—์„œ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค. + +- **์ •ํ™•๋„-์†๋„ ์ ˆ์ถฉ์˜ ์ตœ์ ํ™”:** ์†๋„์™€ ์ •ํ™•๋„๋Š” ์ข…์ข… ์ƒ์ถฉํ•˜๋Š” ๊ด€๊ณ„์— ์žˆ์Šต๋‹ˆ๋‹ค. ๊ทธ๋Ÿฌ๋‚˜ YOLOv5u๋Š” ์ด๋Ÿฌํ•œ ์ ˆ์ถฉ์„ ๋„์ „ํ•ฉ๋‹ˆ๋‹ค. ์‹ค์‹œ๊ฐ„ ํƒ์ง€๋ฅผ ๋ณด์žฅํ•˜๋ฉด์„œ๋„ ์ •ํ™•๋„๋ฅผ ํฌ์ƒํ•˜์ง€ ์•Š๋Š” ๊ท ํ˜•์„ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค. ์ด ๊ธฐ๋Šฅ์€ ์ž์œจ์ฃผํ–‰ ์ฐจ๋Ÿ‰, ๋กœ๋ด‡ ๊ณตํ•™, ์‹ค์‹œ๊ฐ„ ๋น„๋””์˜ค ๋ถ„์„ ๋“ฑ ์‹ ์†ํ•œ ์‘๋‹ต์„ ์š”๊ตฌํ•˜๋Š” ์‘์šฉ ํ”„๋กœ๊ทธ๋žจ์—์„œ ํŠนํžˆ ์ค‘์š”ํ•ฉ๋‹ˆ๋‹ค. + +- **๋‹ค์–‘ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ:** ๋‹ค๋ฅธ ์ž‘์—…์—๋Š” ๋‹ค๋ฅธ ๋„๊ตฌ ์„ธํŠธ๊ฐ€ ํ•„์š”ํ•˜๋‹ค๋Š” ๊ฒƒ์„ ์ดํ•ดํ•˜๋Š” YOLOv5u๋Š” ๋‹ค์–‘ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ถ”๋ก , ๊ฒ€์ฆ ๋˜๋Š” ํ›ˆ๋ จ์— ์ง‘์ค‘ํ•˜๊ณ  ์žˆ๋Š”์ง€ ์—ฌ๋ถ€์— ๊ด€๊ณ„์—†์ด ๋งž์ถคํ˜• ๋ชจ๋ธ์ด ๊ธฐ๋‹ค๋ฆฌ๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. ์ด ๋‹ค์–‘์„ฑ์€ ์ผ๋ฐ˜์ ์ธ ์†”๋ฃจ์…˜์ด ์•„๋‹Œ ๋…ํŠนํ•œ ๋„์ „ ๊ณผ์ œ์— ๋Œ€ํ•ด ํŠน๋ณ„ํžˆ ์„ธ๋ฐ€ํ•˜๊ฒŒ ์กฐ์ •๋œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜๊ณ  ์žˆ๋‹ค๋Š” ๊ฒƒ์„ ๋ณด์žฅํ•ฉ๋‹ˆ๋‹ค. + +## ์ง€์›๋˜๋Š” ์ž‘์—… ๋ฐ ๋ชจ๋“œ + +ั€ะฐะทะฝะพะฑะพะนacionales of YOLOv5u ๋ชจ๋ธ์€ ๋‹ค์–‘ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜๋กœ [๊ฐ์ฒด ๊ฐ์ง€](../tasks/detect.md) ์ž‘์—…์—์„œ ๋›ฐ์–ด๋‚œ ์„ฑ๋Šฅ์„ ๋ฐœํœ˜ํ•ฉ๋‹ˆ๋‹ค. ์ด๋“ค์€ ๊ฐœ๋ฐœ๋ถ€ํ„ฐ ๋ฐฐํฌ๊นŒ์ง€ ๋‹ค์–‘ํ•œ ์‘์šฉ ํ”„๋กœ๊ทธ๋žจ์— ์ ํ•ฉํ•œ ๋‹ค์–‘ํ•œ ๋ชจ๋“œ๋ฅผ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ ์œ ํ˜• | ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜ | ์ž‘์—… | ์ถ”๋ก  | ๊ฒ€์ฆ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ | +|---------|-----------------------------------------------------------------------------------------------------------------------------|-----------------------------|----|----|----|------| +| YOLOv5u | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [๊ฐ์ฒด ๊ฐ์ง€](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +์ด ํ‘œ๋Š” YOLOv5u ๋ชจ๋ธ์˜ ๋‹ค์–‘ํ•œ ๋ณ€ํ˜•์„ ์ƒ์„ธํžˆ ๋ณด์—ฌ์ฃผ๋ฉฐ, ๊ฐ์ฒด ๊ฐ์ง€ ์ž‘์—…์—์„œ์˜ ์ ์šฉ ๊ฐ€๋Šฅ์„ฑ๊ณผ [์ถ”๋ก ](../modes/predict.md), [๊ฒ€์ฆ](../modes/val.md), [ํ›ˆ๋ จ](../modes/train.md), [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ์ž‘์—… ๋ชจ๋“œ์˜ ์ง€์›์„ ๊ฐ•์กฐํ•ฉ๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ํฌ๊ด„์ ์ธ ์ง€์›์„ ํ†ตํ•ด ์‚ฌ์šฉ์ž๋Š” ๋‹ค์–‘ํ•œ ๊ฐ์ฒด ๊ฐ์ง€ ์‹œ๋‚˜๋ฆฌ์˜ค์—์„œ YOLOv5u ๋ชจ๋ธ์˜ ๊ธฐ๋Šฅ์„ ์™„์ „ํžˆ ํ™œ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์„ฑ๋Šฅ ์ง€ํ‘œ + +!!! ์„ฑ๋Šฅ + + === "๊ฐ์ง€" + + [COCO](https://docs.ultralytics.com/datasets/detect/coco/)์—์„œ ํ•™์Šต๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ๋Š” [๊ฐ์ง€ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/detect/)๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. ์ด ๋ฌธ์„œ์—๋Š” 80๊ฐœ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | YAML | ํฌ๊ธฐ
(ํ”ฝ์…€) | mAPval
50-95 | ์†๋„
CPU ONNX
(ms) | ์†๋„
A100 TensorRT
(ms) | ๋งค๊ฐœ๋ณ€์ˆ˜
(M) | FLOPs
(B) | + |---------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|-----------------------|----------------------|--------------------------------|-------------------------------------|--------------------|-------------------| + | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 | + | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 | + | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 | + | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 | + | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 | + | | | | | | | | | + | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 | + | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 | + | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 | + | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 | + | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 | + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +์ด ์˜ˆ์ œ๋Š” ๊ฐ„๋‹จํ•œ YOLOv5 ํ›ˆ๋ จ ๋ฐ ์ถ”๋ก  ์˜ˆ์ œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด์™€ ๊ธฐํƒ€ [๋ชจ๋“œ](../modes/index.md)์˜ ์ž์„ธํ•œ ์„ค๋ช…์€ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) ๋ฐ [Export](../modes/export.md) ๋ฌธ์„œ ํŽ˜์ด์ง€๋ฅผ ์ฐธ์กฐํ•˜์„ธ์š”. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + Python์—์„œ `YOLO()` ํด๋ž˜์Šค๋กœ `*.pt` ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ๊ณผ ๊ตฌ์„ฑ `*.yaml` ํŒŒ์ผ์„ ์ „๋‹ฌํ•˜์—ฌ ๋ชจ๋ธ ์ธ์Šคํ„ด์Šค๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
+ + ```python + from ultralytics import YOLO + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv5n ๋ชจ๋ธ ๋กœ๋“œ + model = YOLO('yolov5n.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์„ ํƒ ์‚ฌํ•ญ) + model.info() + + # COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ 100๋ฒˆ ์—ํฌํฌ๋กœ ํ›ˆ๋ จ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # 'bus.jpg' ์ด๋ฏธ์ง€์— ๋Œ€ํ•ด YOLOv5n ๋ชจ๋ธ๋กœ ์ถ”๋ก  ์‹คํ–‰ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI ๋ช…๋ น์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ ์ง์ ‘ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + + ```bash + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv5n ๋ชจ๋ธ ๋กœ๋“œ ๋ฐ COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ 100๋ฒˆ ์—ํฌํฌ๋กœ ํ›ˆ๋ จ + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv5n ๋ชจ๋ธ ๋กœ๋“œ ๋ฐ 'bus.jpg' ์ด๋ฏธ์ง€์—์„œ ์ถ”๋ก  ์‹คํ–‰ + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๊ธ€ + +์—ฐ๊ตฌ์—์„œ YOLOv5 ๋˜๋Š” YOLOv5u๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ๊ฒฝ์šฐ Ultralytics YOLOv5 ๋ฆฌํฌ์ง€ํ† ๋ฆฌ๋ฅผ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ธ์šฉํ•˜์„ธ์š”. + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +YOLOv5 ๋ชจ๋ธ์€ [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ๋ฐ [Enterprise](https://ultralytics.com/license) ๋ผ์ด์„ ์Šค๋กœ ์ œ๊ณต๋ฉ๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/yolov5.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov6.md b/ultralytics/docs/ko/models/yolov6.md new file mode 100755 index 0000000..a28ee86 --- /dev/null +++ b/ultralytics/docs/ko/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: ์ตœ์ฒจ๋‹จ ๋ฌผ์ฒด ๊ฐ์ง€(์˜ค๋ธŒ์ ํŠธ ๋””ํ…์…˜) ๋ชจ๋ธ์ธ 'Meituan YOLOv6'์„ ์•Œ์•„๋ณด์„ธ์š”. ์†๋„์™€ ์ •ํ™•๋„ ์‚ฌ์ด์˜ ๊ท ํ˜•์„ ์œ ์ง€ํ•˜๋Š” ์ด ๋ชจ๋ธ์€ ์‹ค์‹œ๊ฐ„ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์— ์ธ๊ธฐ ์žˆ๋Š” ์„ ํƒ์ž…๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ BiC(Bi-directional Concatenation) ๋ชจ๋“ˆ, AAT(Anchor-Aided Training) ์ „๋žต, COCO ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ตœ์ฒจ๋‹จ ์ •ํ™•๋„๋ฅผ ์‹คํ˜„ํ•˜๊ธฐ ์œ„ํ•œ ๊ฐœ์„ ๋œ ๋ฐฑ๋ณธ(backbone) ๋ฐ ๋„คํฌ(neck) ์„ค๊ณ„ ๋“ฑ์— ๋Œ€ํ•œ ์—ฌ๋Ÿฌ ์ฃผ๋ชฉํ• ๋งŒํ•œ ํ–ฅ์ƒ ์‚ฌํ•ญ์„ ๋„์ž…ํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: Meituan YOLOv6, ์˜ค๋ธŒ์ ํŠธ ๋””ํ…์…˜, Ultralytics, YOLOv6 ๋ฌธ์„œ, Bi-directional Concatenation, Anchor-Aided Training, ์‚ฌ์ „ ํ›ˆ๋ จ ๋ชจ๋ธ, ์‹ค์‹œ๊ฐ„ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ +--- + +# Meituan YOLOv6 + +## ๊ฐœ์š” + +[Meituan](https://about.meituan.com/) YOLOv6์€ ์†๋„์™€ ์ •ํ™•๋„ ์‚ฌ์ด์—์„œ ํ˜„์ €ํ•œ ๊ท ํ˜•์„ ์ œ๊ณตํ•˜๋Š” ์ตœ์ฒจ๋‹จ ๋ฌผ์ฒด ๊ฐ์ง€๊ธฐ์ž…๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ Bi-directional Concatenation(BiC) ๋ชจ๋“ˆ, Anchor-Aided Training(AAT) ์ „๋žต, ๊ทธ๋ฆฌ๊ณ  COCO ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ตœ์ฒจ๋‹จ ์ •ํ™•๋„๋ฅผ ์‹คํ˜„ํ•˜๊ธฐ ์œ„ํ•œ ๊ฐœ์„ ๋œ ๋ฐฑ๋ณธ(backbone) ๋ฐ ๋„คํฌ(neck) ๋””์ž์ธ ๋“ฑ, ์•„ํ‚คํ…์ฒ˜์™€ ํ›ˆ๋ จ ๋ฐฉ์‹์— ๋Œ€ํ•œ ์—ฌ๋Ÿฌ ์ฃผ๋ชฉํ• ๋งŒํ•œ ํ–ฅ์ƒ ์‚ฌํ•ญ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. 
+ +![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![๋ชจ๋ธ ์˜ˆ์‹œ ์ด๋ฏธ์ง€](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**YOLOv6 ๊ฐœ์š”**. ์•„ํ‚คํ…์ฒ˜ ๋‹ค์ด์–ด๊ทธ๋žจ์œผ๋กœ, ๋‹ค์‹œ ์„ค๊ณ„๋œ ๋„คํŠธ์›Œํฌ ๊ตฌ์„ฑ ์š”์†Œ์™€ ํ›ˆ๋ จ ์ „๋žต์ด ์ค‘์š”ํ•œ ์„ฑ๋Šฅ ๊ฐœ์„ ์„ ์ด๋ˆ ๋ชจ์Šต์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. (a) YOLOv6์˜ ๋„คํฌ(neck) (N๊ณผ S ํ‘œ์‹œ)์ž…๋‹ˆ๋‹ค. M/L์˜ ๊ฒฝ์šฐ, RepBlocks์€ CSPStackRep์œผ๋กœ ๋Œ€์ฒด๋ฉ๋‹ˆ๋‹ค. (b) BiC ๋ชจ๋“ˆ์˜ ๊ตฌ์กฐ์ž…๋‹ˆ๋‹ค. (c) SimCSPSPPF ๋ธ”๋ก์ž…๋‹ˆ๋‹ค. ([์ถœ์ฒ˜](https://arxiv.org/pdf/2301.05586.pdf)). + +### ์ฃผ์š” ํŠน์ง• + +- **Bi-directional Concatenation (BiC) ๋ชจ๋“ˆ**: YOLOv6์€ ๊ฐ์ง€๊ธฐ(neck)์— BiC ๋ชจ๋“ˆ์„ ๋„์ž…ํ•˜์—ฌ ์œ„์น˜ ์‹ ํ˜ธ(localization signals)๋ฅผ ๊ฐ•ํ™”ํ•˜๊ณ  ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š”๋ฐ, ์†๋„ ์ €ํ•˜๊ฐ€ ๊ฑฐ์˜ ์—†์Šต๋‹ˆ๋‹ค. +- **Anchor-Aided Training (AAT) ์ „๋žต**: ์ด ๋ชจ๋ธ์€ ์ถ”๋ก  ํšจ์œจ์„ ์ €ํ•˜์‹œํ‚ค์ง€ ์•Š๊ณ  ์•ต์ปค ๊ธฐ๋ฐ˜(anchor-based)๊ณผ ์•ต์ปค ์—†์Œ(anchor-free) ํŒจ๋Ÿฌ๋‹ค์ž„์˜ ์ด์ ์„ ๋ชจ๋‘ ๋ˆ„๋ฆด ์ˆ˜ ์žˆ๋„๋ก AAT๋ฅผ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค. +- **๊ฐœ์„ ๋œ ๋ฐฑ๋ณธ ๋ฐ ๋„คํฌ ๋””์ž์ธ**: YOLOv6์„ ๋ฐฑ๋ณธ๊ณผ ๋„คํฌ์— ์ถ”๊ฐ€์ ์ธ ๋‹จ๊ณ„๋ฅผ ํฌํ•จํ•˜์—ฌ ๊นŠ๊ฒŒ ๋งŒ๋“ค์–ด COCO ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ตœ์ฒจ๋‹จ ์„ฑ๋Šฅ์„ ๋‹ฌ์„ฑํ•ฉ๋‹ˆ๋‹ค. +- **์…€ํ”„ ๋””์Šคํ‹ธ๋ ˆ์ด์…˜ ์ „๋žต**: YOLOv6์˜ ์ž‘์€ ๋ชจ๋ธ ์„ฑ๋Šฅ์„ ๊ฐ•ํ™”ํ•˜๊ธฐ ์œ„ํ•ด ์ƒˆ๋กœ์šด ์…€ํ”„ ๋””์Šคํ‹ธ๋ ˆ์ด์…˜ ์ „๋žต์ด ๋„์ž…๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” ํ›ˆ๋ จ ์ค‘ ๋ณด์กฐ ํšŒ๊ท€ ๋ธŒ๋žœ์น˜๋ฅผ ๊ฐ•ํ™”ํ•˜๊ณ  ์ถ”๋ก  ์ค‘์—๋Š” ์ด๋ฅผ ์ œ๊ฑฐํ•˜์—ฌ ์„ฑ๋Šฅ ์ €ํ•˜๋ฅผ ๋ฐฉ์ง€ํ•ฉ๋‹ˆ๋‹ค. + +## ์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ + +YOLOv6์€ ๋‹ค์–‘ํ•œ ์Šค์ผ€์ผ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ ๋ชจ๋ธ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค: + +- YOLOv6-N: NVIDIA Tesla T4 GPU์—์„œ 1187 FPS๋กœ COCO val2017์—์„œ 37.5% AP. +- YOLOv6-S: 484 FPS๋กœ 45.0% AP. +- YOLOv6-M: 226 FPS๋กœ 50.0% AP. +- YOLOv6-L: 116 FPS๋กœ 52.8% AP. +- YOLOv6-L6: ์‹ค์‹œ๊ฐ„์—์„œ ์ตœ์ฒจ๋‹จ ์ •ํ™•์„ฑ. + +๋˜ํ•œ, YOLOv6์€ ๋‹ค์–‘ํ•œ ์ •๋ฐ€๋„์— ๋Œ€ํ•œ ์–‘์žํ™” ๋ชจ๋ธ๊ณผ ๋ชจ๋ฐ”์ผ ํ”Œ๋žซํผ์— ์ตœ์ ํ™”๋œ ๋ชจ๋ธ๋„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์‹œ + +๋‹ค์Œ์€ ๊ฐ„๋‹จํ•œ YOLOv6 ํ›ˆ๋ จ ๋ฐ ์ถ”๋ก  ์˜ˆ์‹œ์ž…๋‹ˆ๋‹ค. ์ด ์™ธ์—๋„ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md), [Export](../modes/export.md) ๋ฌธ์„œ ํŽ˜์ด์ง€์—์„œ ์ž์„ธํ•œ ๋‚ด์šฉ์„ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +!!! 
Example "예제" + + === "Python" + + `*.pt` 사전 훈련된 PyTorch 모델과 구성 `*.yaml` 파일을 `YOLO()` 클래스에 전달하여 파이썬에서 모델 인스턴스를 만들 수 있습니다: + + ```python + from ultralytics import YOLO + + # YOLOv6n 모델을 처음부터 만듭니다 + model = YOLO('yolov6n.yaml') + + # 모델 정보를 표시합니다 (선택 사항) + model.info() + + # COCO8 예시 데이터셋으로 모델을 100 에폭 동안 훈련합니다 + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # YOLOv6n 모델로 'bus.jpg' 이미지에서 추론을 실행합니다 + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI 명령을 사용하여 모델을 직접 실행할 수 있습니다: + + ```bash + # 처음부터 YOLOv6n 모델을 만들고 COCO8 예시 데이터셋으로 100 에폭 동안 훈련합니다 + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # 처음부터 YOLOv6n 모델을 만들고 'bus.jpg' 이미지에서 추론을 실행합니다 + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## 지원되는 작업 및 모드 + +YOLOv6 시리즈는 높은 성능의 [오브젝트 디텍션](../tasks/detect.md)을 위해 최적화된 다양한 모델을 제공합니다. 이 모델들은 다양한 계산 요구 사항과 정확도 요구 사항에 맞추어 다용도로 사용할 수 있습니다. + +| 모델 유형 | 사전 훈련 가중치 | 지원되는 작업 | 추론 | 검증 | 훈련 | 익스포트 | +|-----------|----------------|--------------------------------|----|----|----|------| +| YOLOv6-N | `yolov6-n.pt` | [오브젝트 디텍션](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | +| YOLOv6-S | `yolov6-s.pt` | [오브젝트 디텍션](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | +| YOLOv6-M | `yolov6-m.pt` | [오브젝트 디텍션](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | +| YOLOv6-L | `yolov6-l.pt` | [오브젝트 디텍션](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | +| YOLOv6-L6 | `yolov6-l6.pt` | [오브젝트 디텍션](../tasks/detect.md) | ✅ | ✅ | ✅ | ✅ | + +이 표는 YOLOv6 모델의 다양한 변형에 대한 자세한 개요를 제공하며, 오브젝트 디텍션 작업과 [추론](../modes/predict.md), [검증](../modes/val.md), [훈련](../modes/train.md), [익스포트](../modes/export.md)와 같은 다양한 운영 모드와의 호환성을 강조합니다. 이러한 포괄적인 지원을 통해 사용자들은 다양한 오브젝트 디텍션 시나리오에서 YOLOv6 모델의 기능을 최대한 활용할 수 있습니다. + +## 인용 및 감사의 글 + +실시간 물체 감지 분야에서의 중요한 기여에 대해 작성자들에게 감사의 말씀을 전합니다: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + + YOLOv6 원본 논문은 [arXiv](https://arxiv.org/abs/2301.05586)에서 찾을 수 있습니다. 작성자들은 자신의 작업을 공개했으며, 코드는 [GitHub](https://github.com/meituan/YOLOv6)에서 액세스할 수 있습니다. 이 분야의 발전에 기여하고 자신들의 작업을 폭넓은 커뮤니티에 공개한 저자들의 노력에 감사의 말씀을 전합니다. 
diff --git a/ultralytics/docs/ko/models/yolov6.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov7.md b/ultralytics/docs/ko/models/yolov7.md new file mode 100755 index 0000000..2071682 --- /dev/null +++ b/ultralytics/docs/ko/models/yolov7.md @@ -0,0 +1,65 @@ +--- +comments: true +description: YOLOv7์€ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๋กœ, ๋›ฐ์–ด๋‚œ ์†๋„, ๊ฐ•๋ ฅํ•œ ์ •ํ™•์„ฑ, ๋…ํŠนํ•œ trainable bag-of-freebies ์ตœ์ ํ™”์— ๋Œ€ํ•ด ์•Œ์•„๋ด…๋‹ˆ๋‹ค. +keywords: YOLOv7, ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ, ์ตœ์ฒจ๋‹จ, Ultralytics, MS COCO ๋ฐ์ดํ„ฐ์…‹, ๋ชจ๋ธ ์žฌํŒŒ๋ผ๋ฏธํ„ฐํ™”, ๋™์  ๋ผ๋ฒจ ํ• ๋‹น, ํ™•์žฅ ์Šค์ผ€์ผ, ๋ณตํ•ฉ ์Šค์ผ€์ผ +--- + +# YOLOv7: Trainable Bag-of-Freebies + +YOLOv7์€ 5 FPS์—์„œ 160 FPS๊นŒ์ง€์˜ ๋ฒ”์œ„์—์„œ ์•Œ๋ ค์ง„ ๋ชจ๋“  ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๋ฅผ ์†๋„์™€ ์ •ํ™•์„ฑ์—์„œ ๋Šฅ๊ฐ€ํ•˜๋Š” ์ตœ์ฒจ๋‹จ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ์ž…๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ GPU V100์—์„œ 30 FPS ์ด์ƒ์„ ๋‹ฌ์„ฑํ•˜์—ฌ, ์•Œ๋ ค์ง„ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ ์ค‘ ๊ฐ€์žฅ ๋†’์€ ์ •ํ™•๋„(56.8% AP)๋ฅผ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. ๊ฒŒ๋‹ค๊ฐ€, YOLOv7์€ ๋‹ค๋ฅธ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ์ธ YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5 ๋“ฑ์— ๋น„ํ•ด ์†๋„์™€ ์ •ํ™•์„ฑ ๋ฉด์—์„œ ๋” ๋›ฐ์–ด๋‚ฉ๋‹ˆ๋‹ค. ์ด ๋ชจ๋ธ์€ ๋‹ค๋ฅธ ๋ฐ์ดํ„ฐ์…‹์ด๋‚˜ ์‚ฌ์ „ ํ•™์Šต๋œ ๊ฐ€์ค‘์น˜๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  MS COCO ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ฒ˜์Œ๋ถ€ํ„ฐ ํ›ˆ๋ จ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. YOLOv7์˜ ์†Œ์Šค ์ฝ”๋“œ๋Š” GitHub์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +![YOLOv7์™€ ์ตœ์ฒจ๋‹จ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ ๋น„๊ต](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**์ตœ์ฒจ๋‹จ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ ๋น„๊ต**. ํ‘œ 2์˜ ๊ฒฐ๊ณผ์—์„œ ๋ณผ ์ˆ˜ ์žˆ๋“ฏ์ด, ์ œ์•ˆ๋œ ๋ฐฉ๋ฒ•์€ ์ตœ์ƒ์˜ ์†๋„-์ •ํ™•์„ฑ ๊ท ํ˜•์„ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. YOLOv7-tiny-SiLU๋ฅผ YOLOv5-N(r6.1)๊ณผ ๋น„๊ตํ•ด๋ณด๋ฉด, ์ €ํฌ ๋ฐฉ๋ฒ•์€ ์•ฝ 127 fps๊ฐ€ ๋” ๋น ๋ฅด๊ณ  AP์—์„œ 10.7% ์ •ํ™•๋„๊ฐ€ ํ–ฅ์ƒ๋ฉ๋‹ˆ๋‹ค. ๊ฒŒ๋‹ค๊ฐ€, YOLOv7์€ 161 fps ํ”„๋ ˆ์ž„ ์†๋„์—์„œ 51.4% AP๋ฅผ ๋‹ฌ์„ฑํ•˜๋Š” ๋ฐ˜๋ฉด, PPYOLOE-L์€ ๋™์ผํ•œ AP์—์„œ 78 fps ํ”„๋ ˆ์ž„ ์†๋„๋งŒ ๊ฐ–์Šต๋‹ˆ๋‹ค. ๋งค๊ฐœ ๋ณ€์ˆ˜ ์‚ฌ์šฉ ์ธก๋ฉด์—์„œ YOLOv7๋Š” PPYOLOE-L์˜ 41%๋ฅผ ์ค„์ž…๋‹ˆ๋‹ค. YOLOv7-X๋ฅผ 114 fps์˜ ์ถ”๋ก  ์†๋„๋กœ YOLOv5-L(r6.1)์˜ 99 fps ์ถ”๋ก  ์†๋„์™€ ๋น„๊ตํ•˜๋ฉด, YOLOv7-X๋Š” AP๋ฅผ 3.9% ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค. YOLOv7-X๋ฅผ ์œ ์‚ฌํ•œ ์Šค์ผ€์ผ์˜ YOLOv5-X(r6.1)์™€ ๋น„๊ตํ•˜๋ฉด, YOLOv7-X์˜ ์ถ”๋ก  ์†๋„๊ฐ€ 31 fps ๋” ๋นจ๋ผ์ง‘๋‹ˆ๋‹ค. ๋˜ํ•œ, ๋งค๊ฐœ ๋ณ€์ˆ˜ ๋ฐ ๊ณ„์‚ฐ์˜ ์–‘ ์ธก๋ฉด์—์„œ YOLOv7-X๋Š” YOLOv5-X(r6.1)๊ณผ ๋น„๊ตํ•˜์—ฌ ๋งค๊ฐœ ๋ณ€์ˆ˜ 22%์™€ ๊ณ„์‚ฐ 8%๋ฅผ ์ค„์ด๊ณ  AP๋ฅผ 2.2% ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค ([์ถœ์ฒ˜](https://arxiv.org/pdf/2207.02696.pdf)). + +## ๊ฐœ์š” + +์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ์€ ๋‹ค์ค‘ ๊ฐ์ฒด ์ถ”์ , ์ž์œจ ์ฃผํ–‰, ๋กœ๋ด‡ ๊ณตํ•™ ๋ฐ ์˜๋ฃŒ ์ด๋ฏธ์ง€ ๋ถ„์„์„ ๋น„๋กฏํ•œ ๋งŽ์€ ์ปดํ“จํ„ฐ ๋น„์ „ ์‹œ์Šคํ…œ์˜ ์ค‘์š”ํ•œ ๊ตฌ์„ฑ ์š”์†Œ์ž…๋‹ˆ๋‹ค. ์ตœ๊ทผ ๋ช‡ ๋…„๊ฐ„ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ ๊ฐœ๋ฐœ์€ ํšจ์œจ์ ์ธ ๊ตฌ์กฐ ์„ค๊ณ„์™€ ๋‹ค์–‘ํ•œ CPU, GPU ๋ฐ ์‹ ๊ฒฝ ์ฒ˜๋ฆฌ ์žฅ์น˜(NPU)์˜ ์ถ”๋ก  ์†๋„ ํ–ฅ์ƒ์— ์ดˆ์ ์„ ๋งž์ถ”๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. YOLOv7์€ ๋ชจ๋ฐ”์ผ GPU์™€ GPU ์žฅ์น˜๋ฅผ ๋ชจ๋‘ ์ง€์›ํ•˜์—ฌ ์—ฃ์ง€๋ถ€ํ„ฐ ํด๋ผ์šฐ๋“œ๊นŒ์ง€ ๋‹ค์–‘ํ•œ ํ™˜๊ฒฝ์—์„œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +๊ธฐ์กด์˜ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๊ฐ€ ์•„ํ‚คํ…์ฒ˜ ์ตœ์ ํ™”์— ์ค‘์ ์„ ๋‘” ๊ฒƒ๊ณผ๋Š” ๋‹ฌ๋ฆฌ, YOLOv7์€ ํ›ˆ๋ จ ๊ณผ์ • ์ตœ์ ํ™”์— ์ดˆ์ ์„ ๋‘๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. 
์ด๋Š” ์ถ”๋ก  ๋น„์šฉ์„ ์ฆ๊ฐ€์‹œํ‚ค์ง€ ์•Š๊ณ  ๊ฐ์ฒด ๊ฒ€์ถœ์˜ ์ •ํ™•๋„๋ฅผ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๋ชจ๋“ˆ๊ณผ ์ตœ์ ํ™” ๋ฐฉ๋ฒ•์„ ํฌํ•จํ•œ "trainable bag-of-freebies" ๊ฐœ๋…์„ ๋„์ž…ํ•ฉ๋‹ˆ๋‹ค. + +## ์ฃผ์š” ๊ธฐ๋Šฅ + +YOLOv7์€ ๋‹ค์Œ๊ณผ ๊ฐ™์€ ์ฃผ์š” ๊ธฐ๋Šฅ์„ ๋„์ž…ํ•ฉ๋‹ˆ๋‹ค: + +1. **๋ชจ๋ธ ์žฌํŒŒ๋ผ๋ฏธํ„ฐํ™”**: YOLOv7์€ ๊ทธ๋ž˜๋””์–ธํŠธ ์ „ํŒŒ ๊ฒฝ๋กœ ๊ฐœ๋…์„ ์ด์šฉํ•œ ๋‹ค๋ฅธ ๋„คํŠธ์›Œํฌ์˜ ๋ ˆ์ด์–ด์— ์ ์šฉ ๊ฐ€๋Šฅํ•œ ์ „๋žต์ธ ๊ณ„ํš๋œ ์žฌํŒŒ๋ผ๋ฏธํ„ฐํ™” ๋ชจ๋ธ์„ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค. + +2. **๋™์  ๋ผ๋ฒจ ํ• ๋‹น**: ๋‹ค์ค‘ ์ถœ๋ ฅ ๋ ˆ์ด์–ด ๋ชจ๋ธ์˜ ํ›ˆ๋ จ์—์„œ๋Š” "๋‹ค๋ฅธ ๋ธŒ๋žœ์น˜์˜ ์ถœ๋ ฅ์— ๋Œ€ํ•ด ๋™์  ํƒ€๊นƒ์„ ์–ด๋–ป๊ฒŒ ํ• ๋‹นํ•  ๊ฒƒ์ธ๊ฐ€?"๋ผ๋Š” ์ƒˆ๋กœ์šด ๋ฌธ์ œ๊ฐ€ ๋ฐœ์ƒํ•ฉ๋‹ˆ๋‹ค. ์ด๋ฅผ ํ•ด๊ฒฐํ•˜๊ธฐ ์œ„ํ•ด YOLOv7์€ coarse-to-fine ๋ฆฌ๋“œ ๊ฐ€์ด๋“œ ๋ผ๋ฒจ ํ• ๋‹น์ด๋ผ๋Š” ์ƒˆ๋กœ์šด ๋ผ๋ฒจ ํ• ๋‹น ๋ฐฉ๋ฒ•์„ ๋„์ž…ํ•ฉ๋‹ˆ๋‹ค. + +3. **ํ™•์žฅ ๋ฐ ๋ณตํ•ฉ ์Šค์ผ€์ผ๋ง**: YOLOv7์€ ๋งค๊ฐœ ๋ณ€์ˆ˜์™€ ๊ณ„์‚ฐ์„ ํšจ๊ณผ์ ์œผ๋กœ ํ™œ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ๋ฅผ ์œ„ํ•œ "ํ™•์žฅ" ๋ฐ "๋ณตํ•ฉ ์Šค์ผ€์ผ๋ง" ๋ฐฉ๋ฒ•์„ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค. + +4. **ํšจ์œจ์„ฑ**: YOLOv7์ด ์ œ์•ˆํ•œ ๋ฐฉ๋ฒ•์€ ์ตœ์ฒจ๋‹จ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ๊ธฐ์˜ ๋งค๊ฐœ ๋ณ€์ˆ˜ ์•ฝ 40%, ๊ณ„์‚ฐ ์•ฝ 50%๋ฅผ ํšจ๊ณผ์ ์œผ๋กœ ์ค„์ผ ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋” ๋น ๋ฅธ ์ถ”๋ก  ์†๋„์™€ ๋” ๋†’์€ ๊ฒ€์ถœ ์ •ํ™•๋„๋ฅผ ๋‹ฌ์„ฑํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์‹œ + +๊ธฐ์ˆ  ์‹œ์ ์—์„œ Ultralytics์€ ํ˜„์žฌ YOLOv7 ๋ชจ๋ธ์„ ์ง€์›ํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ YOLOv7์„ ์‚ฌ์šฉํ•˜๋ ค๋Š” ์‚ฌ์šฉ์ž๋Š” YOLOv7 GitHub ์ €์žฅ์†Œ์˜ ์„ค์น˜ ๋ฐ ์‚ฌ์šฉ ์ง€์นจ์„ ์ง์ ‘ ์ฐธ์กฐํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค. + +YOLOv7์„ ์‚ฌ์šฉํ•˜๋Š” ์ผ๋ฐ˜์ ์ธ ๋‹จ๊ณ„์— ๋Œ€ํ•ด ๊ฐ„๋žตํžˆ ์„ค๋ช…ํ•ด ๋“œ๋ฆฌ๊ฒ ์Šต๋‹ˆ๋‹ค: + +1. YOLOv7 GitHub ์ €์žฅ์†Œ๋ฅผ ๋ฐฉ๋ฌธํ•ฉ๋‹ˆ๋‹ค: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7). + +2. ์„ค์น˜์— ๋Œ€ํ•œ README ํŒŒ์ผ์—์„œ ์ œ๊ณตํ•˜๋Š” ์ง€์นจ์„ ๋”ฐ๋ฆ…๋‹ˆ๋‹ค. ์ผ๋ฐ˜์ ์œผ๋กœ ์ €์žฅ์†Œ๋ฅผ ๋ณต์ œํ•˜๊ณ  ํ•„์š”ํ•œ ์ข…์†์„ฑ์„ ์„ค์น˜ํ•˜๊ณ  ํ•„์š”ํ•œ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ์„ค์ •ํ•˜๋Š” ๊ฒƒ์ด ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. + +3. ์„ค์น˜๊ฐ€ ์™„๋ฃŒ๋˜๋ฉด ์ €์žฅ์†Œ์—์„œ ์ œ๊ณตํ•˜๋Š” ์‚ฌ์šฉ ์ง€์นจ์— ๋”ฐ๋ผ ๋ชจ๋ธ์„ ํ›ˆ๋ จํ•˜๊ณ  ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” ๋ฐ์ดํ„ฐ์…‹์„ ์ค€๋น„ํ•˜๊ณ  ๋ชจ๋ธ ๋งค๊ฐœ ๋ณ€์ˆ˜๋ฅผ ๊ตฌ์„ฑํ•˜๊ณ  ๋ชจ๋ธ์„ ํ›ˆ๋ จํ•œ ๋‹ค์Œ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ฐ์ฒด ๊ฒ€์ถœ์„ ์ˆ˜ํ–‰ํ•˜๋Š” ๊ฒƒ์„ ์ผ๋ฐ˜์ ์œผ๋กœ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + +ํŠน์ • ๋‹จ๊ณ„๋Š” ์‚ฌ์šฉ ์‚ฌ๋ก€์™€ YOLOv7 ์ €์žฅ์†Œ์˜ ํ˜„์žฌ ์ƒํƒœ์— ๋”ฐ๋ผ ๋‹ฌ๋ผ์งˆ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๋”ฐ๋ผ์„œ YOLOv7 GitHub ์ €์žฅ์†Œ์—์„œ ์ œ๊ณตํ•˜๋Š” ์ง€์นจ์„ ์ง์ ‘ ์ฐธ์กฐํ•˜๋Š” ๊ฒƒ์ด ๊ถŒ์žฅ๋ฉ๋‹ˆ๋‹ค. + +YOLOv7์„ ์ง€์›ํ•˜๊ฒŒ ๋˜๋ฉด, Ultralytics์˜ ์‚ฌ์šฉ ์˜ˆ์‹œ๋ฅผ ํฌํ•จํ•˜์—ฌ ์ด ๋ฌธ์„œ๋ฅผ ์—…๋ฐ์ดํŠธํ•˜๊ธฐ ์œ„ํ•ด ์ตœ์„ ์„ ๋‹คํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค. + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๊ธ€ + +์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฒ€์ถœ ๋ถ„์•ผ์—์„œ์˜ ์ค‘์š”ํ•œ ๊ธฐ์—ฌ๋กœ ์ธํ•ด YOLOv7์˜ ์ €์ž๋“ค์—๊ฒŒ ๊ฐ์‚ฌ์˜ ๋ง์”€์„ ์ „ํ•˜๊ณ ์ž ํ•ฉ๋‹ˆ๋‹ค: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +์›๋ณธ YOLOv7 ๋…ผ๋ฌธ์€ [arXiv](https://arxiv.org/pdf/2207.02696.pdf)์—์„œ ์ฐพ์„ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ €์ž๋“ค์€ ์ž‘์—…์„ ๊ณต๊ฐœ์ ์œผ๋กœ ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•˜์˜€๊ณ , ์ฝ”๋“œ๋ฒ ์ด์Šค๋Š” [GitHub](https://github.com/WongKinYiu/yolov7)์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
์ €ํฌ๋Š” ์ด๋“ค์ด ํ•ด๋‹น ๋ถ„์•ผ์˜ ๋ฐœ์ „์— ๊ธฐ์—ฌํ•˜๊ณ  ์ž‘์—…์„ ํญ๋„“์€ ์ปค๋ฎค๋‹ˆํ‹ฐ์—๊ฒŒ ๊ณต๊ฐœ ๊ฐ€๋Šฅํ•˜๊ฒŒ ํ•œ ๋…ธ๋ ฅ์— ๊ฐ์‚ฌ๋“œ๋ฆฝ๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/yolov7.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/models/yolov8.md b/ultralytics/docs/ko/models/yolov8.md new file mode 100755 index 0000000..15084fa --- /dev/null +++ b/ultralytics/docs/ko/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: YOLOv8๋Š” ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ํƒ์ง€๊ธฐ์ธ YOLO ์‹œ๋ฆฌ์ฆˆ์˜ ์ตœ์‹  ๋ฒ„์ „์œผ๋กœ, ์ตœ์‹  ์•„ํ‚คํ…์ฒ˜, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ ๋ฐ ์ •ํ™•๋„์™€ ์†๋„์˜ ์ตœ์  ๊ท ํ˜•์„ ์ œ๊ณตํ•จ์œผ๋กœ์จ ๋‹ค์–‘ํ•œ ๊ฐ์ฒด ํƒ์ง€ ์ž‘์—…์— ์ ํ•ฉํ•œ ์„ ํƒ์ง€์ž…๋‹ˆ๋‹ค. +keywords: YOLOv8, Ultralytics, ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ํƒ์ง€๊ธฐ, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ, ๋ฌธ์„œ, ๊ฐ์ฒด ํƒ์ง€, YOLO ์‹œ๋ฆฌ์ฆˆ, ๊ณ ๊ธ‰ ์•„ํ‚คํ…์ฒ˜, ์ •ํ™•๋„, ์†๋„ +--- + +# YOLOv8 + +## ๊ฐœ์š” + +YOLOv8๋Š” ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ํƒ์ง€๊ธฐ์ธ YOLO ์‹œ๋ฆฌ์ฆˆ์˜ ์ตœ์‹  ๋ฒ„์ „์œผ๋กœ, ์ •ํ™•๋„์™€ ์†๋„๋ฉด์—์„œ ์ตœ์‹  ๊ธฐ์ˆ ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด์ „ YOLO ๋ฒ„์ „์˜ ๊ธฐ์ˆ ์  ๋ฐœ์ „์„ ๋ฐ”ํƒ•์œผ๋กœ, YOLOv8์€ ์ƒˆ๋กœ์šด ๊ธฐ๋Šฅ๊ณผ ์ตœ์ ํ™”๋ฅผ ๋„์ž…ํ•˜์—ฌ ๋‹ค์–‘ํ•œ ์‘์šฉ ๋ถ„์•ผ์—์„œ ๋‹ค์–‘ํ•œ ๊ฐ์ฒด ํƒ์ง€ ์ž‘์—…์— ์ด์ƒ์ ์ธ ์„ ํƒ์ง€๊ฐ€ ๋ฉ๋‹ˆ๋‹ค. + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## ์ฃผ์š” ๊ธฐ๋Šฅ + +- **๊ณ ๊ธ‰ ๋ฐฑ๋ณธ ๋ฐ ๋„ฅ ์•„ํ‚คํ…์ฒ˜:** YOLOv8์€ ์ตœ์ฒจ๋‹จ ๋ฐฑ๋ณธ ๋ฐ ๋„ฅ ์•„ํ‚คํ…์ฒ˜๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ํ–ฅ์ƒ๋œ ํŠน์ง• ์ถ”์ถœ๊ณผ ๊ฐ์ฒด ํƒ์ง€ ์„ฑ๋Šฅ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. +- **์•ต์ปค ์—†๋Š” ๋ถ„๋ฆฌ๋œ Ultralytics ํ—ค๋“œ:** YOLOv8์€ ์•ต์ปค ๊ธฐ๋ฐ˜ ์ ‘๊ทผ๋ฒ•์— ๋น„ํ•ด ๋” ๋‚˜์€ ์ •ํ™•๋„์™€ ํšจ์œจ์ ์ธ ํƒ์ง€ ๊ณผ์ •์„ ์œ„ํ•œ ์•ต์ปค ์—†๋Š” ๋ถ„๋ฆฌ๋œ Ultralytics ํ—ค๋“œ๋ฅผ ์ฑ„ํƒํ•ฉ๋‹ˆ๋‹ค. +- **์ •ํ™•๋„์™€ ์†๋„์˜ ์ตœ์  ๊ท ํ˜•ํ™”:** ์ •ํ™•๋„์™€ ์†๋„์˜ ์ตœ์  ๊ท ํ˜•์„ ์œ ์ง€ํ•˜๋Š” ๊ฒƒ์— ์ดˆ์ ์„ ๋งž์ถ˜ YOLOv8์€ ๋‹ค์–‘ํ•œ ์‘์šฉ ๋ถ„์•ผ์—์„œ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ํƒ์ง€ ์ž‘์—…์— ์ ํ•ฉํ•ฉ๋‹ˆ๋‹ค. +- **๋‹ค์–‘ํ•œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ:** YOLOv8์€ ๋‹ค์–‘ํ•œ ์ž‘์—…๊ณผ ์„ฑ๋Šฅ ์š”๊ฑด์— ๋งž๋Š” ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ์ œ๊ณตํ•˜์—ฌ ํŠน์ • ์‚ฌ์šฉ ์‚ฌ๋ก€์— ์ ํ•ฉํ•œ ๋ชจ๋ธ์„ ์‰ฝ๊ฒŒ ์ฐพ์„ ์ˆ˜ ์žˆ๋„๋ก ํ•ฉ๋‹ˆ๋‹ค. + +## ์ง€์›ํ•˜๋Š” ์ž‘์—… ๋ฐ ๋ชจ๋“œ + +YOLOv8 ์‹œ๋ฆฌ์ฆˆ๋Š” ์ปดํ“จํ„ฐ ๋น„์ „์˜ ํŠน์ • ์ž‘์—…์„ ์œ„ํ•ด ํŠนํ™”๋œ ๋‹ค์–‘ํ•œ ๋ชจ๋ธ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์€ ๊ฐ์ฒด ํƒ์ง€๋ถ€ํ„ฐ ์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜, ํฌ์ฆˆ/ํ‚คํฌ์ธํŠธ ํƒ์ง€, ๋ถ„๋ฅ˜์™€ ๊ฐ™์€ ๋ณต์žกํ•œ ์ž‘์—…๊นŒ์ง€ ๋‹ค์–‘ํ•œ ์š”๊ตฌ ์‚ฌํ•ญ์„ ์ถฉ์กฑ์‹œํ‚ฌ ์ˆ˜ ์žˆ๋„๋ก ์„ค๊ณ„๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +๊ฐ YOLOv8 ์‹œ๋ฆฌ์ฆˆ์˜ ๋ณ€ํ˜•์€ ํ•ด๋‹น ์ž‘์—…์— ์ตœ์ ํ™”๋˜์–ด ๊ณ ์„ฑ๋Šฅ๊ณผ ์ •ํ™•๋„๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ๋˜ํ•œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์€ [์ถ”๋ก ](../modes/predict.md), [๊ฒ€์ฆ](../modes/val.md), [ํ›ˆ๋ จ](../modes/train.md) ๋ฐ [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md)์™€ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ์šด์˜ ๋ชจ๋“œ์™€ ํ˜ธํ™˜๋˜์–ด ๋ฐฐํฌ ๋ฐ ๊ฐœ๋ฐœ์˜ ๋‹ค์–‘ํ•œ ๋‹จ๊ณ„์—์„œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋„๋ก ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. 
+ +| ๋ชจ๋ธ | ํŒŒ์ผ ์ด๋ฆ„ | ์ž‘์—… | ์ถ”๋ก  | ๊ฒ€์ฆ | ํ›ˆ๋ จ | ๋‚ด๋ณด๋‚ด๊ธฐ | +|-------------|----------------------------------------------------------------------------------------------------------------|------------------------------------|----|----|----|------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [๊ฐ์ฒด ํƒ์ง€](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [ํฌ์ฆˆ/ํ‚คํฌ์ธํŠธ](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [๋ถ„๋ฅ˜](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +์ด ํ‘œ๋Š” YOLOv8 ๋ชจ๋ธ์˜ ๋ณ€ํ˜•์„ ๊ฐœ์š”๋กœ ์ œ๊ณตํ•˜๋ฉฐ, ํŠน์ • ์ž‘์—…์—์„œ์˜ ์ ์šฉ ๊ฐ€๋Šฅ์„ฑ ๋ฐ ์ถ”๋ก , ๊ฒ€์ฆ, ํ›ˆ๋ จ ๋ฐ ๋‚ด๋ณด๋‚ด๊ธฐ์™€ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ์šด์˜ ๋ชจ๋“œ์™€์˜ ํ˜ธํ™˜์„ฑ์„ ๊ฐ•์กฐํ•ฉ๋‹ˆ๋‹ค. ์ด๋Š” ์ปดํ“จํ„ฐ ๋น„์ „์˜ ๋‹ค์–‘ํ•œ ์‘์šฉ ๋ถ„์•ผ์— ์ ํ•ฉํ•œ YOLOv8 ์‹œ๋ฆฌ์ฆˆ์˜ ๋‹ค์žฌ๋‹ค๋Šฅํ•จ๊ณผ ๊ฒฌ๊ณ ์„ฑ์„ ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. + +## ์„ฑ๋Šฅ ์ง€ํ‘œ + +!!! ์„ฑ๋Šฅ + + === "๊ฐ์ฒด ํƒ์ง€ (COCO)" + + [COCO](https://docs.ultralytics.com/datasets/detect/coco/)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ์— ๋Œ€ํ•œ ์ •๋ณด๋Š” [๊ฐ์ฒด ํƒ์ง€ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/detect/)๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. ์ด๋Š” 80๊ฐœ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | mAP
val 50-95 | ์†๋„
CPU ONNX
(๋ฐ€๋ฆฌ์ดˆ) | ์†๋„
A100 TensorRT
(๋ฐ€๋ฆฌ์ดˆ) | ๋งค๊ฐœ๋ณ€์ˆ˜
(๋ฐฑ๋งŒ) | FLOPs
(์‹ญ์–ต) | + | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------- | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "๊ฐ์ฒด ํƒ์ง€ (Open Images V7)" + + [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ์— ๋Œ€ํ•œ ์ •๋ณด๋Š” [๊ฐ์ฒด ํƒ์ง€ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/detect/)๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. ์ด๋Š” 600๊ฐœ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | mAP
val 50-95 | ์†๋„
CPU ONNX
(๋ฐ€๋ฆฌ์ดˆ) | ์†๋„
A100 TensorRT
(๋ฐ€๋ฆฌ์ดˆ) | ๋งค๊ฐœ๋ณ€์ˆ˜
(๋ฐฑ๋งŒ) | FLOPs
(์‹ญ์–ต) | + | ----------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------- | ----------------- | + | [YOLOv8n-oiv7](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s-oiv7](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m-oiv7](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l-oiv7](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x-oiv7](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ (COCO)" + + [COCO](https://docs.ultralytics.com/datasets/segment/coco/)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ์— ๋Œ€ํ•œ ์ •๋ณด๋Š” [์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/segment/)๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. ์ด๋Š” 80๊ฐœ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | mAP
box 50-95 | mAP
mask 50-95 | ์†๋„
CPU ONNX
(๋ฐ€๋ฆฌ์ดˆ) | ์†๋„
A100 TensorRT
(๋ฐ€๋ฆฌ์ดˆ) | ๋งค๊ฐœ๋ณ€์ˆ˜
(๋ฐฑ๋งŒ) | FLOPs
(์‹ญ์–ต) | + | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------- | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "๋ถ„๋ฅ˜ (ImageNet)" + + [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ์— ๋Œ€ํ•œ ์ •๋ณด๋Š” [๋ถ„๋ฅ˜ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/classify/)๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. ์ด๋Š” 1000๊ฐœ์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | top1 ์ •ํ™•๋„ | top5 ์ •ํ™•๋„ | ์†๋„
CPU ONNX
(๋ฐ€๋ฆฌ์ดˆ) | ์†๋„
A100 TensorRT
(๋ฐ€๋ฆฌ์ดˆ) | ๋งค๊ฐœ๋ณ€์ˆ˜
(๋ฐฑ๋งŒ) | FLOPs
(๋ฐฑ๋งŒ) at 640 | + | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | ----------------- | ------------------------------ | ----------------------------------- | ------------------- | ------------------------ | + | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | + | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | + | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | + | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | + | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + + === "ํฌ์ฆˆ (COCO)" + + [COCO](https://docs.ultralytics.com/datasets/pose/coco/)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ์ด๋Ÿฌํ•œ ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ์— ๋Œ€ํ•œ ์ •๋ณด๋Š” [ํฌ์ฆˆ ๋ฌธ์„œ](https://docs.ultralytics.com/tasks/pose/)๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. ์ด๋Š” 'person' ํ•˜๋‚˜์˜ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ํด๋ž˜์Šค๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. + + | ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | mAP
pose 50-95 | mAP
pose 50 | ์†๋„
CPU ONNX
(๋ฐ€๋ฆฌ์ดˆ) | ์†๋„
A100 TensorRT
(๋ฐ€๋ฆฌ์ดˆ) | ๋งค๊ฐœ๋ณ€์ˆ˜
(๋ฐฑ๋งŒ) | FLOPs
(์‹ญ์–ต) | + | ---------------------------------------------------------------------------------------------------- | --------------------- | -------------------- | ----------------- | ------------------------------ | ----------------------------------- | ------------------- | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +์ด ์˜ˆ์ œ๋Š” ๊ฐ„๋‹จํ•œ YOLOv8 ํ›ˆ๋ จ ๋ฐ ์ถ”๋ก  ์˜ˆ์ œ๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด์™€ ๋‹ค๋ฅธ [๋ชจ๋“œ](../modes/index.md)์˜ ์ „์ฒด ๋ฌธ์„œ๋Š” [์˜ˆ์ธก](../modes/predict.md), [ํ›ˆ๋ จ](../modes/train.md), [๊ฒ€์ฆ](../modes/val.md) ๋ฐ [๋‚ด๋ณด๋‚ด๊ธฐ](../modes/export.md) ๋ฌธ์„œ๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. + +์•„๋ž˜ ์˜ˆ์ œ๋Š” ๊ฐ์ฒด ํƒ์ง€๋ฅผ ์œ„ํ•œ YOLOv8 [Detect](../tasks/detect.md) ๋ชจ๋ธ์„ ๋Œ€์ƒ์œผ๋กœ ํ•ฉ๋‹ˆ๋‹ค. ์ถ”๊ฐ€ ์ง€์›๋˜๋Š” ์ž‘์—…์— ๋Œ€ํ•ด์„œ๋Š” [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) ๋ฐ [Pose](../tasks/pose.md) ๋ฌธ์„œ๋ฅผ ์ฐธ์กฐํ•˜์‹ญ์‹œ์˜ค. + +!!! ์˜ˆ์‹œ + + === "Python" + + python์—์„œ PyTorch ์‚ฌ์ „ ํ›ˆ๋ จ๋œ `*.pt` ๋ชจ๋ธ ๋ฐ ๊ตฌ์„ฑ `*.yaml` ํŒŒ์ผ์„ `YOLO()` ํด๋ž˜์Šค์— ์ „๋‹ฌํ•˜์—ฌ ๋ชจ๋ธ ์ธ์Šคํ„ด์Šค๋ฅผ ๋งŒ๋“ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + + ```python + from ultralytics import YOLO + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ ๋กœ๋“œ + model = YOLO('yolov8n.pt') + + # ๋ชจ๋ธ ์ •๋ณด ํ‘œ์‹œ (์˜ต์…˜) + model.info() + + # COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์—์„œ ๋ชจ๋ธ์„ 100ํšŒ ์—ํฌํฌ๋กœ ํ›ˆ๋ จ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # 'bus.jpg' ์ด๋ฏธ์ง€์—์„œ YOLOv8n ๋ชจ๋ธ๋กœ ์ถ”๋ก  ์‹คํ–‰ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI ๋ช…๋ น์„ ์‚ฌ์šฉํ•˜๋ฉด ์ง์ ‘ ๋ชจ๋ธ์„ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + + ```bash + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ์„ ๋กœ๋“œํ•˜๊ณ  COCO8 ์˜ˆ์ œ ๋ฐ์ดํ„ฐ์…‹์—์„œ 100ํšŒ ์—ํฌํฌ๋กœ ํ›ˆ๋ จ + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # COCO ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ์„ ๋กœ๋“œํ•˜๊ณ  'bus.jpg' ์ด๋ฏธ์ง€์—์„œ ์ถ”๋ก  ์‹คํ–‰ + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ์ธ์šฉ ๋ฐ ๊ฐ์‚ฌ์˜ ๊ธ€ + +์ด ์ €์žฅ์†Œ์—์„œ YOLOv8 ๋ชจ๋ธ์ด๋‚˜ ๋‹ค๋ฅธ ์†Œํ”„ํŠธ์›จ์–ด๋ฅผ ์‚ฌ์šฉํ•œ ์ž‘์—…์— ๋Œ€ํ•ด์„œ๋Š” ๋‹ค์Œ ํ˜•์‹์œผ๋กœ ์ธ์šฉํ•ด์ฃผ์‹œ๊ธฐ ๋ฐ”๋ž๋‹ˆ๋‹ค. + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +DOI๋Š” ์ค€๋น„ ์ค‘์ด๋ฉฐ ์ด์šฉ ๊ฐ€๋Šฅํ•ด์ง€๋ฉด ์ธ์šฉ๋ฌธ์— ์ถ”๊ฐ€๋  ์˜ˆ์ •์ž…๋‹ˆ๋‹ค. 
YOLOv8 ๋ชจ๋ธ์€ [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ๋ฐ [Enterprise](https://ultralytics.com/license) ๋ผ์ด์„ผ์Šค์— ๋”ฐ๋ผ ์ œ๊ณต๋ฉ๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/models/yolov8.md:Zone.Identifier b/ultralytics/docs/ko/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/modes/benchmark.md b/ultralytics/docs/ko/modes/benchmark.md new file mode 100755 index 0000000..147da2f --- /dev/null +++ b/ultralytics/docs/ko/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: YOLOv8์˜ ๋‹ค์–‘ํ•œ ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์— ๊ฑธ์ณ ์†๋„ ๋ฐ ์ •ํ™•์„ฑ์„ ํ”„๋กœํŒŒ์ผ๋งํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์•Œ์•„๋ณด๊ณ , mAP50-95, accuracy_top5 ๋ฉ”ํŠธ๋ฆญ ๋ฐ ๊ธฐํƒ€์— ๋Œ€ํ•œ ํ†ต์ฐฐ์„ ์–ป์œผ์‹ญ์‹œ์˜ค. +keywords: Ultralytics, YOLOv8, ๋ฒค์น˜๋งˆํ‚น, ์†๋„ ํ”„๋กœํŒŒ์ผ๋ง, ์ •ํ™•๋„ ํ”„๋กœํŒŒ์ผ๋ง, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLO ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹ +--- + +# Ultralytics YOLO๋ฅผ ์‚ฌ์šฉํ•œ ๋ชจ๋ธ ๋ฒค์น˜๋งˆํ‚น + +Ultralytics YOLO ์ƒํƒœ๊ณ„ ๋ฐ ํ†ตํ•ฉ + +## ์†Œ๊ฐœ + +๋ชจ๋ธ์„ ํ•™์Šตํ•˜๊ณ  ๊ฒ€์ฆํ•œ ํ›„, ๋‹ค์Œ์œผ๋กœ ๋…ผ๋ฆฌ์ ์ธ ๋‹จ๊ณ„๋Š” ๋‹ค์–‘ํ•œ ์‹ค์ œ ์ƒํ™ฉ์—์„œ์˜ ์„ฑ๋Šฅ์„ ํ‰๊ฐ€ํ•˜๋Š” ๊ฒƒ์ž…๋‹ˆ๋‹ค. Ultralytics YOLOv8์˜ ๋ฒค์น˜๋งˆํฌ ๋ชจ๋“œ๋Š” ๋‹ค์–‘ํ•œ ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์—์„œ ๋ชจ๋ธ์˜ ์†๋„์™€ ์ •ํ™•๋„๋ฅผ ํ‰๊ฐ€ํ•˜๋Š” ๊ฐ•๋ ฅํ•œ ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์ œ๊ณตํ•˜์—ฌ ์ด์™€ ๊ฐ™์€ ๋ชฉ์ ์„ ์ˆ˜ํ–‰ํ•˜๋Š” ์—ญํ• ์„ ํ•ฉ๋‹ˆ๋‹ค. + +## ๋ฒค์น˜๋งˆํ‚น์ด ์™œ ์ค‘์š”ํ•œ๊ฐ€์š”? + +- **์ •๋ณด์— ๊ธฐ๋ฐ˜ํ•œ ๊ฒฐ์ •:** ์†๋„์™€ ์ •ํ™•๋„ ์‚ฌ์ด์˜ ํƒ€ํ˜‘์ ์— ๋Œ€ํ•œ ํ†ต์ฐฐ๋ ฅ์„ ์–ป์„ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +- **์ž์› ๋ฐฐ๋ถ„:** ๋‹ค์–‘ํ•œ ํ•˜๋“œ์›จ์–ด์—์„œ ๊ฐ๊ธฐ ๋‹ค๋ฅธ ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์˜ ์„ฑ๋Šฅ์„ ์ดํ•ดํ•ฉ๋‹ˆ๋‹ค. +- **์ตœ์ ํ™”:** ํŠน์ • ์‚ฌ์šฉ ์‚ฌ๋ก€์— ๊ฐ€์žฅ ์ ํ•ฉํ•œ ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์„ ์•Œ์•„๋ƒ…๋‹ˆ๋‹ค. +- **๋น„์šฉ ํšจ์œจ์„ฑ:** ๋ฒค์น˜๋งˆํฌ ๊ฒฐ๊ณผ์— ๊ธฐ๋ฐ˜ํ•˜์—ฌ ํ•˜๋“œ์›จ์–ด ์ž์›์„ ๋ณด๋‹ค ํšจ์œจ์ ์œผ๋กœ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. + +### ๋ฒค์น˜๋งˆํฌ ๋ชจ๋“œ์˜ ์ฃผ์š” ๋ฉ”ํŠธ๋ฆญ + +- **mAP50-95:** ๊ฐ์ฒด ์ธ์‹, ์„ธ๋ถ„ํ™”, ์ž์„ธ ์ถ”์ •์— ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. +- **accuracy_top5:** ์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜์— ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. +- **์ถ”๋ก  ์‹œ๊ฐ„:** ๊ฐ ์ด๋ฏธ์ง€ ๋‹น ๋ฐ€๋ฆฌ์ดˆ๋กœ ์ธก์ •๋œ ์‹œ๊ฐ„์ž…๋‹ˆ๋‹ค. + +### ์ง€์›๋˜๋Š” ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹ + +- **ONNX:** CPU ์„ฑ๋Šฅ ์ตœ์ ํ™”๋ฅผ ์œ„ํ•จ +- **TensorRT:** GPU ํšจ์œจ์„ฑ์„ ๊ทน๋Œ€ํ™”ํ•˜๊ธฐ ์œ„ํ•จ +- **OpenVINO:** ์ธํ…” ํ•˜๋“œ์›จ์–ด ์ตœ์ ํ™”๋ฅผ ์œ„ํ•จ +- **CoreML, TensorFlow SavedModel, ๊ทธ ์™ธ:** ๋‹ค์–‘ํ•œ ๋ฐฐํฌ ์š”๊ตฌ ์‚ฌํ•ญ์„ ์œ„ํ•จ. + +!!! Tip "ํŒ" + + * CPU ์†๋„ ํ–ฅ์ƒ์„ ์œ„ํ•ด ONNX ๋˜๋Š” OpenVINO๋กœ ๋‚ด๋ณด๋‚ด๊ธฐ. + * GPU ์†๋„ ํ–ฅ์ƒ์„ ์œ„ํ•ด TensorRT๋กœ ๋‚ด๋ณด๋‚ด๊ธฐ. + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +YOLOv8n ๋ฒค์น˜๋งˆํ‚น์„ ONNX, TensorRT ๋“ฑ ๋ชจ๋“  ์ง€์›๋˜๋Š” ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์— ๋Œ€ํ•ด ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค. ์™„๋ฒฝํ•œ ๋‚ด๋ณด๋‚ด๊ธฐ ์ธ์ˆ˜ ๋ชฉ๋ก์„ ๋ณด๋ ค๋ฉด ์•„๋ž˜์˜ ์ธ์ˆ˜ ์„น์…˜์„ ์ฐธ์กฐํ•˜์„ธ์š”. + +!!! 
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics.utils.benchmarks import benchmark
+
+        # Benchmark on GPU
+        benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0)
+        ```
+    === "CLI"
+
+        ```bash
+        yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0
+        ```
+
+## Arguments
+
+Arguments such as `model`, `data`, `imgsz`, `half`, `device` and `verbose` give users the flexibility to fine-tune the benchmarks to their specific needs and to compare the performance of different export formats with ease.
+
+| Key | Value | Description |
+|---|---|---|
+| `model` | `None` | path to model file, i.e. yolov8n.pt, yolov8n.yaml |
+| `data` | `None` | path to YAML referencing the benchmarking dataset (under the 'val' label) |
+| `imgsz` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) |
+| `half` | `False` | FP16 quantization |
+| `int8` | `False` | INT8 quantization |
+| `device` | `None` | device to run on, i.e. CUDA device=0 or device=0,1,2,3 or device=cpu |
+| `verbose` | `False` | do not continue on error (bool), or val floor threshold (float) |
+
+## Export Formats
+
+Benchmarks will attempt to run automatically on all the possible export formats listed below.
+
+| Format | `format` Argument | Model | Metadata | Arguments |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` |
+
+See full `export` details on the [Export](https://docs.ultralytics.com/modes/export/) page.
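+
+A quick sketch of narrowing a run with the arguments above: CPU-only, INT8 enabled, and `verbose` given as a float, which the table describes as a val floor rather than a boolean (the exact threshold value here is an illustrative assumption):
+
+```python
+from ultralytics.utils.benchmarks import benchmark
+
+# CPU-only benchmark with INT8 quantization for formats that support it.
+# verbose=0.25 (float) is assumed, per the table above, to act as a minimum
+# metric threshold, flagging any export format that scores below 0.25.
+benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, int8=True, device='cpu', verbose=0.25)
+```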
diff --git a/ultralytics/docs/ko/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/ko/modes/benchmark.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/modes/benchmark.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/modes/export.md b/ultralytics/docs/ko/modes/export.md
new file mode 100755
index 0000000..98a9f10
--- /dev/null
+++ b/ultralytics/docs/ko/modes/export.md
@@ -0,0 +1,108 @@
+---
+comments: true
+description: A step-by-step guide to exporting YOLOv8 models to ONNX, TensorRT, CoreML and many other formats. Now get your deployment underway!
+keywords: YOLO, YOLOv8, Ultralytics, model export, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, export model
+---
+
+# Model Export with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+The ultimate goal of training a model is to deploy it for real-world applications. Export mode in Ultralytics YOLOv8 offers a versatile range of options for exporting your trained model to different formats, making it deployable across various platforms and devices. This comprehensive guide aims to walk you through the nuances of model exporting, showing how to achieve maximum compatibility and performance.
+
+**Watch:** How to Export a Custom-Trained Ultralytics YOLOv8 Model and Run Live Inference on Webcam. *(embedded video)*
+
+## Why Choose YOLOv8's Export Mode?
+
+- **Versatility:** Export to multiple formats, including ONNX, TensorRT, CoreML and more.
+- **Performance:** Gain up to 5x GPU speedup with TensorRT and up to 3x CPU speedup with ONNX or OpenVINO.
+- **Compatibility:** Make your model deployable across numerous hardware and software environments.
+- **Ease of Use:** Simple CLI and Python API for quick and straightforward model exporting.
+
+### Key Features of Export Mode
+
+Here are some of the standout functionalities:
+
+- **One-Click Export:** Simple commands for exporting to different formats.
+- **Batched Export:** Export batched-inference-capable models.
+- **Optimized Inference:** Exported models are optimized for faster inference times.
+- **Tutorial Videos:** In-depth guides and tutorials for a smooth exporting experience.
+
+!!! Tip "Tip"
+
+    * Export to ONNX or OpenVINO for CPU speedups.
+    * Export to TensorRT for GPU speedups.
+
+## Usage Examples
+
+Export a YOLOv8n model to a different format like ONNX or TensorRT. See the Arguments section below for a full list of export arguments.
+
+!!! Example ""
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom trained model
+
+        # Export the model
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n.pt format=onnx  # export official model
+        yolo export model=path/to/best.pt format=onnx  # export custom trained model
+        ```
+
+## Arguments
+
+Export settings for YOLO models refer to the various configurations and options used to save or export the model for use in other environments or platforms. These settings can affect the model's performance, size and compatibility with different systems. Common YOLO export settings include the format of the exported model file (e.g. ONNX, TensorFlow SavedModel), the device the model will run on (e.g. CPU, GPU) and whether additional features such as masks or multiple labels per box are included. Other factors that may affect the export process include the specific task the model is being used for and the requirements or constraints of the target environment or platform. It is important to carefully consider and configure these settings so that the exported model is optimized for its intended use and can be deployed effectively in the target environment.
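+
+As a minimal sketch of combining several of the arguments from the table below (the `opset` value is an illustrative assumption; choose one supported by your ONNX runtime):
+
+```python
+from ultralytics import YOLO
+
+# Export to ONNX with a dynamic batch axis and a simplified graph;
+# opset 12 is an assumed example value, not a requirement
+model = YOLO('yolov8n.pt')
+model.export(format='onnx', imgsz=640, dynamic=True, simplify=True, opset=12)
+```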
+
+| Key | Value | Description |
+|---|---|---|
+| `format` | `'torchscript'` | format to export to |
+| `imgsz` | `640` | image size as scalar or (h, w) list, i.e. (640, 480) |
+| `keras` | `False` | use Keras for TF SavedModel export |
+| `optimize` | `False` | TorchScript: optimize for mobile |
+| `half` | `False` | FP16 quantization |
+| `int8` | `False` | INT8 quantization |
+| `dynamic` | `False` | ONNX/TensorRT: dynamic axes |
+| `simplify` | `False` | ONNX/TensorRT: simplify model |
+| `opset` | `None` | ONNX: opset version (optional, defaults to latest) |
+| `workspace` | `4` | TensorRT: workspace size (GB) |
+| `nms` | `False` | CoreML: add NMS |
+
+## Export Formats
+
+Available YOLOv8 export formats are in the table below. You can export to any format using the `format` argument, i.e. `format='onnx'` or `format='engine'`.
+
+| Format | `format` Argument | Model | Metadata | Arguments |
+|---|---|---|---|---|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` |
diff --git a/ultralytics/docs/ko/modes/export.md:Zone.Identifier b/ultralytics/docs/ko/modes/export.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/modes/export.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/modes/index.md b/ultralytics/docs/ko/modes/index.md
new file mode 100755
index 0000000..1d406be
--- /dev/null
+++ b/ultralytics/docs/ko/modes/index.md
@@ -0,0 +1,73 @@
+---
+comments: true
+description: From training to tracking, make the most of YOLOv8 with Ultralytics. Get insights and examples for each supported mode, including validation, export and benchmarking.
+keywords: Ultralytics, YOLOv8, machine learning, object detection, training, validation, prediction, export, tracking, benchmarking
+---
+
+# Ultralytics YOLOv8 Modes
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Ultralytics YOLOv8 is not just another object detection model; it is a versatile framework designed to cover the entire lifecycle of machine learning models, from data ingestion and model training to validation, deployment and real-world tracking. Each mode is crafted for a specific purpose and engineered to offer the flexibility and efficiency required for different tasks and use cases.
+
+**Watch:** Ultralytics Modes Tutorial: Train, Validate, Predict, Export and Benchmark. *(embedded video)*
+
+### Modes at a Glance
+
+Understanding the different **modes** that YOLOv8 supports is essential to getting the most out of your models:
+
+- **Train** mode: Fine-tune your model on custom or preloaded datasets.
+- **Val** mode: A post-training checkpoint to validate model performance.
+- **Predict** mode: Unleash the predictive power of your model on real-world data.
+- **Export** mode: Make your model deployment-ready in various formats.
+- **Track** mode: Extend your object detection model into real-time tracking applications.
+- **Benchmark** mode: Analyze the speed and accuracy of your model in diverse deployment environments.
+
+This comprehensive guide provides an overview of each mode along with practical insights to help you harness the full potential of YOLOv8.
+
+## [Train](train.md)
+
+Train mode is used for training a YOLOv8 model on a custom dataset. In this mode, the model is trained using the specified dataset and hyperparameters. The training process optimizes the model's parameters so that it can accurately predict the classes and locations of objects in an image.
+
+[Train Examples](train.md){ .md-button }
+
+## [Val](val.md)
+
+Val mode is used for validating a trained YOLOv8 model. In this mode, the model is evaluated on a validation set to measure its accuracy and generalization performance. This mode can be used to tune the model's hyperparameters and improve its performance.
+
+[Val Examples](val.md){ .md-button }
+
+## [Predict](predict.md)
+
+Predict mode is used for making predictions on new images or videos with a trained YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user provides images or videos for inference. The model predicts the classes and locations of objects in the input images or videos.
+
+[Predict Examples](predict.md){ .md-button }
+
+## [Export](export.md)
+
+Export mode is used for converting a YOLOv8 model to a format that can be used for deployment. In this mode, the model is converted to a format usable by other software applications or hardware devices. This mode is useful for deploying models to production environments.
+
+[Export Examples](export.md){ .md-button }
+
+## [Track](track.md)
+
+Track mode is used for tracking objects in real time with a YOLOv8 model. In this mode, the model is loaded from a checkpoint file, and the user provides a live video stream for real-time object tracking. This mode is useful for applications such as surveillance systems and self-driving cars.
+
+[Track Examples](track.md){ .md-button }
+
+## [Benchmark](benchmark.md)
+
+Benchmark mode is used to profile the speed and accuracy of the various export formats of YOLOv8.
๋ฒค์น˜๋งˆํฌ๋Š” ๋‚ด๋ณด๋‚ธ ํฌ๋งท์˜ ํฌ๊ธฐ, ๊ทธ๋ฆฌ๊ณ  ๊ฐ์ฒด ํƒ์ง€, ์„ธ๋ถ„ํ™” ๋ฐ ํฌ์ฆˆ์— ๋Œ€ํ•œ `mAP50-95` ๋ฉ”ํŠธ๋ฆญ ๋˜๋Š” ๋ถ„๋ฅ˜์— ๋Œ€ํ•œ `accuracy_top5` ๋ฉ”ํŠธ๋ฆญ, ๊ทธ๋ฆฌ๊ณ  ONNX, OpenVINO, TensorRT ๋“ฑ ๋‹ค์–‘ํ•œ ๋‚ด๋ณด๋‚ด๊ธฐ ํฌ๋งท์—์„œ์˜ ์ด๋ฏธ์ง€๋‹น ์ถ”๋ก  ์‹œ๊ฐ„์„ ๋ฐ€๋ฆฌ์ดˆ๋กœ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด ์ •๋ณด๋Š” ์†๋„์™€ ์ •ํ™•๋„์— ๋Œ€ํ•œ ํŠน์ • ์‚ฌ์šฉ ์‚ฌ๋ก€ ์š”๊ตฌ ์‚ฌํ•ญ์— ๊ธฐ๋ฐ˜ํ•˜์—ฌ ์ตœ์ ์˜ ๋‚ด๋ณด๋‚ด๊ธฐ ํฌ๋งท์„ ์„ ํƒํ•˜๋Š” ๋ฐ ๋„์›€์ด ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +[Benchmark ์˜ˆ์‹œ](benchmark.md){ .md-button } diff --git a/ultralytics/docs/ko/modes/index.md:Zone.Identifier b/ultralytics/docs/ko/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/modes/predict.md b/ultralytics/docs/ko/modes/predict.md new file mode 100755 index 0000000..ce0f6ea --- /dev/null +++ b/ultralytics/docs/ko/modes/predict.md @@ -0,0 +1,227 @@ +--- +comments: true +description: YOLOv8 ์˜ˆ์ธก ๋ชจ๋“œ๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋‹ค์–‘ํ•œ ์ž‘์—…์„ ์ˆ˜ํ–‰ํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์•Œ์•„๋ณด์‹ญ์‹œ์˜ค. ์ด๋ฏธ์ง€, ๋น„๋””์˜ค ๋ฐ ๋ฐ์ดํ„ฐ ํ˜•์‹๊ณผ ๊ฐ™์€ ๋‹ค์–‘ํ•œ ์ถ”๋ก  ์†Œ์Šค์— ๋Œ€ํ•ด ์ž์„ธํžˆ ์•Œ์•„๋ด…๋‹ˆ๋‹ค. +keywords: Ultralytics, YOLOv8, ์˜ˆ์ธก ๋ชจ๋“œ, ์ถ”๋ก  ์†Œ์Šค, ์˜ˆ์ธก ์ž‘์—…, ์ŠคํŠธ๋ฆฌ๋ฐ ๋ชจ๋“œ, ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ, ๋น„๋””์˜ค ์ฒ˜๋ฆฌ, ๋จธ์‹  ๋Ÿฌ๋‹, AI +--- + +# Ultralytics YOLO๋กœ ๋ชจ๋ธ ์˜ˆ์ธก + +Ultralytics YOLO ์ƒํƒœ๊ณ„์™€ ํ†ตํ•ฉ + +## ์†Œ๊ฐœ + +๋จธ์‹  ๋Ÿฌ๋‹ ๋ฐ ์ปดํ“จํ„ฐ ๋น„์ „์˜ ์„ธ๊ณ„์—์„œ ์‹œ๊ฐ์  ๋ฐ์ดํ„ฐ๋ฅผ ํ•ด์„ํ•˜๋Š” ๊ณผ์ •์„ '์ถ”๋ก ' ๋˜๋Š” '์˜ˆ์ธก'์ด๋ผ๊ณ  ํ•ฉ๋‹ˆ๋‹ค. Ultralytics YOLOv8๋Š” ๋‹ค์–‘ํ•œ ๋ฐ์ดํ„ฐ ์†Œ์Šค์—์„œ์˜ ๊ณ ์„ฑ๋Šฅ, ์‹ค์‹œ๊ฐ„ ์ถ”๋ก ์„ ์œ„ํ•ด ๋งž์ถคํ™”๋œ ๊ฐ•๋ ฅํ•œ ๊ธฐ๋Šฅ์ธ **์˜ˆ์ธก ๋ชจ๋“œ**๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. + +

+
+ +
+ ์‹œ์ฒญ: Ultralytics YOLOv8 ๋ชจ๋ธ์—์„œ ์ถœ๋ ฅ์„ ์ถ”์ถœํ•˜์—ฌ ๋งž์ถค ํ”„๋กœ์ ํŠธ์— ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•. +

+
+## Real-world Applications
+
+| Manufacturing | Sports | Safety |
+|:---:|:---:|:---:|
+| ![Vehicle Spare Parts Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Football Player Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![People Fall Detection](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) |
+| Vehicle Spare Parts Detection | Football Player Detection | People Fall Detection |
+
+## Why Use Ultralytics YOLO for Inference?
+
+Here's why you should consider YOLOv8's predict mode for your various inference needs:
+
+- **Versatility:** Capable of making inferences on images, videos and even live streams.
+- **Performance:** Engineered for real-time, high-speed processing without sacrificing accuracy.
+- **Ease of Use:** Intuitive Python and CLI interfaces for rapid deployment and testing.
+- **Highly Customizable:** Various settings and parameters to tune the model's inference behavior according to your specific requirements.
+
+### Key Features of Predict Mode
+
+YOLOv8's predict mode is designed to be robust and versatile, featuring:
+
+- **Multiple Data Source Compatibility:** Whether your data is an individual image, a collection of images, a video file or a real-time video stream, predict mode has you covered.
+- **Streaming Mode:** Use the streaming feature to generate a memory-efficient generator of `Results` objects. Enable this by setting `stream=True` in the predictor's call method.
+- **Batch Processing:** The ability to process multiple images or video frames in a single batch, further reducing inference time.
+- **Integration Friendly:** Easily integrates with existing data pipelines and other software components thanks to its flexible API.
+
+Ultralytics YOLO models return either a Python list of `Results` objects, or a memory-efficient Python generator of `Results` objects when `stream=True` is passed to the model during inference:
+!!! Example "Predict"
+
+    === "Return a list with `stream=False`"
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # pretrained YOLOv8n model
+
+        # Run batched inference on a list of images
+        results = model(['im1.jpg', 'im2.jpg'])  # return a list of Results objects
+
+        # Process results list
+        for result in results:
+            boxes = result.boxes  # Boxes object for bbox outputs
+            masks = result.masks  # Masks object for segmentation mask outputs
+            keypoints = result.keypoints  # Keypoints object for pose outputs
+            probs = result.probs  # Probs object for classification outputs
+        ```
+
+    === "Return a generator with `stream=True`"
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # pretrained YOLOv8n model
+
+        # Run batched inference on a list of images
+        results = model(['im1.jpg', 'im2.jpg'], stream=True)  # return a generator of Results objects
+
+        # Process results generator
+        for result in results:
+            boxes = result.boxes  # Boxes object for bbox outputs
+            masks = result.masks  # Masks object for segmentation mask outputs
+            keypoints = result.keypoints  # Keypoints object for pose outputs
+            probs = result.probs  # Probs object for classification outputs
+        ```
+
+## Inference Sources
+
+YOLOv8 can process different types of input sources for inference, as shown in the table below. The sources include static images, video streams and various data formats. The table also indicates whether each source can be used in streaming mode with the argument `stream=True` โœ…. Streaming mode is beneficial for processing videos or live streams, as it creates a generator of results instead of loading all frames into memory.
+
+!!! Tip "Tip"
+
+    Use `stream=True` for processing long videos or large datasets to efficiently manage memory. When `stream=False`, the results for all frames or data points are stored in memory, which can quickly add up and cause out-of-memory errors for large inputs. In contrast, `stream=True` utilizes a generator that keeps only the results of the current frame or data point in memory, significantly reducing memory consumption and preventing out-of-memory issues.
+
+| Source | Argument | Type | Notes |
+|---|---|---|---|
+| image | `'image.jpg'` | `str` or `Path` | Single image file. |
+| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL to an image. |
+| screenshot | `'screen'` | `str` | Capture a screenshot. |
+| PIL | `Image.open('im.jpg')` | `PIL.Image` | HWC format with RGB channels. |
+| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | HWC format with BGR channels `uint8 (0-255)`. |
+| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | HWC format with BGR channels `uint8 (0-255)`. |
+| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | BCHW format with RGB channels `float32 (0.0-1.0)`. |
+| CSV | `'sources.csv'` | `str` or `Path` | CSV file containing paths to images, videos or directories. |
+| video โœ… | `'video.mp4'` | `str` or `Path` | Video file in formats like MP4, AVI, etc. |
+| directory โœ… | `'path/'` | `str` or `Path` | Path to a directory containing images or videos. |
+| glob โœ… | `'path/*.jpg'` | `str` | Glob pattern to match multiple files. Use the `*` character as a wildcard. |
+| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | URL to a YouTube video. |
+| stream โœ… | `'rtsp://example.com/media.mp4'` | `str` | URL for streaming protocols such as RTSP, RTMP, TCP or an IP address. |
+| multi-stream โœ… | `'list.streams'` | `str` or `Path` | `*.streams` text file with one stream URL per row, i.e. 8 streams will run at batch size 8. |
+
+Below are code examples for using each source type:
+
+!!! Example "Prediction sources"
+
+    === "image"
+        Run inference on an image file.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define path to the image file
+        source = 'path/to/image.jpg'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "screenshot"
+        Run inference on the current screen content as a screenshot.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define current screenshot as source
+        source = 'screen'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "URL"
+        Run inference on an image or video hosted remotely via URL.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define remote image or video URL
+        source = 'https://ultralytics.com/images/bus.jpg'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "PIL"
+        Run inference on an image opened with the Python Imaging Library (PIL).
+        ```python
+        from PIL import Image
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Open an image using PIL
+        source = Image.open('path/to/image.jpg')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "OpenCV"
+        Run inference on an image read with OpenCV.
+        ```python
+        import cv2
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Read an image using OpenCV
+        source = cv2.imread('path/to/image.jpg')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "numpy"
+        Run inference on an image represented as a numpy array.
+        ```python
+        import numpy as np
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random numpy array of HWC shape (640, 640, 3) with values in range [0, 255] and type uint8
+        source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "torch"
+        Run inference on an image represented as a PyTorch tensor.
+        ```python
+        import torch
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random torch tensor of BCHW shape (1, 3, 640, 640) with values in range [0, 1] and type float32
+        source = torch.rand(1, 3, 640, 640, dtype=torch.float32)
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
diff --git a/ultralytics/docs/ko/modes/predict.md:Zone.Identifier b/ultralytics/docs/ko/modes/predict.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/modes/predict.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/modes/track.md b/ultralytics/docs/ko/modes/track.md
new file mode 100755
index 0000000..73f2cca
--- /dev/null
+++ b/ultralytics/docs/ko/modes/track.md
@@ -0,0 +1,256 @@
+---
+comments: true
+description: Learn how to use Ultralytics YOLO for object tracking in video streams. Includes guides for using different trackers and customizing tracker configurations.
+keywords: Ultralytics, YOLO, object tracking, video streams, BoT-SORT, ByteTrack, Python guide, CLI guide
+---
+
+# Multi-Object Tracking with Ultralytics YOLO
+
+Multi-object tracking examples
+
+Object tracking in the realm of video analytics is a critical task that not only identifies the location and class of objects within a frame but also maintains a unique ID for each detected object as the video progresses. The applications are limitless, ranging from surveillance and security to real-time sports analytics.
+
+## Why Choose Ultralytics YOLO for Object Tracking?
+
+The output from Ultralytics trackers is consistent with standard object detection but has the added value of object IDs. This makes it easy to track objects in video streams and perform subsequent analytics. Here's why you should consider using Ultralytics YOLO for your object tracking needs:
+
+- **Efficiency:** Process video streams in real time without compromising accuracy.
+- **Flexibility:** Supports multiple tracking algorithms and configurations.
+- **Ease of Use:** Simple Python API and CLI options for quick integration and deployment.
+- **Customizability:** Easy to use with custom-trained YOLO models, allowing integration into domain-specific applications.
+
+**Watch:** Object Detection and Tracking with Ultralytics YOLOv8. *(embedded video)*
+
+## Real-world Applications
+
+| Transportation | Retail | Aquaculture |
+|:---:|:---:|:---:|
+| ![Vehicle Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![People Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![Fish Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) |
+| Vehicle Tracking | People Tracking | Fish Tracking |
+
+## Features at a Glance
+
+Ultralytics YOLO extends its object detection features to provide robust and versatile object tracking:
+
+- **Real-Time Tracking:** Seamlessly track objects in high-frame-rate videos.
+- **Multiple Tracker Support:** Choose from a variety of established tracking algorithms.
+- **Customizable Tracker Configurations:** Tailor the tracking algorithm to specific requirements by adjusting various parameters.
+
+## Available Trackers
+
+Ultralytics YOLO supports the following tracking algorithms. They can be enabled by passing the relevant YAML configuration file, such as `tracker=tracker_type.yaml`:
+
+* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Use `botsort.yaml` to enable this tracker.
+* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Use `bytetrack.yaml` to enable this tracker.
+
+The default tracker is BoT-SORT.
+
+## Tracking
+
+To run the tracker on video streams, use a trained Detect, Segment or Pose model such as YOLOv8n, YOLOv8n-seg or YOLOv8n-pose.
+
+!!! Example ""
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load an official or custom model
+        model = YOLO('yolov8n.pt')  # Load an official Detect model
+        model = YOLO('yolov8n-seg.pt')  # Load an official Segment model
+        model = YOLO('yolov8n-pose.pt')  # Load an official Pose model
+        model = YOLO('path/to/best.pt')  # Load a custom-trained model
+
+        # Perform tracking with the model
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True)  # Tracking with default tracker
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml")  # Tracking with ByteTrack tracker
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Perform tracking with various models using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4"  # Official Detect model
+        yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4"  # Official Segment model
+        yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4"  # Official Pose model
+        yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4"  # Custom-trained model
+
+        # Track using the ByteTrack tracker
+        yolo track model=path/to/best.pt tracker="bytetrack.yaml"
+        ```
+
+As can be seen in the usage above, tracking is available for all Detect, Segment and Pose models run on videos or streaming sources.
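+
+As a minimal sketch of that point, assuming a local webcam at index `0` as the streaming source (any of the stream URLs from the predict-mode source table should work the same way):
+
+```python
+from ultralytics import YOLO
+
+# Track with a Segment model on a webcam stream; source=0 is an assumed
+# example device index. Substitute a video path or RTSP/RTMP URL as needed.
+model = YOLO('yolov8n-seg.pt')
+results = model.track(source=0, show=True)
+```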
+
+## Configuration
+
+### Tracking Arguments
+
+Tracking configuration shares properties with predict mode, such as `conf`, `iou` and `show`. For further configuration options, refer to the [Predict](https://docs.ultralytics.com/modes/predict/) model page.
+
+!!! Example ""
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Configure the tracking parameters and run the tracker
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Configure tracking parameters and run the tracker using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3 iou=0.5 show
+        ```
+
+### Tracker Selection
+
+Ultralytics also lets you use a modified tracker configuration file. To do this, simply make a copy of a tracker config file (for example, `custom_tracker.yaml`) from [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) and modify any configuration (except the `tracker_type`) as needed.
+
+!!! Example ""
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load the model and run the tracker with a custom configuration file
+        model = YOLO('yolov8n.pt')
+        results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load the model and run the tracker with a custom configuration file using the command line interface
+        yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml'
+        ```
+
+For a comprehensive list of tracking arguments, refer to the [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) page.
+
+## Python Examples
+
+### Persisting Tracks Loop
+
+Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking on video frames. This script assumes you have already installed the necessary packages (`opencv-python` and `ultralytics`). The `persist=True` argument tells the tracker that the current image or frame is the next in a sequence and to expect tracks from the previous image in the current image.
+!!! Example "Streaming for-loop with tracking"
+
+    ```python
+    import cv2
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+Note the change from `model(frame)` to `model.track(frame)`, which enables object tracking instead of simple detection. This modified script runs the tracker on each frame of the video, visualizes the results and displays them in a window. The loop can be exited by pressing 'q'.
+
+### Plotting Tracks Over Time
+
+Visualizing object tracks over consecutive frames can provide valuable insights into the movement patterns and behavior of detected objects within a video. With Ultralytics YOLOv8, plotting these tracks is a seamless and efficient process.
+
+In the following example, we demonstrate how to utilize YOLOv8's tracking capabilities to plot the movement of detected objects across multiple video frames. This script involves opening a video file, reading it frame by frame, and using the YOLO model to identify and track various objects. By retaining the center points of the detected bounding boxes and connecting them, we can draw lines that represent the paths followed by the tracked objects.
+!!! Example "Plotting tracks over video frames"
+
+    ```python
+    from collections import defaultdict
+
+    import cv2
+    import numpy as np
+
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Store the track history
+    track_history = defaultdict(lambda: [])
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Get the boxes and track IDs
+            boxes = results[0].boxes.xywh.cpu()
+            track_ids = results[0].boxes.id.int().cpu().tolist()
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Plot the tracks
+            for box, track_id in zip(boxes, track_ids):
+                x, y, w, h = box
+                track = track_history[track_id]
+                track.append((float(x), float(y)))  # x, y center point
+                if len(track) > 30:  # retain the track for the last 30 frames
+                    track.pop(0)
+
+                # Draw the tracking lines
+                points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2))
+                cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10)
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+    ```
diff --git a/ultralytics/docs/ko/modes/track.md:Zone.Identifier b/ultralytics/docs/ko/modes/track.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/modes/track.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/modes/train.md b/ultralytics/docs/ko/modes/train.md
new file mode 100755
index 0000000..2caafd3
--- /dev/null
+++ b/ultralytics/docs/ko/modes/train.md
@@ -0,0 +1,145 @@
+---
+comments: true
+description: Step-by-step guide to training YOLOv8 models with Ultralytics YOLO, including examples of single-GPU and multi-GPU training
+keywords: Ultralytics, YOLOv8, YOLO, object detection, train mode, custom dataset, GPU training, multi-GPU, hyperparameters, CLI examples, Python examples
+---
+
+# Model Training with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Training a deep learning model involves feeding it data and adjusting its parameters so that it can make accurate predictions. Train mode in Ultralytics YOLOv8 is engineered for effective and efficient training of object detection models, fully utilizing modern hardware capabilities. This guide aims to cover all the details you need to start training your own models using YOLOv8's robust set of features.
+
+**Watch:** How to Train a YOLOv8 Model on Your Custom Dataset in Google Colab. *(embedded video)*
+
+## Why Choose Ultralytics YOLO for Training?
+
+Here are some compelling reasons to opt for YOLOv8's Train mode:
+
+- **Efficiency:** Make the most of your hardware, whether you're on a single-GPU setup or scaling across multiple GPUs.
+- **Versatility:** Train on custom datasets in addition to readily available ones like COCO, VOC and ImageNet.
+- **User-Friendly:** Simple yet powerful CLI and Python interfaces for a straightforward training experience.
+- **Hyperparameter Flexibility:** A broad range of customizable hyperparameters to fine-tune model performance.
+
+### Key Features of Train Mode
+
+The following are some notable features of YOLOv8's Train mode:
+
+- **Automatic Dataset Download:** Standard datasets like COCO, VOC and ImageNet are downloaded automatically on first use.
+- **Multi-GPU Support:** Scale your training efforts seamlessly across multiple GPUs to expedite the process.
+- **Hyperparameter Configuration:** Modify hyperparameters through YAML configuration files or CLI arguments.
+- **Visualization and Monitoring:** Real-time tracking of training metrics and visualization of the learning process for better insights.
+
+!!! Tip "Tip"
+
+    * YOLOv8 datasets are downloaded automatically on first use, i.e. `yolo train data=coco.yaml`
+
+## Usage Examples
+
+Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. The training device can be specified using the `device` argument. If no argument is passed, GPU `device=0` will be used if available, otherwise `device=cpu` will be used. See the Arguments section below for a full list of training arguments.
+
+!!! Example "Single-GPU and CPU Training Example"
+
+    The device is determined automatically. If a GPU is available it will be used, otherwise training will start on CPU.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Start training from a pretrained *.pt model
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### Multi-GPU Training
+
+Multi-GPU training allows more efficient utilization of available hardware resources by distributing the training load across multiple GPUs. This feature is available through both the Python API and the command-line interface. To enable multi-GPU training, specify the GPU device IDs you wish to use.
+
+!!! Example "Multi-GPU Training Example"
+
+    To train with 2 GPUs, CUDA devices 0 and 1, use the following commands. Expand to additional GPUs as required.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with 2 GPUs
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1])
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using GPUs 0 and 1
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1
+        ```
+
+### Apple M1 and M2 MPS Training
+
+With support for Apple M1 and M2 chips integrated into Ultralytics YOLO models, it is now possible to train your models on devices that leverage Apple's powerful Metal Performance Shaders (MPS) framework. MPS offers a high-performance way of executing computation and image-processing tasks on Apple's custom silicon.
+
+To enable training on Apple M1 and M2 chips, specify 'mps' as your device when initiating the training process. Below are examples of how you can do this in Python and via the command line:
+
+!!! Example "MPS Training Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model on the MPS device
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model on the mps device
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps
+        ```
+
+This lets you process training workloads more efficiently while leveraging the computational power of the M1/M2 chips. For more detailed guidance and advanced configuration options, refer to the [PyTorch MPS documentation](https://pytorch.org/docs/stable/notes/mps.html).
+
+### Resuming Interrupted Trainings
+
+Resuming training from a previously saved state is a crucial feature when working with deep learning models. It comes in handy when the training process has been unexpectedly interrupted, or when you wish to continue training a model with new data or for more epochs.
+
+When training is resumed, Ultralytics YOLO loads the weights from the last saved model and also restores the optimizer state, the learning-rate scheduler and the epoch number. This allows you to continue the training process seamlessly from where it left off.
+
+You can resume training in Ultralytics YOLO by setting the `resume` argument to `True` when calling the `train` method and specifying the path to the `.pt` file containing the partially trained model weights.
diff --git a/ultralytics/docs/ko/modes/train.md:Zone.Identifier b/ultralytics/docs/ko/modes/train.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/modes/train.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/modes/val.md b/ultralytics/docs/ko/modes/val.md
new file mode 100755
index 0000000..2ebaf77
--- /dev/null
+++ b/ultralytics/docs/ko/modes/val.md
@@ -0,0 +1,86 @@
+---
+comments: true
+description: Guide to validating YOLOv8 models. Learn how to evaluate the performance of your YOLO models using validation settings and metrics. Python and CLI examples included.
Python ๋ฐ CLI ์˜ˆ์ œ๊ฐ€ ํฌํ•จ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: Ultralytics, YOLO ๋ฌธ์„œ, YOLOv8, ๊ฒ€์ฆ, ๋ชจ๋ธ ํ‰๊ฐ€, ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ, ์ •ํ™•๋„, ์ธก์ • ํ•ญ๋ชฉ, Python, CLI +--- + +# Ultralytics YOLO๋กœ ๋ชจ๋ธ ๊ฒ€์ฆํ•˜๊ธฐ + +Ultralytics YOLO ์ƒํƒœ๊ณ„ ๋ฐ ํ†ตํ•ฉ + +## ๋„์ž… + +๊ฒ€์ฆ์€ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์˜ ํ’ˆ์งˆ์„ ํ‰๊ฐ€ํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•ด์ฃผ๋Š” ๊ธฐ๊ณ„ํ•™์Šต ํŒŒ์ดํ”„๋ผ์ธ์—์„œ ์ค‘์š”ํ•œ ๋‹จ๊ณ„์ž…๋‹ˆ๋‹ค. Ultralytics YOLOv8์˜ Val ๋ชจ๋“œ๋Š” ๋ชจ๋ธ์˜ ๊ฐ์ฒด ํƒ์ง€ ์„ฑ๋Šฅ์„ ํ‰๊ฐ€ํ•˜๊ธฐ ์œ„ํ•œ ๊ฐ•๋ ฅํ•œ ๋„๊ตฌ ๋ฐ ์ธก์ • ํ•ญ๋ชฉ ๋ชจ์Œ์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. ์ด ๊ฐ€์ด๋“œ๋Š” Val ๋ชจ๋“œ๋ฅผ ํšจ๊ณผ์ ์œผ๋กœ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์˜ ์ •ํ™•์„ฑ๊ณผ ์‹ ๋ขฐ์„ฑ์„ ๋ณด์žฅํ•˜๋Š” ๋ฐฉ๋ฒ•์— ๋Œ€ํ•œ ์™„๋ฒฝํ•œ ๋ฆฌ์†Œ์Šค ์—ญํ• ์„ ํ•ฉ๋‹ˆ๋‹ค. + +## ์™œ Ultralytics YOLO๋กœ ๊ฒ€์ฆ์„ ํ•ด์•ผ ํ• ๊นŒ์š”? + +YOLOv8์˜ Val ๋ชจ๋“œ๋ฅผ ์‚ฌ์šฉํ•˜๋Š” ์ด์ ์€ ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค: + +- **์ •๋ฐ€๋„:** mAP50, mAP75, mAP50-95์™€ ๊ฐ™์€ ์ •ํ™•ํ•œ ์ธก์ • ํ•ญ๋ชฉ์œผ๋กœ ๋ชจ๋ธ์„ ์ข…ํ•ฉ์ ์œผ๋กœ ํ‰๊ฐ€ํ•ฉ๋‹ˆ๋‹ค. +- **ํŽธ์˜์„ฑ:** ํ›ˆ๋ จ ์„ค์ •์„ ๊ธฐ์–ตํ•˜๋Š” ๋‚ด์žฅ ๊ธฐ๋Šฅ์„ ํ™œ์šฉํ•˜์—ฌ ๊ฒ€์ฆ ์ ˆ์ฐจ๋ฅผ ๋‹จ์ˆœํ™”ํ•ฉ๋‹ˆ๋‹ค. +- **์œ ์—ฐ์„ฑ:** ๊ฐ™๊ฑฐ๋‚˜ ๋‹ค๋ฅธ ๋ฐ์ดํ„ฐ์…‹๊ณผ ์ด๋ฏธ์ง€ ํฌ๊ธฐ๋กœ ๋ชจ๋ธ์„ ๊ฒ€์ฆํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +- **ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ํŠœ๋‹:** ๊ฒ€์ฆ ์ธก์ • ํ•ญ๋ชฉ์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ๋” ์ž˜ ์กฐ์œจํ•ฉ๋‹ˆ๋‹ค. + +### Val ๋ชจ๋“œ์˜ ์ฃผ์š” ๊ธฐ๋Šฅ + +YOLOv8์˜ Val ๋ชจ๋“œ๊ฐ€ ์ œ๊ณตํ•˜๋Š” ์ฃผ๋ชฉํ•  ๋งŒํ•œ ๊ธฐ๋Šฅ๋“ค์€ ๋‹ค์Œ๊ณผ ๊ฐ™์Šต๋‹ˆ๋‹ค: + +- **์ž๋™ํ™”๋œ ์„ค์ •:** ๋ชจ๋ธ์€ ํ›ˆ๋ จ ๊ตฌ์„ฑ์„ ๊ธฐ์–ตํ•˜์—ฌ ๊ฐ„๋‹จํ•˜๊ฒŒ ๊ฒ€์ฆ์ด ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค. +- **๋ฉ€ํ‹ฐ-๋ฉ”ํŠธ๋ฆญ ์ง€์›:** ๋‹ค์–‘ํ•œ ์ •ํ™•๋„ ์ธก์ • ํ•ญ๋ชฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ๋ชจ๋ธ์„ ํ‰๊ฐ€ํ•ฉ๋‹ˆ๋‹ค. +- **CLI ๋ฐ Python API:** ๊ฒ€์ฆ์„ ์œ„ํ•ด ๋ช…๋ น ์ค„ ์ธํ„ฐํŽ˜์ด์Šค ๋˜๋Š” Python API ์ค‘์—์„œ ์„ ํƒํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +- **๋ฐ์ดํ„ฐ ํ˜ธํ™˜์„ฑ:** ํ›ˆ๋ จ ๋‹จ๊ณ„์—์„œ ์‚ฌ์šฉ๋œ ๋ฐ์ดํ„ฐ์…‹๊ณผ ์‚ฌ์šฉ์ž ์ •์˜ ๋ฐ์ดํ„ฐ์…‹ ๋ชจ๋‘์™€ ์›ํ™œํ•˜๊ฒŒ ์ž‘๋™ํ•ฉ๋‹ˆ๋‹ค. + +!!! Tip "ํŒ" + + * YOLOv8 ๋ชจ๋ธ์€ ํ›ˆ๋ จ ์„ค์ •์„ ์ž๋™์œผ๋กœ ๊ธฐ์–ตํ•˜๋ฏ€๋กœ `yolo val model=yolov8n.pt`๋‚˜ `model('yolov8n.pt').val()`๋งŒ์œผ๋กœ ๊ฐ™์€ ์ด๋ฏธ์ง€ ํฌ๊ธฐ์™€ ์›๋ณธ ๋ฐ์ดํ„ฐ์…‹์—์„œ ์‰ฝ๊ฒŒ ๊ฒ€์ฆํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์‚ฌ์šฉ ์˜ˆ์ œ + +COCO128 ๋ฐ์ดํ„ฐ์…‹์—์„œ ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ์˜ ์ •ํ™•๋„๋ฅผ ๊ฒ€์ฆํ•ฉ๋‹ˆ๋‹ค. `๋ชจ๋ธ`์€ ํ›ˆ๋ จ `๋ฐ์ดํ„ฐ`์™€ ์ธ์ž๋ฅผ ๋ชจ๋ธ ์†์„ฑ์œผ๋กœ ์œ ์ง€ํ•˜๋ฏ€๋กœ ์ธ์ž๊ฐ€ ํ•„์š” ์—†์Šต๋‹ˆ๋‹ค. ์ „์ฒด ๋‚ด๋ณด๋‚ด๊ธฐ ์ธ์ž ๋ชฉ๋ก์€ ์•„๋ž˜์˜ ์ธ์ž ์„น์…˜์„ ์ฐธ๊ณ ํ•˜์„ธ์š”. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋กœ๋“œ + model = YOLO('yolov8n.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค + + # ๋ชจ๋ธ ๊ฒ€์ฆ + metrics = model.val() # ์ธ์ž๊ฐ€ ํ•„์š” ์—†์Œ, ๋ฐ์ดํ„ฐ์…‹๊ณผ ์„ค์ •์ด ๊ธฐ์–ต๋ฉ๋‹ˆ๋‹ค + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ์˜ map50-95๊ฐ€ ํฌํ•จ๋œ ๋ชฉ๋ก + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ๊ณต์‹ ๋ชจ๋ธ ๊ฒ€์ฆ + yolo detect val model=path/to/best.pt # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ ๊ฒ€์ฆ + ``` + +## ์ธ์ž + +YOLO ๋ชจ๋ธ์˜ ๊ฒ€์ฆ ์„ค์ •์€ ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ๊ฒ€์ฆ ๋ฐ์ดํ„ฐ์…‹์—์„œ ํ‰๊ฐ€ํ•˜๊ธฐ ์œ„ํ•œ ๋‹ค์–‘ํ•œ ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ๋ฐ ๊ตฌ์„ฑ์„ ์˜๋ฏธํ•ฉ๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ์„ค์ •์€ ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ, ์†๋„, ์ •ํ™•์„ฑ์— ์˜ํ–ฅ์„ ๋ฏธ์น  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
์ผ๋ฐ˜์ ์ธ YOLO ๊ฒ€์ฆ ์„ค์ •์—๋Š” ๋ฐฐ์น˜ ํฌ๊ธฐ, ํ›ˆ๋ จ ์ค‘ ๊ฒ€์ฆ์ด ์ˆ˜ํ–‰๋˜๋Š” ๋นˆ๋„ ๋ฐ ๋ชจ๋ธ ์„ฑ๋Šฅ์„ ํ‰๊ฐ€ํ•˜๊ธฐ ์œ„ํ•ด ์‚ฌ์šฉ๋˜๋Š” ์ธก์ • ํ•ญ๋ชฉ์ด ํฌํ•จ๋ฉ๋‹ˆ๋‹ค. ๊ฒ€์ฆ ๊ณผ์ •์— ์˜ํ–ฅ์„ ์ค„ ์ˆ˜ ์žˆ๋Š” ๋‹ค๋ฅธ ์š”์†Œ๋กœ๋Š” ๊ฒ€์ฆ ๋ฐ์ดํ„ฐ์…‹์˜ ํฌ๊ธฐ์™€ ๊ตฌ์„ฑ ๋ฐ ๋ชจ๋ธ์ด ์‚ฌ์šฉ๋˜๋Š” ๊ตฌ์ฒด์ ์ธ ์ž‘์—…์ด ์žˆ์Šต๋‹ˆ๋‹ค. ๋ชจ๋ธ์ด ๊ฒ€์ฆ ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ž˜ ์ˆ˜ํ–‰๋˜๊ณ  ์žˆ๊ณ  ๊ณผ์ ํ•ฉ์„ ๊ฐ์ง€ํ•˜๊ณ  ๋ฐฉ์ง€ํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” ์ด๋Ÿฌํ•œ ์„ค์ •์„ ์‹ ์ค‘ํ•˜๊ฒŒ ์กฐ์ •ํ•˜๊ณ  ์‹คํ—˜ํ•˜๋Š” ๊ฒƒ์ด ์ค‘์š”ํ•ฉ๋‹ˆ๋‹ค. + +| Key | Value | Description | +|---------------|---------|---------------------------------------------------| +| `data` | `None` | ๋ฐ์ดํ„ฐ ํŒŒ์ผ ๊ฒฝ๋กœ ์˜ˆ: coco128.yaml | +| `imgsz` | `640` | ์ž…๋ ฅ ์ด๋ฏธ์ง€์˜ ํฌ๊ธฐ๋ฅผ ์ •์ˆ˜๋กœ ์ง€์ • | +| `batch` | `16` | ๋ฐฐ์น˜ ๋‹น ์ด๋ฏธ์ง€ ์ˆ˜ (-1์€ AutoBatch์— ํ•ด๋‹น) | +| `save_json` | `False` | ๊ฒฐ๊ณผ๋ฅผ JSON ํŒŒ์ผ๋กœ ์ €์žฅ | +| `save_hybrid` | `False` | ๋ผ๋ฒจ์˜ ํ•˜์ด๋ธŒ๋ฆฌ๋“œ ๋ฒ„์ „(๋ผ๋ฒจ + ์ถ”๊ฐ€ ์˜ˆ์ธก)์„ ์ €์žฅ | +| `conf` | `0.001` | ํƒ์ง€๋ฅผ ์œ„ํ•œ ๊ฐ์ฒด ์‹ ๋ขฐ๋„ ์ž„๊ณ„๊ฐ’ | +| `iou` | `0.6` | NMS ์šฉ ๊ต์ฐจ ์˜์—ญ๊ณผ ํ•ฉ์นœ ์˜์—ญ(IoU)์˜ ์ž„๊ณ„๊ฐ’ | +| `max_det` | `300` | ์ด๋ฏธ์ง€ ๋‹น ์ตœ๋Œ€ ํƒ์ง€ ๊ฐœ์ˆ˜ | +| `half` | `True` | ๋ฐ˜์ •๋ฐ€๋„(FP16) ์‚ฌ์šฉ | +| `device` | `None` | ์‚ฌ์šฉํ•  ์žฅ์น˜ ์˜ˆ: cuda์˜ device=0/1/2/3์ด๋‚˜ device=cpu | +| `dnn` | `False` | ONNX ์ถ”๋ก ์— OpenCV DNN ์‚ฌ์šฉ | +| `plots` | `False` | ํ›ˆ๋ จ ์ค‘ ํ”Œ๋กฏ ํ‘œ์‹œ | +| `rect` | `False` | ์ตœ์†Œํ•œ์˜ ํŒจ๋”ฉ์„ ์œ„ํ•ด ๊ฐ ๋ฐฐ์น˜๊ฐ€ ์ง์‚ฌ๊ฐํ˜• val๋กœ ์กฐ์ •๋จ | +| `split` | `val` | ๊ฒ€์ฆ์„ ์œ„ํ•ด ์‚ฌ์šฉ๋˜๋Š” ๋ฐ์ดํ„ฐ์…‹ ๋ถ„ํ• , ์˜ˆ: 'val', 'test', ํ˜น์€ 'train' | +| diff --git a/ultralytics/docs/ko/modes/val.md:Zone.Identifier b/ultralytics/docs/ko/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/quickstart.md b/ultralytics/docs/ko/quickstart.md new file mode 100755 index 0000000..ca52392 --- /dev/null +++ b/ultralytics/docs/ko/quickstart.md @@ -0,0 +1,207 @@ +--- +comments: true +description: pip, conda, git ๋ฐ Docker๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ Ultralytics์„ ์„ค์น˜ํ•˜๋Š” ๋‹ค์–‘ํ•œ ๋ฐฉ๋ฒ•์„ ํƒ์ƒ‰ํ•ด ๋ณด์„ธ์š”. Ultralytics์„ ๋ช…๋ น์ค„ ์ธํ„ฐํŽ˜์ด์Šค ๋˜๋Š” Python ํ”„๋กœ์ ํŠธ ๋‚ด์—์„œ ์‚ฌ์šฉํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์•Œ์•„๋ณด์„ธ์š”. +keywords: Ultralytics ์„ค์น˜, pip๋ฅผ ์ด์šฉํ•œ Ultralytics ์„ค์น˜, Docker๋ฅผ ์ด์šฉํ•œ Ultralytics ์„ค์น˜, Ultralytics ๋ช…๋ น์ค„ ์ธํ„ฐํŽ˜์ด์Šค, Ultralytics Python ์ธํ„ฐํŽ˜์ด์Šค +--- + +## Ultralytics ์„ค์น˜ํ•˜๊ธฐ + +Ultralytics๋Š” pip, conda, Docker๋ฅผ ํฌํ•จํ•œ ๋‹ค์–‘ํ•œ ์„ค์น˜ ๋ฐฉ๋ฒ•์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค. `ultralytics` pip ํŒจํ‚ค์ง€๋ฅผ ์ด์šฉํ•ด ๊ฐ€์žฅ ์•ˆ์ •์ ์ธ ์ตœ์‹  ๋ฒ„์ „์˜ YOLOv8์„ ์„ค์น˜ํ•˜๊ฑฐ๋‚˜ [Ultralytics GitHub ์ €์žฅ์†Œ](https://github.com/ultralytics/ultralytics)๋ฅผ ๋ณต์ œํ•˜์—ฌ ๊ฐ€์žฅ ์ตœ์‹  ๋ฒ„์ „์„ ๋ฐ›์•„๋ณผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. Docker๋ฅผ ์ด์šฉํ•˜๋ฉด ํŒจํ‚ค์ง€๋ฅผ ๋กœ์ปฌ์— ์„ค์น˜ํ•˜์ง€ ์•Š๊ณ  ๊ฒฉ๋ฆฌ๋œ ์ปจํ…Œ์ด๋„ˆ์—์„œ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +!!! Example "์„ค์น˜ํ•˜๊ธฐ" + + === "Pip ์„ค์น˜ํ•˜๊ธฐ (๊ถŒ์žฅ)" + pip์„ ์‚ฌ์šฉํ•˜์—ฌ `ultralytics` ํŒจํ‚ค์ง€๋ฅผ ์„ค์น˜ํ•˜๊ฑฐ๋‚˜, `pip install -U ultralytics`๋ฅผ ์‹คํ–‰ํ•˜์—ฌ ๊ธฐ์กด ์„ค์น˜๋ฅผ ์—…๋ฐ์ดํŠธํ•˜์„ธ์š”. Python Package Index(PyPI)์—์„œ `ultralytics` ํŒจํ‚ค์ง€์— ๋Œ€ํ•œ ์ž์„ธํ•œ ๋‚ด์šฉ์„ ํ™•์ธํ•˜์„ธ์š”: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/). 
+ + [![PyPI ๋ฒ„์ „](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![๋‹ค์šด๋กœ๋“œ](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # PyPI์—์„œ ultralytics ํŒจํ‚ค์ง€ ์„ค์น˜ํ•˜๊ธฐ + pip install ultralytics + ``` + + GitHub [์ €์žฅ์†Œ](https://github.com/ultralytics/ultralytics)์—์„œ ์ง์ ‘ `ultralytics` ํŒจํ‚ค์ง€๋ฅผ ์„ค์น˜ํ•  ์ˆ˜๋„ ์žˆ์Šต๋‹ˆ๋‹ค. ์ตœ์‹  ๊ฐœ๋ฐœ ๋ฒ„์ „์ด ํ•„์š”ํ•œ ๊ฒฝ์šฐ ์œ ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์‹œ์Šคํ…œ์— Git ๋ช…๋ น์ค„ ๋„๊ตฌ๊ฐ€ ์„ค์น˜๋˜์–ด ์žˆ๋Š”์ง€ ํ™•์ธํ•˜์„ธ์š”. `@main` ๋ช…๋ น์–ด๋Š” `main` ๋ธŒ๋žœ์น˜๋ฅผ ์„ค์น˜ํ•˜๋ฉฐ, `@my-branch`๋กœ ๋ณ€๊ฒฝํ•˜๊ฑฐ๋‚˜ `main` ๋ธŒ๋žœ์น˜๋ฅผ ๊ธฐ๋ณธ์œผ๋กœ ์‚ฌ์šฉํ•˜๋ ค๋ฉด ์•„์˜ˆ ์ œ๊ฑฐํ•˜๋ฉด ๋ฉ๋‹ˆ๋‹ค. + + ```bash + # GitHub์—์„œ ultralytics ํŒจํ‚ค์ง€ ์„ค์น˜ํ•˜๊ธฐ + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Conda ์„ค์น˜ํ•˜๊ธฐ" + pip์˜ ๋Œ€์•ˆ์œผ๋กœ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ๋˜ ๋‹ค๋ฅธ ํŒจํ‚ค์ง€ ๊ด€๋ฆฌ์ž์ธ Conda๋ฅผ ํ†ตํ•ด์„œ๋„ ์„ค์น˜ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics)์—์„œ Anaconda์— ๋Œ€ํ•œ ์ž์„ธํ•œ ์ •๋ณด๋ฅผ ํ™•์ธํ•˜์„ธ์š”. Conda ํŒจํ‚ค์ง€๋ฅผ ์—…๋ฐ์ดํŠธํ•˜๋Š” Ultralytics feedstock ์ €์žฅ์†Œ๋Š” [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/)์— ์žˆ์Šต๋‹ˆ๋‹ค. + + + [![Conda ๋ ˆ์‹œํ”ผ](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ๋‹ค์šด๋กœ๋“œ](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ๋ฒ„์ „](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ํ”Œ๋žซํผ](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # conda๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ultralytics ํŒจํ‚ค์ง€ ์„ค์น˜ํ•˜๊ธฐ + conda install -c conda-forge ultralytics + ``` + + !!! Note "๋…ธํŠธ" + + CUDA ํ™˜๊ฒฝ์—์„œ ์„ค์น˜ํ•˜๋Š” ๊ฒฝ์šฐ ์ผ๋ฐ˜์ ์œผ๋กœ `ultralytics`, `pytorch` ๋ฐ `pytorch-cuda`๋ฅผ ๋™์ผํ•œ ๋ช…๋ น์–ด๋กœ ์„ค์น˜ํ•˜์—ฌ Conda ํŒจํ‚ค์ง€ ๊ด€๋ฆฌ์ž๊ฐ€ ์ถฉ๋Œ์„ ํ•ด๊ฒฐํ•˜๋„๋ก ํ•˜๊ฑฐ๋‚˜, ํ•„์š”ํ•œ ๊ฒฝ์šฐ CPU ์ „์šฉ `pytorch` ํŒจํ‚ค์ง€๋ฅผ ๋ฎ์–ด์“ธ ์ˆ˜ ์žˆ๋„๋ก `pytorch-cuda`๋ฅผ ๋งˆ์ง€๋ง‰์— ์„ค์น˜ํ•˜๋Š” ๊ฒƒ์ด ์ข‹์Šต๋‹ˆ๋‹ค. + ```bash + # Conda๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋“  ํŒจํ‚ค์ง€ ํ•จ๊ป˜ ์„ค์น˜ํ•˜๊ธฐ + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Conda Docker ์ด๋ฏธ์ง€ + + Ultralytics Conda Docker ์ด๋ฏธ์ง€๋“ค๋„ [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics)์—์„œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด ์ด๋ฏธ์ง€๋“ค์€ [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/)๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ํ•˜๋ฉฐ, Conda ํ™˜๊ฒฝ์—์„œ `ultralytics`๋ฅผ ์‚ฌ์šฉํ•˜๊ธฐ ์œ„ํ•œ ๊ฐ„๋‹จํ•œ ๋ฐฉ๋ฒ•์ž…๋‹ˆ๋‹ค. + + ```bash + # ์ด๋ฏธ์ง€ ์ด๋ฆ„์„ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜๊ธฐ + t=ultralytics/ultralytics:latest-conda + + # Docker Hub์—์„œ ์ตœ์‹  ultralytics ์ด๋ฏธ์ง€ ๊ฐ€์ ธ์˜ค๊ธฐ + sudo docker pull $t + + # GPU ์ง€์›์œผ๋กœ ultralytics ์ด๋ฏธ์ง€๋ฅผ ์ปจํ…Œ์ด๋„ˆ์—์„œ ์‹คํ–‰ํ•˜๊ธฐ + sudo docker run -it --ipc=host --gpus all $t # ๋ชจ๋“  GPU ์‚ฌ์šฉ + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ํŠน์ • GPU ์ง€์ • + ``` + + + === "Git ๋ณต์ œํ•˜๊ธฐ" + ๊ฐœ๋ฐœ์— ๊ธฐ์—ฌํ•˜๊ฑฐ๋‚˜ ์ตœ์‹  ์†Œ์Šค ์ฝ”๋“œ๋ฅผ ์‹คํ—˜ํ•ด ๋ณด๊ณ  ์‹ถ๋‹ค๋ฉด `ultralytics` ์ €์žฅ์†Œ๋ฅผ ๋ณต์ œํ•˜์„ธ์š”. 
๋ณต์ œํ•œ ํ›„ ํ•ด๋‹น ๋””๋ ‰ํ† ๋ฆฌ๋กœ ์ด๋™ํ•˜์—ฌ pip์„ ์ด์šฉํ•ด ํŽธ์ง‘ ๊ฐ€๋Šฅ ๋ชจ๋“œ `-e`๋กœ ํŒจํ‚ค์ง€๋ฅผ ์„ค์น˜ํ•ฉ๋‹ˆ๋‹ค. + ```bash + # ultralytics ์ €์žฅ์†Œ ๋ณต์ œํ•˜๊ธฐ + git clone https://github.com/ultralytics/ultralytics + + # ๋ณต์ œํ•œ ๋””๋ ‰ํ† ๋ฆฌ๋กœ ์ด๋™ํ•˜๊ธฐ + cd ultralytics + + # ๊ฐœ๋ฐœ์„ ์œ„ํ•œ ํŽธ์ง‘ ๊ฐ€๋Šฅ ๋ชจ๋“œ๋กœ ํŒจํ‚ค์ง€ ์„ค์น˜ํ•˜๊ธฐ + pip install -e . + ``` + + === "Docker ์‚ฌ์šฉํ•˜๊ธฐ" + + Docker๋ฅผ ์‚ฌ์šฉํ•˜๋ฉด `ultralytics` ํŒจํ‚ค์ง€๋ฅผ ๊ฒฉ๋ฆฌ๋œ ์ปจํ…Œ์ด๋„ˆ์—์„œ ์›ํ™œํ•˜๊ฒŒ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์œผ๋ฉฐ, ๋‹ค์–‘ํ•œ ํ™˜๊ฒฝ์—์„œ ์ผ๊ด€๋œ ์„ฑ๋Šฅ์„ ๋ณด์žฅํ•ฉ๋‹ˆ๋‹ค. [Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics)์˜ ๊ณต์‹ `ultralytics` ์ด๋ฏธ์ง€ ์ค‘ ํ•˜๋‚˜๋ฅผ ์„ ํƒํ•จ์œผ๋กœ์จ ๋กœ์ปฌ ์„ค์น˜์˜ ๋ณต์žกํ•จ์„ ํ”ผํ•˜๊ณ  ๊ฒ€์ฆ๋œ ์ž‘์—… ํ™˜๊ฒฝ์— ์ ‘๊ทผํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. Ultralytics์€ ์„œ๋กœ ๋‹ค๋ฅธ ํ”Œ๋žซํผ๊ณผ ์‚ฌ์šฉ ์‚ฌ๋ก€์— ๋Œ€ํ•ด ๋†’์€ ํ˜ธํ™˜์„ฑ๊ณผ ํšจ์œจ์„ฑ์„ ์ œ๊ณตํ•˜๊ธฐ ์œ„ํ•ด 5๊ฐ€์ง€ ์ฃผ์š” Docker ์ด๋ฏธ์ง€๋ฅผ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค: + + Docker Pulls + + - **Dockerfile:** ํŠธ๋ ˆ์ด๋‹์— ์ถ”์ฒœ๋˜๋Š” GPU ์ด๋ฏธ์ง€์ž…๋‹ˆ๋‹ค. + - **Dockerfile-arm64:** Raspberry Pi์™€ ๊ฐ™์€ ARM64 ๊ธฐ๋ฐ˜ ํ”Œ๋žซํผ์— ๋ฐฐํฌํ•˜๊ธฐ์— ์ตœ์ ํ™”๋œ ARM64 ์•„ํ‚คํ…์ฒ˜์šฉ์ž…๋‹ˆ๋‹ค. + - **Dockerfile-cpu:** GPU๊ฐ€ ์—†๋Š” ํ™˜๊ฒฝ์—์„œ ์ธํผ๋Ÿฐ์Šค์— ์ ํ•ฉํ•œ Ubuntu ๊ธฐ๋ฐ˜ CPU ์ „์šฉ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. + - **Dockerfile-jetson:** NVIDIA Jetson ์žฅ์น˜์— ์ตœ์ ํ™”๋œ GPU ์ง€์›์„ ํ†ตํ•ฉํ•œ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค. + - **Dockerfile-python:** ๊ฐ€๋ณ๊ฒŒ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜์„ ์œ„ํ•ด ํ•„์š”ํ•œ ์ข…์†์„ฑ๊ณผ Python๋งŒ ์žˆ๋Š” ์ตœ์†Œํ•œ์˜ ์ด๋ฏธ์ง€์ž…๋‹ˆ๋‹ค. + - **Dockerfile-conda:** Miniconda3๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ํ•˜๋ฉฐ ultralytics ํŒจํ‚ค์ง€์˜ conda ์„ค์น˜๋ฅผ ํฌํ•จํ•˜๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. + + ์•„๋ž˜์˜ ๋ช…๋ น์–ด๋กœ ์ตœ์‹  ์ด๋ฏธ์ง€๋ฅผ ๋ฐ›๊ณ  ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค: + + ```bash + # ์ด๋ฏธ์ง€ ์ด๋ฆ„์„ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜๊ธฐ + t=ultralytics/ultralytics:latest + + # Docker Hub์—์„œ ์ตœ์‹  ultralytics ์ด๋ฏธ์ง€ ๊ฐ€์ ธ์˜ค๊ธฐ + sudo docker pull $t + + # GPU ์ง€์›์œผ๋กœ ultralytics ์ด๋ฏธ์ง€๋ฅผ ์ปจํ…Œ์ด๋„ˆ์—์„œ ์‹คํ–‰ํ•˜๊ธฐ + sudo docker run -it --ipc=host --gpus all $t # ๋ชจ๋“  GPU ์‚ฌ์šฉ + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ํŠน์ • GPU ์ง€์ • + ``` + + ์œ„ ๋ช…๋ น์–ด๋Š” ์ตœ์‹  `ultralytics` ์ด๋ฏธ์ง€๋กœ Docker ์ปจํ…Œ์ด๋„ˆ๋ฅผ ์ดˆ๊ธฐํ™”ํ•ฉ๋‹ˆ๋‹ค. `-it` ํ”Œ๋ž˜๊ทธ๋Š” pseudo-TTY๋ฅผ ํ• ๋‹นํ•˜๊ณ  ํ‘œ์ค€ ์ž…๋ ฅ์„ ์œ ์ง€ํ•˜์—ฌ ์ปจํ…Œ์ด๋„ˆ์™€ ์ƒํ˜ธ ์ž‘์šฉํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•ด์ค๋‹ˆ๋‹ค. `--ipc=host` ํ”Œ๋ž˜๊ทธ๋Š” ํ”„๋กœ์„ธ์Šค ๊ฐ„ ๋ฉ”๋ชจ๋ฆฌ ๊ณต์œ ์— ํ•„์š”ํ•œ IPC(Inter-Process Communication) ๋„ค์ž„์ŠคํŽ˜์ด์Šค๋ฅผ ํ˜ธ์ŠคํŠธ๋กœ ์„ค์ •ํ•ฉ๋‹ˆ๋‹ค. `--gpus all` ํ”Œ๋ž˜๊ทธ๋Š” ์ปจํ…Œ์ด๋„ˆ ๋‚ด์—์„œ ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋“  GPU์— ๋Œ€ํ•œ ์ ‘๊ทผ์„ ํ™œ์„ฑํ™”ํ•˜๋Š”๋ฐ, GPU ๊ณ„์‚ฐ์ด ํ•„์š”ํ•œ ์ž‘์—…์— ์ค‘์š”ํ•ฉ๋‹ˆ๋‹ค. + + ์ฐธ๊ณ : ๋กœ์ปฌ ๊ธฐ๊ณ„์˜ ํŒŒ์ผ์„ ์ปจํ…Œ์ด๋„ˆ ๋‚ด์—์„œ ์ž‘์—…ํ•˜๊ธฐ ์œ„ํ•ด์„œ๋Š” ๋กœ์ปฌ ๋””๋ ‰ํ† ๋ฆฌ๋ฅผ ์ปจํ…Œ์ด๋„ˆ์— ๋งˆ์šดํŠธํ•˜๋Š” ๋ฐ Docker ๋ณผ๋ฅจ์„ ์‚ฌ์šฉํ•˜์„ธ์š”: + + ```bash + # ๋กœ์ปฌ ๋””๋ ‰ํ† ๋ฆฌ๋ฅผ ์ปจํ…Œ์ด๋„ˆ ๋‚ด๋ถ€ ๋””๋ ‰ํ† ๋ฆฌ์— ๋งˆ์šดํŠธํ•˜๊ธฐ + sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t + ``` + + `/path/on/host`๋ฅผ ๋กœ์ปฌ ๊ธฐ๊ณ„์˜ ๋””๋ ‰ํ† ๋ฆฌ ๊ฒฝ๋กœ๋กœ, `/path/in/container`๋ฅผ ์ปจํ…Œ์ด๋„ˆ ๋‚ด๋ถ€์—์„œ ์›ํ•˜๋Š” ๊ฒฝ๋กœ๋กœ ๋ณ€๊ฒฝํ•˜์—ฌ ์ ‘๊ทผํ•  ์ˆ˜ ์žˆ๊ฒŒ ํ•˜์„ธ์š”. + + Docker ์‚ฌ์šฉ์— ๋Œ€ํ•œ ๊ณ ๊ธ‰ ๊ธฐ๋Šฅ์€ [Ultralytics Docker ๊ฐ€์ด๋“œ](https://docs.ultralytics.com/guides/docker-quickstart/)์—์„œ ๋” ํƒ๊ตฌํ•ด๋ณด์„ธ์š”. 
+
+`ultralytics`의 종속성 목록은 [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) 파일에서 확인할 수 있습니다. 위 예제에서는 모든 필요한 종속성을 설치합니다.
+
+<p align="center"><em>Watch: Ultralytics YOLO Quick Start Guide</em> (동영상 임베드)</p>
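+
+설치가 끝났다면 아래와 같이 간단히 확인해 볼 수 있습니다(확인용 최소 스케치입니다):
+
+```python
+import ultralytics
+
+print(ultralytics.__version__)  # 설치된 버전 확인
+ultralytics.checks()            # Python, torch, CUDA 등 환경 점검 출력
+```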
+
+!!! Tip "팁"
+
+    PyTorch 설치 요구사항은 운영 체제와 CUDA 요구사항에 따라 다르므로, [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally)의 지침에 따라 PyTorch를 먼저 설치하는 것이 권장됩니다.
+
+    [PyTorch 설치 지침](https://pytorch.org/get-started/locally)
+
+## 명령줄 인터페이스(CLI)로 Ultralytics 사용하기
+
+Ultralytics 명령줄 인터페이스(CLI)는 Python 환경 없이도 한 줄 명령어로 작업을 쉽게 실행할 수 있도록 합니다. CLI는 커스터마이징이나 Python 코드를 필요로 하지 않으며, `yolo` 명령어만으로 터미널에서 모든 작업을 실행할 수 있습니다. 명령줄에서 YOLOv8을 사용하는 방법에 대해 더 알아보려면 [CLI 가이드](/../usage/cli.md)를 참고하세요.
+
+!!! Example "예제"
+
+    === "문법"
+
+        Ultralytics `yolo` 명령어는 다음과 같은 문법을 사용합니다:
+        ```bash
+        yolo TASK MODE ARGS
+
+        여기서 TASK (선택적)는 [detect, segment, classify] 중 하나,
+        MODE (필수)는 [train, val, predict, export, track] 중 하나이며,
+        ARGS (선택적)는 'imgsz=320'과 같이 기본값을 재정의하는 'arg=value' 쌍을 원하는 개수만큼 지정할 수 있습니다.
+        ```
+        모든 ARGS는 전체 [구성 가이드](/../usage/cfg.md)에서 또는 `yolo cfg`로 확인할 수 있습니다.
+
+    === "Train"
+
+        초기 학습률 0.01로 10 에포크 동안 감지 모델을 훈련합니다:
+        ```bash
+        yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
+        ```
+
+    === "Predict"
+
+        사전 훈련된 세분화 모델을 사용하여 이미지 크기 320으로 YouTube 동영상을 예측합니다:
+        ```bash
+        yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+        ```
+
+    === "Val"
+
+        배치 크기 1과 이미지 크기 640으로 사전 훈련된 감지 모델을 검증합니다:
+        ```bash
+        yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
+        ```
+
+    === "Export"
+
+        YOLOv8n 분류 모델을 이미지 크기 224x128로 ONNX 형식으로 내보냅니다 (TASK 필요 없음):
+        ```bash
+        yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
+        ```
+
+    === "특별"
+
+        버전 확인, 설정 보기, 검사 실행 등을 위한 특별 명령어를 실행하세요:
+        ```bash
+        yolo help
+        yolo checks
+        yolo version
+        yolo settings
+        yolo copy-cfg
+        yolo cfg
+        ```
+
+!!! Warning "주의"
+
+    모든 인수는 `arg=val` 쌍으로 전달되어야 하며, 각 쌍은 공백으로 구분해야 합니다. 인수 접두사로 `--`를 사용하거나 인수 사이에 쉼표 `,`를 사용해서는 안 됩니다.
+
+    - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`  ✅
+    - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`  ❌
+    - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`  ❌
+
+[CLI 가이드](/../usage/cli.md){ .md-button }
diff --git a/ultralytics/docs/ko/quickstart.md:Zone.Identifier b/ultralytics/docs/ko/quickstart.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ko/quickstart.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ko/tasks/classify.md b/ultralytics/docs/ko/tasks/classify.md
new file mode 100755
index 0000000..74e43bd
--- /dev/null
+++ b/ultralytics/docs/ko/tasks/classify.md
@@ -0,0 +1,172 @@
+---
+comments: true
+description: YOLOv8 분류 모델에 대한 이미지 분류 정보를 알아보세요.
์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ ๋ชฉ๋ก๊ณผ ๋ชจ๋ธ ํ•™์Šต, ๊ฒ€์ฆ, ์˜ˆ์ธก, ๋‚ด๋ณด๋‚ด๊ธฐ ๋ฐฉ๋ฒ•์— ๋Œ€ํ•œ ์ž์„ธํ•œ ์ •๋ณด๋ฅผ ํ™•์ธํ•˜์‹ค ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: Ultralytics, YOLOv8, ์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜, ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ, YOLOv8n-cls, ํ•™์Šต, ๊ฒ€์ฆ, ์˜ˆ์ธก, ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ +--- + +# ์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜ + +Image classification examples + +์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜๋Š” ๊ฐ€์žฅ ๋‹จ์ˆœํ•œ ์„ธ ๊ฐ€์ง€ ์ž‘์—… ์ค‘ ํ•˜๋‚˜๋กœ, ์ „์ฒด ์ด๋ฏธ์ง€๋ฅผ ๋ฏธ๋ฆฌ ์ •์˜๋œ ํด๋ž˜์Šค ์ง‘ํ•ฉ ์ค‘ ํ•˜๋‚˜๋กœ ๋ถ„๋ฅ˜ํ•˜๋Š” ์ž‘์—…์ž…๋‹ˆ๋‹ค. + +์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜๊ธฐ์˜ ์ถœ๋ ฅ์€ ๋‹จ์ผ ํด๋ž˜์Šค ๋ผ๋ฒจ๊ณผ ์‹ ๋ขฐ๋„ ์ ์ˆ˜์ž…๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜๋Š” ํด๋ž˜์Šค์˜ ์ด๋ฏธ์ง€๋งŒ ์•Œ๊ณ  ์‹ถ๊ณ  ํ•ด๋‹น ํด๋ž˜์Šค์˜ ๊ฐ์ฒด๊ฐ€ ์–ด๋””์— ์œ„์น˜ํ•˜๊ณ  ์žˆ๋Š”์ง€ ๋˜๋Š” ๊ทธ ์ •ํ™•ํ•œ ํ˜•ํƒœ๊ฐ€ ๋ฌด์—‡์ธ์ง€ ์•Œ ํ•„์š”๊ฐ€ ์—†์„ ๋•Œ ์œ ์šฉํ•ฉ๋‹ˆ๋‹ค. + +!!! Tip "ํŒ" + + YOLOv8 ๋ถ„๋ฅ˜ ๋ชจ๋ธ์€ `-cls` ์ ‘๋ฏธ์‚ฌ๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค. ์˜ˆ: `yolov8n-cls.pt`์ด๋ฉฐ, [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. + +## [๋ชจ๋ธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +์—ฌ๊ธฐ์—๋Š” ์‚ฌ์ „ ํ›ˆ๋ จ๋œ YOLOv8 ๋ถ„๋ฅ˜ ๋ชจ๋ธ์ด ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค. Detect, Segment ๋ฐ Pose ๋ชจ๋ธ์€ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ๋ฐ์ดํ„ฐ์…‹์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋˜๊ณ , ๋ถ„๋ฅ˜ ๋ชจ๋ธ์€ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ๋ฐ์ดํ„ฐ์…‹์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋ฉ๋‹ˆ๋‹ค. + +[๋ชจ๋ธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)์€ ์ฒซ ์‚ฌ์šฉ ์‹œ ์ตœ์‹  Ultralytics [๋ฆด๋ฆฌ์Šค](https://github.com/ultralytics/assets/releases)์—์„œ ์ž๋™์œผ๋กœ ๋‹ค์šด๋กœ๋“œ๋ฉ๋‹ˆ๋‹ค. + +| ๋ชจ๋ธ | ํฌ๊ธฐ
(ํ”ฝ์…€) | ์ •ํ™•๋„
top1 | ์ •ํ™•๋„
top5 | ์†๋„
CPU ONNX
(ms) | ์†๋„
A100 TensorRT
(ms) | ๋งค๊ฐœ๋ณ€์ˆ˜
(M) | FLOPs
(B) at 640 | +|----------------------------------------------------------------------------------------------|-----------------|------------------|------------------|-----------------------------|----------------------------------|------------------|--------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- **์ •ํ™•๋„** ๊ฐ’์€ [ImageNet](https://www.image-net.org/) ๋ฐ์ดํ„ฐ์…‹ ๊ฒ€์ฆ ์„ธํŠธ์—์„œ์˜ ๋ชจ๋ธ ์ •ํ™•๋„์ž…๋‹ˆ๋‹ค. +
[ImageNet](https://www.image-net.org/)์—์„œ ์žฌํ˜„ ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค: `yolo val classify data=path/to/ImageNet device=0` +- **์†๋„**๋Š” [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) ์ธ์Šคํ„ด์Šค๋ฅผ ์‚ฌ์šฉํ•ด ImageNet ๊ฒ€์ฆ ์ด๋ฏธ์ง€๋“ค์˜ ํ‰๊ท  ์†๋„์ž…๋‹ˆ๋‹ค. +
[ImageNet](https://www.image-net.org/)์—์„œ ์žฌํ˜„ ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค: `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu` + +## ํ•™์Šต + +YOLOv8n-cls ๋ชจ๋ธ์„ MNIST160 ๋ฐ์ดํ„ฐ์…‹์—์„œ 100 ์—ํฌํฌ ๋™์•ˆ ํ•™์Šต์‹œํ‚ค๊ณ  ์ด๋ฏธ์ง€ ํฌ๊ธฐ๋Š” 64๋กœ ์„ค์ •ํ•ฉ๋‹ˆ๋‹ค. ๊ฐ€๋Šฅํ•œ ๋ชจ๋“  ์ธ์ž๋Š” [์„ค์ •](/../usage/cfg.md) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-cls.yaml') # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ ๊ตฌ์ถ• + model = YOLO('yolov8n-cls.pt') # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ (ํ•™์Šต์šฉ ์ถ”์ฒœ) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # YAML๋กœ ๊ตฌ์ถ•ํ•˜๊ณ  ๊ฐ€์ค‘์น˜ ์ „์†ก + + # ๋ชจ๋ธ ํ•™์Šต + result = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ์„ ๊ตฌ์ถ•ํ•˜๊ณ  ์ฒ˜์Œ๋ถ€ํ„ฐ ํ•™์Šต ์‹œ์ž‘ + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ *.pt ๋ชจ๋ธ์—์„œ ํ•™์Šต ์‹œ์ž‘ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ์„ ๊ตฌ์ถ•ํ•˜๊ณ  ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜๋ฅผ ์ „์†กํ•œ ๋’ค ํ•™์Šต ์‹œ์ž‘ + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹ + +YOLO ๋ถ„๋ฅ˜ ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์€ [๋ฐ์ดํ„ฐ์…‹ ๊ฐ€์ด๋“œ](../../../datasets/classify/index.md)์—์„œ ์ž์„ธํžˆ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ๊ฒ€์ฆ + +ํ•™์Šต๋œ YOLOv8n-cls ๋ชจ๋ธ์˜ ์ •ํ™•๋„๋ฅผ MNIST160 ๋ฐ์ดํ„ฐ์…‹์—์„œ ๊ฒ€์ฆํ•ฉ๋‹ˆ๋‹ค. `model`์€ ๋ชจ๋ธ ์†์„ฑ์œผ๋กœ ํ›ˆ๋ จ ์‹œ `data` ๋ฐ ์ธ์ž๋ฅผ ์œ ์ง€ํ•˜๋ฏ€๋กœ ์ถ”๊ฐ€ ์ธ์ž๋ฅผ ์ „๋‹ฌํ•  ํ•„์š”๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-cls.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ๋ชจ๋ธ ๊ฒ€์ฆ + metrics = model.val() # ์ถ”๊ฐ€ ์ธ์ž ๋ถˆํ•„์š”, ๋ฐ์ดํ„ฐ์…‹ ๋ฐ ์„ค์ • ๊ธฐ์–ตํ•จ + metrics.top1 # top1 ์ •ํ™•๋„ + metrics.top5 # top5 ์ •ํ™•๋„ + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # ๊ณต์‹ ๋ชจ๋ธ ๊ฒ€์ฆ + yolo classify val model=path/to/best.pt # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๊ฒ€์ฆ + ``` + +## ์˜ˆ์ธก + +ํ•™์Šต๋œ YOLOv8n-cls ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก์„ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-cls.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ์˜ˆ์ธก ์‹คํ–‰ + results = model('https://ultralytics.com/images/bus.jpg') # ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก ์‹คํ–‰ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # ๊ณต์‹ ๋ชจ๋ธ๋กœ ์˜ˆ์ธก ์‹คํ–‰ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ์‚ฌ์šฉ์ž ๋ชจ๋ธ๋กœ ์˜ˆ์ธก ์‹คํ–‰ + ``` + +์ž์„ธํ•œ `predict` ๋ชจ๋“œ ์ •๋ณด๋Š” [์˜ˆ์ธก](https://docs.ultralytics.com/modes/predict/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. + +## ๋‚ด๋ณด๋‚ด๊ธฐ + +YOLOv8n-cls ๋ชจ๋ธ์„ ONNX, CoreML ๋“ฑ๊ณผ ๊ฐ™์€ ๋‹ค๋ฅธ ํ˜•์‹์œผ๋กœ ๋‚ด๋ณด๋ƒ…๋‹ˆ๋‹ค. + +!!! 
Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-cls.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ํ›ˆ๋ จ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # ๊ณต์‹ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + yolo export model=path/to/best.pt format=onnx # ์‚ฌ์šฉ์ž ํ›ˆ๋ จ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + ``` + +์•„๋ž˜ ํ‘œ์— ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ YOLOv8-cls ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์ด ๋‚˜์™€ ์žˆ์Šต๋‹ˆ๋‹ค. ๋‚ด๋ณด๋‚ธ ๋ชจ๋ธ์—์„œ ๋ฐ”๋กœ ์˜ˆ์ธกํ•˜๊ฑฐ๋‚˜ ๊ฒ€์ฆํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ฆ‰, `yolo predict model=yolov8n-cls.onnx`๋ฅผ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ์™„๋ฃŒ๋œ ํ›„ ๋ชจ๋ธ์— ๋Œ€ํ•œ ์‚ฌ์šฉ ์˜ˆ์ œ๋“ค์ด ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค. + +| ํ˜•์‹ | `format` ์ธ์ž | ๋ชจ๋ธ | ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ | ์ธ์ž | +|--------------------------------------------------------------------|---------------|-------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +์ž์„ธํ•œ `export` ์ •๋ณด๋Š” [๋‚ด๋ณด๋‚ด๊ธฐ](https://docs.ultralytics.com/modes/export/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. diff --git a/ultralytics/docs/ko/tasks/classify.md:Zone.Identifier b/ultralytics/docs/ko/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/tasks/detect.md b/ultralytics/docs/ko/tasks/detect.md new file mode 100755 index 0000000..ceee1eb --- /dev/null +++ b/ultralytics/docs/ko/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Ultralytics ๊ณต์‹ YOLOv8 ๋ฌธ์„œ์ž…๋‹ˆ๋‹ค. ๋ชจ๋ธ ํ›ˆ๋ จ, ๊ฒ€์ฆ, ์˜ˆ์ธก ๋ฐ ๋‹ค์–‘ํ•œ ํ˜•์‹์œผ๋กœ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ ๋ฐฉ๋ฒ•์„ ๋ฐฐ์šฐ์‹ญ์‹œ์˜ค. ์„ธ๋ถ€์ ์ธ ์„ฑ๋Šฅ ํ†ต๊ณ„๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. 
+keywords: YOLOv8, Ultralytics, 객체 감지, 사전 훈련된 모델, 훈련, 검증, 예측, 모델 내보내기, COCO, ImageNet, PyTorch, ONNX, CoreML
+---
+
+# 객체 감지
+
+객체 감지 예제
+
+객체 감지는 이미지 또는 비디오 스트림 내 객체의 위치와 클래스를 식별하는 작업입니다.
+
+객체 감지기의 출력은 이미지 속 객체를 감싸는 경계 상자(bounding box) 집합과 각 상자에 대한 클래스 레이블 및 신뢰도 점수입니다. 장면 내 관심 객체를 식별해야 하지만 객체의 정확한 위치나 모양까지 알 필요는 없을 때 객체 감지가 좋은 선택입니다.
+
+<p align="center"><em>시청하기: 사전 훈련된 Ultralytics YOLOv8 모델로 객체 감지하기.</em> (동영상 임베드)</p>
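+
+아래 예제들로 넘어가기 전에, 감지기 출력(경계 상자 · 클래스 · 신뢰도)을 Python에서 읽는 간단한 스케치를 먼저 보입니다(모델 파일명과 이미지 URL은 본문 예제와 동일하다고 가정합니다):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+boxes = results[0].boxes  # 첫 번째 이미지의 감지 결과
+for xyxy, conf, cls in zip(boxes.xyxy, boxes.conf, boxes.cls):
+    # 클래스명, 신뢰도, 경계 상자 좌표 출력
+    print(model.names[int(cls)], float(conf), xyxy.tolist())
+```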
+
+!!! Tip "팁"
+
+    YOLOv8 Detect 모델은 기본 YOLOv8 모델이며, 예를 들어 `yolov8n.pt`는 [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터셋에서 사전 훈련되었습니다.
+
+## [모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+여기서는 사전 훈련된 YOLOv8 Detect 모델을 소개합니다. Detect, Segment 및 Pose 모델은 [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터셋에서, Classify 모델은 [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) 데이터셋에서 사전 훈련되었습니다.
+
+[모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)은 첫 사용 시 Ultralytics의 최신 [릴리즈](https://github.com/ultralytics/assets/releases)에서 자동으로 다운로드됩니다.
+
+| 모델 | 크기<br>(픽셀) | mAPval<br>50-95 | 속도<br>CPU ONNX<br>(ms) | 속도<br>A100 TensorRT<br>(ms) | 파라미터<br>(M) | FLOPs<br>(B) |
+|---|---|---|---|---|---|---|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
+
+- **mAPval** 값은 [COCO val2017](http://cocodataset.org) 데이터셋에서 단일 모델 단일 스케일을 사용한 값입니다.<br>[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터와 `yolo val detect data=coco.yaml device=0` 명령으로 재현할 수 있습니다.
+- **속도**는 [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) 인스턴스를 사용해 COCO val 이미지들을 평균한 것입니다.<br>
[COCO128](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco128.yaml) ๋ฐ์ดํ„ฐ์™€ `yolo val detect data=coco128.yaml batch=1 device=0|cpu` ๋ช…๋ น์œผ๋กœ ์žฌํ˜„ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ํ›ˆ๋ จ + +COCO128 ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ด๋ฏธ์ง€ ํฌ๊ธฐ 640์œผ๋กœ YOLOv8n ๋ชจ๋ธ์„ 100 ์—ํฌํฌ ๋™์•ˆ ํ›ˆ๋ จํ•ฉ๋‹ˆ๋‹ค. ๊ฐ€๋Šฅํ•œ ๋ชจ๋“  ์ธ์ˆ˜์— ๋Œ€ํ•œ ๋ชฉ๋ก์€ [์„ค์ •](/../usage/cfg.md) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋กœ๋“œํ•˜๊ธฐ + model = YOLO('yolov8n.yaml') # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ์„ ๋นŒ๋“œํ•ฉ๋‹ˆ๋‹ค. + model = YOLO('yolov8n.pt') # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค(ํ›ˆ๋ จ์„ ์œ„ํ•ด ๊ถŒ์žฅ๋ฉ๋‹ˆ๋‹ค). + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # YAML์—์„œ ๋นŒ๋“œํ•˜๊ณ  ๊ฐ€์ค‘์น˜๋ฅผ ์ „๋‹ฌํ•ฉ๋‹ˆ๋‹ค. + + # ๋ชจ๋ธ ํ›ˆ๋ จํ•˜๊ธฐ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ์„ ๋นŒ๋“œํ•˜๊ณ  ์ฒ˜์Œ๋ถ€ํ„ฐ ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค. + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ *.pt ๋ชจ๋ธ๋กœ๋ถ€ํ„ฐ ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค. + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # YAML์—์„œ ์ƒˆ ๋ชจ๋ธ์„ ๋นŒ๋“œํ•˜๊ณ , ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜๋ฅผ ์ „๋‹ฌํ•œ ํ›„ ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค. + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹ + +YOLO ๊ฐ์ง€ ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์€ [๋ฐ์ดํ„ฐ์…‹ ๊ฐ€์ด๋“œ](../../../datasets/detect/index.md)์—์„œ ์ž์„ธํžˆ ๋ณผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๋‹ค๋ฅธ ํ˜•์‹(์˜ˆ: COCO ๋“ฑ)์˜ ๊ธฐ์กด ๋ฐ์ดํ„ฐ์…‹์„ YOLO ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋ ค๋ฉด Ultralytics์˜ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ๋„๊ตฌ๋ฅผ ์‚ฌ์šฉํ•˜์‹ญ์‹œ์˜ค. + +## ๊ฒ€์ฆ + +COCO128 ๋ฐ์ดํ„ฐ์…‹์—์„œ ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ์˜ ์ •ํ™•๋„๋ฅผ ๊ฒ€์ฆํ•ฉ๋‹ˆ๋‹ค. `model`์€ ํ›ˆ๋ จ ์‹œ์˜ `data`์™€ ์ธ์ˆ˜๋ฅผ ๋ชจ๋ธ ์†์„ฑ์œผ๋กœ ๋ณด์กดํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ธ์ˆ˜๋ฅผ ์ „๋‹ฌํ•  ํ•„์š”๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋กœ๋“œํ•˜๊ธฐ + model = YOLO('yolov8n.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + + # ๋ชจ๋ธ ๊ฒ€์ฆํ•˜๊ธฐ + metrics = model.val() # ๋ฐ์ดํ„ฐ์…‹๊ณผ ์„ค์ •์„ ๊ธฐ์–ตํ•˜๋‹ˆ ์ธ์ˆ˜๋Š” ํ•„์š” ์—†์Šต๋‹ˆ๋‹ค. + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ์˜ map50-95๊ฐ€ ํฌํ•จ๋œ ๋ฆฌ์ŠคํŠธ์ž…๋‹ˆ๋‹ค. + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ๊ณต์‹ ๋ชจ๋ธ ๊ฒ€์ฆํ•˜๊ธฐ + yolo detect val model=path/to/best.pt # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ ๊ฒ€์ฆํ•˜๊ธฐ + ``` + +## ์˜ˆ์ธก + +ํ›ˆ๋ จ๋œ YOLOv8n ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋กœ๋“œํ•˜๊ธฐ + model = YOLO('yolov8n.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + + # ๋ชจ๋ธ๋กœ ์˜ˆ์ธกํ•˜๊ธฐ + results = model('https://ultralytics.com/images/bus.jpg') # ์ด๋ฏธ์ง€์— ๋Œ€ํ•ด ์˜ˆ์ธกํ•ฉ๋‹ˆ๋‹ค. 
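+
+        # 추가 참고용 스케치: 예측 결과를 주석이 그려진 이미지로 변환합니다
+        # (results[0].plot()은 상자와 라벨이 그려진 BGR numpy 배열을 반환합니다)
+        annotated = results[0].plot()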
+ ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # ๊ณต์‹ ๋ชจ๋ธ๋กœ ์˜ˆ์ธกํ•˜๊ธฐ + yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ๋กœ ์˜ˆ์ธกํ•˜๊ธฐ + ``` + +์ „์ฒด 'predict' ๋ชจ๋“œ ์„ธ๋ถ€ ์‚ฌํ•ญ์€ [Predict](https://docs.ultralytics.com/modes/predict/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. + +## ๋‚ด๋ณด๋‚ด๊ธฐ + +YOLOv8n ๋ชจ๋ธ์„ ONNX, CoreML ๋“ฑ๊ณผ ๊ฐ™์€ ๋‹ค๋ฅธ ํ˜•์‹์œผ๋กœ ๋‚ด๋ณด๋ƒ…๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋กœ๋“œํ•˜๊ธฐ + model = YOLO('yolov8n.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ์„ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค. + + # ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ๊ณต์‹ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + yolo export model=path/to/best.pt format=onnx # ์‚ฌ์šฉ์ž ์ •์˜ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + ``` + +์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ YOLOv8 ๋‚ด๋ณด๋‚ด๊ธฐ ํ˜•์‹์€ ์•„๋ž˜ ํ‘œ์— ๋‚˜์™€ ์žˆ์Šต๋‹ˆ๋‹ค. ๋‚ด๋ณด๋‚ด๊ธฐ ์™„๋ฃŒ ํ›„ ์‚ฌ์šฉ ์˜ˆ์‹œ๋Š” ๋ชจ๋ธ์— ๋Œ€ํ•ด ๋ณด์—ฌ์ค๋‹ˆ๋‹ค. + +| ํ˜•์‹ | `format` ์ธ์ˆ˜ | ๋ชจ๋ธ | ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ | ์ธ์ˆ˜ | +|--------------------------------------------------------------------|---------------|---------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +์ „์ฒด 'export' ์„ธ๋ถ€ ์‚ฌํ•ญ์€ [Export](https://docs.ultralytics.com/modes/export/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. 
diff --git a/ultralytics/docs/ko/tasks/detect.md:Zone.Identifier b/ultralytics/docs/ko/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/tasks/index.md b/ultralytics/docs/ko/tasks/index.md new file mode 100755 index 0000000..5864a0f --- /dev/null +++ b/ultralytics/docs/ko/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: YOLOv8์„ ์‚ฌ์šฉํ•˜์—ฌ ์ˆ˜ํ–‰ํ•  ์ˆ˜ ์žˆ๋Š” ์ปดํ“จํ„ฐ ๋น„์ „ ์ž‘์—…์˜ ๊ธฐ์ดˆ์ธ ํƒ์ง€, ์„ธ๋ถ„ํ™”, ๋ถ„๋ฅ˜ ๋ฐ ์ž์„ธ ์ถ”์ •์— ๋Œ€ํ•ด ์•Œ์•„๋ณด์„ธ์š”. AI ํ”„๋กœ์ ํŠธ์—์„œ์˜ ๊ทธ ์šฉ๋„๋ฅผ ์ดํ•ดํ•˜์„ธ์š”. +keywords: Ultralytics, YOLOv8, ํƒ์ง€, ์„ธ๋ถ„ํ™”, ๋ถ„๋ฅ˜, ์ž์„ธ ์ถ”์ •, AI ํ”„๋ ˆ์ž„์›Œํฌ, ์ปดํ“จํ„ฐ ๋น„์ „ ์ž‘์—… +--- + +# Ultralytics YOLOv8 ์ž‘์—… + +
+Ultralytics YOLO 지원 작업
+
+YOLOv8은 여러 컴퓨터 비전 **작업**을 지원하는 AI 프레임워크입니다. 이 프레임워크는 [탐지](detect.md), [세분화](segment.md), [분류](classify.md), 그리고 [자세](pose.md) 추정을 수행하는 데 사용될 수 있습니다. 각각의 작업은 서로 다른 목적과 사용 사례를 가지고 있습니다.
+
+!!! Note "노트"
+
+    🚧 다국어 문서화 작업이 진행 중에 있으며, 더 나은 문서를 제공하기 위해 노력하고 있습니다. 인내해 주셔서 감사합니다! 🙏
+
+<p align="center"><em>보기: Ultralytics YOLO 작업 탐색: 객체 탐지, 세분화, 추적, 자세 추정.</em> (동영상 임베드)</p>
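+
+같은 Python API에서 네 가지 작업이 가중치 파일의 접미사로 어떻게 구분되는지 보여주는 간단한 스케치입니다(파일명 규칙은 아래 각 작업 문서의 예제를 따른다고 가정합니다):
+
+```python
+from ultralytics import YOLO
+
+# 접미사 없음=탐지, -seg=세분화, -cls=분류, -pose=자세
+for weights in ['yolov8n.pt', 'yolov8n-seg.pt', 'yolov8n-cls.pt', 'yolov8n-pose.pt']:
+    model = YOLO(weights)
+    print(weights, '->', model.task)  # 로드된 모델의 작업 유형 출력
+```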
+ +## [ํƒ์ง€](detect.md) + +ํƒ์ง€๋Š” YOLOv8์ด ์ง€์›ํ•˜๋Š” ๊ธฐ๋ณธ ์ž‘์—…์ž…๋‹ˆ๋‹ค. ์ด๋ฏธ์ง€ ๋˜๋Š” ๋น„๋””์˜ค ํ”„๋ ˆ์ž„์—์„œ ๊ฐ์ฒด๋ฅผ ํƒ์ง€ํ•˜๊ณ  ์ฃผ๋ณ€์— ๊ฒฝ๊ณ„ ์ƒ์ž๋ฅผ ๊ทธ๋ฆฌ๋Š” ๊ฒƒ์„ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. ํƒ์ง€๋œ ๊ฐ์ฒด๋“ค์€ ๊ทธ ํŠน์ง•์— ๋”ฐ๋ผ ๋‹ค๋ฅธ ์นดํ…Œ๊ณ ๋ฆฌ๋กœ ๋ถ„๋ฅ˜๋ฉ๋‹ˆ๋‹ค. YOLOv8์€ ๋‹จ์ผ ์ด๋ฏธ์ง€๋‚˜ ๋น„๋””์˜ค ํ”„๋ ˆ์ž„์—์„œ ์—ฌ๋Ÿฌ ๊ฐ์ฒด๋ฅผ ์ •ํ™•ํ•˜๊ณ  ๋น ๋ฅด๊ฒŒ ํƒ์ง€ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +[ํƒ์ง€ ์˜ˆ์‹œ](detect.md){ .md-button } + +## [์„ธ๋ถ„ํ™”](segment.md) + +์„ธ๋ถ„ํ™”๋Š” ์ด๋ฏธ์ง€๋ฅผ ๋‚ด์šฉ์— ๊ธฐ๋ฐ˜ํ•˜์—ฌ ๋‹ค๋ฅธ ์˜์—ญ์œผ๋กœ ๋‚˜๋ˆ„๋Š” ์ž‘์—…์ž…๋‹ˆ๋‹ค. ๊ฐ ์˜์—ญ์€ ๋‚ด์šฉ์— ๋”ฐ๋ผ ๋ ˆ์ด๋ธ”์ด ์ง€์ •๋ฉ๋‹ˆ๋‹ค. ์ด ์ž‘์—…์€ ์ด๋ฏธ์ง€ ์„ธ๋ถ„ํ™”์™€ ์˜๋ฃŒ ์˜์ƒ๊ณผ ๊ฐ™์€ ์‘์šฉ ๋ถ„์•ผ์— ์œ ์šฉํ•ฉ๋‹ˆ๋‹ค. YOLOv8๋Š” U-Net ์•„ํ‚คํ…์ฒ˜์˜ ๋ณ€ํ˜•์„ ์‚ฌ์šฉํ•˜์—ฌ ์„ธ๋ถ„ํ™”๋ฅผ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค. + +[์„ธ๋ถ„ํ™” ์˜ˆ์‹œ](segment.md){ .md-button } + +## [๋ถ„๋ฅ˜](classify.md) + +๋ถ„๋ฅ˜๋Š” ์ด๋ฏธ์ง€๋ฅผ ๋‹ค๋ฅธ ์นดํ…Œ๊ณ ๋ฆฌ๋กœ ๋ถ„๋ฅ˜ํ•˜๋Š” ์ž‘์—…์ž…๋‹ˆ๋‹ค. YOLOv8๋Š” ์ด๋ฏธ์ง€์˜ ๋‚ด์šฉ์„ ๋ฐ”ํƒ•์œผ๋กœ ์ด๋ฏธ์ง€ ๋ถ„๋ฅ˜์— ์‚ฌ์šฉ๋  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Š” EfficientNet ์•„ํ‚คํ…์ฒ˜์˜ ๋ณ€ํ˜•์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ถ„๋ฅ˜ ์ž‘์—…์„ ์ˆ˜ํ–‰ํ•ฉ๋‹ˆ๋‹ค. + +[๋ถ„๋ฅ˜ ์˜ˆ์‹œ](classify.md){ .md-button } + +## [์ž์„ธ](pose.md) + +์ž์„ธ/ํ‚คํฌ์ธํŠธ ํƒ์ง€๋Š” ์ด๋ฏธ์ง€๋‚˜ ๋น„๋””์˜ค ํ”„๋ ˆ์ž„์—์„œ ํŠน์ • ์ ๋“ค์„ ํƒ์ง€ํ•˜๋Š” ์ž‘์—…์ž…๋‹ˆ๋‹ค. ์ด๋“ค ์ ์€ ํ‚คํฌ์ธํŠธ๋กœ ๋ถˆ๋ฆฌ๋ฉฐ, ์›€์ง์ž„ ์ถ”์ ์ด๋‚˜ ์ž์„ธ ์ถ”์ •์— ์‚ฌ์šฉ๋ฉ๋‹ˆ๋‹ค. YOLOv8์€ ์ด๋ฏธ์ง€๋‚˜ ๋น„๋””์˜ค ํ”„๋ ˆ์ž„์˜ ํ‚คํฌ์ธํŠธ๋ฅผ ์ •ํ™•ํ•˜๊ณ  ๋น ๋ฅด๊ฒŒ ํƒ์ง€ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +[์ž์„ธ ์˜ˆ์‹œ](pose.md){ .md-button } + +## ๊ฒฐ๋ก  + +YOLOv8์€ ํƒ์ง€, ์„ธ๋ถ„ํ™”, ๋ถ„๋ฅ˜, ํ‚คํฌ์ธํŠธ ํƒ์ง€ ๋“ฑ ๋‹ค์–‘ํ•œ ์ž‘์—…์„ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค. ๊ฐ๊ฐ์˜ ์ž‘์—…์€ ๋‹ค๋ฅธ ๋ชฉ์ ๊ณผ ์‚ฌ์šฉ ์‚ฌ๋ก€๋ฅผ ๊ฐ€์ง€๊ณ  ์žˆ์Šต๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ์ž‘์—…์˜ ์ฐจ์ด์ ์„ ์ดํ•ดํ•จ์œผ๋กœ์จ, ์ปดํ“จํ„ฐ ๋น„์ „ ์‘์šฉ ํ”„๋กœ๊ทธ๋žจ์— ์ ํ•ฉํ•œ ์ž‘์—…์„ ์„ ํƒํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. diff --git a/ultralytics/docs/ko/tasks/index.md:Zone.Identifier b/ultralytics/docs/ko/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/tasks/pose.md b/ultralytics/docs/ko/tasks/pose.md new file mode 100755 index 0000000..ee4944f --- /dev/null +++ b/ultralytics/docs/ko/tasks/pose.md @@ -0,0 +1,185 @@ +--- +comments: true +description: Ultralytics YOLOv8์„ ์‚ฌ์šฉํ•˜์—ฌ ํฌ์ฆˆ ์ถ”์ • ์ž‘์—…์„ ์ˆ˜ํ–‰ํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ์•Œ์•„๋ณด์„ธ์š”. ๋ฏธ๋ฆฌ ํ•™์Šต๋œ ๋ชจ๋ธ์„ ์ฐพ๊ณ , ํ•™์Šต, ๊ฒ€์ฆ, ์˜ˆ์ธก, ๋‚ด๋ณด๋‚ด๊ธฐ ๋“ฑ์„ ์ง„ํ–‰ํ•˜๋Š” ๋ฐฉ๋ฒ•์„ ๋ฐฐ์šธ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. +keywords: Ultralytics, YOLO, YOLOv8, ํฌ์ฆˆ ์ถ”์ •, ํ‚คํฌ์ธํŠธ ๊ฒ€์ถœ, ๊ฐ์ฒด ๊ฒ€์ถœ, ๋ฏธ๋ฆฌ ํ•™์Šต๋œ ๋ชจ๋ธ, ๊ธฐ๊ณ„ ํ•™์Šต, ์ธ๊ณต ์ง€๋Šฅ +--- + +# ํฌ์ฆˆ ์ถ”์ • + +ํฌ์ฆˆ ์ถ”์ • ์˜ˆ์‹œ + +ํฌ์ฆˆ ์ถ”์ •์€ ์ด๋ฏธ์ง€ ๋‚ด ํŠน์ • ์ ๋“ค์˜ ์œ„์น˜๋ฅผ ์‹๋ณ„ํ•˜๋Š” ์ž‘์—…์ž…๋‹ˆ๋‹ค. ์ด๋Ÿฌํ•œ ์ ๋“ค์€ ๋ณดํ†ต ๊ด€์ ˆ, ํ‘œ์‹, ๋˜๋Š” ๊ธฐํƒ€ ๊ตฌ๋ณ„ ๊ฐ€๋Šฅํ•œ ํŠน์ง•์œผ๋กœ ๋‚˜ํƒ€๋‚˜๋Š” ํ‚คํฌ์ธํŠธ์ž…๋‹ˆ๋‹ค. ํ‚คํฌ์ธํŠธ์˜ ์œ„์น˜๋Š” ๋Œ€๊ฐœ 2D `[x, y]` ๋˜๋Š” 3D `[x, y, visible]` ์ขŒํ‘œ์˜ ์ง‘ํ•ฉ์œผ๋กœ ํ‘œํ˜„๋ฉ๋‹ˆ๋‹ค. + +ํฌ์ฆˆ ์ถ”์ • ๋ชจ๋ธ์˜ ์ถœ๋ ฅ์€ ์ด๋ฏธ์ง€ ์† ๊ฐ์ฒด ์ƒ์˜ ํ‚คํฌ์ธํŠธ๋ฅผ ๋‚˜ํƒ€๋‚ด๋Š” ์ ๋“ค์˜ ์ง‘ํ•ฉ๊ณผ ๊ฐ ์ ์˜ ์‹ ๋ขฐ๋„ ์ ์ˆ˜๋ฅผ ํฌํ•จํ•ฉ๋‹ˆ๋‹ค. ํฌ์ฆˆ ์ถ”์ •์€ ์žฅ๋ฉด ์† ๊ฐ์ฒด์˜ ๊ตฌ์ฒด์ ์ธ ๋ถ€๋ถ„์„ ์‹๋ณ„ํ•˜๊ณ , ์„œ๋กœ ๊ด€๋ จ๋œ ์œ„์น˜๋ฅผ ํŒŒ์•…ํ•ด์•ผ ํ•  ๋•Œ ์ข‹์€ ์„ ํƒ์ž…๋‹ˆ๋‹ค. + +
+<p align="center"><em>시청하기: Ultralytics YOLOv8을 이용한 포즈 추정.</em> (동영상 임베드)</p>
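+
+앞 단락에서 설명한 키포인트 출력을 코드에서 읽는 간단한 스케치입니다(이미지 URL은 아래 예측 예제와 동일하다고 가정합니다):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-pose.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+kpts = results[0].keypoints  # 감지된 사람(인스턴스)별 키포인트
+print(kpts.xy.shape)         # (인스턴스 수, 키포인트 수, 2) 픽셀 좌표
+print(kpts.conf)             # 키포인트별 신뢰도 점수
+```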
+
+!!! Tip "팁"
+
+    YOLOv8 _pose_ 모델은 `-pose` 접미사가 붙습니다(예: `yolov8n-pose.pt`). 이 모델들은 [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) 데이터셋으로 학습되었으며 포즈 추정 작업에 적합합니다.
+
+## [모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+여기에서 미리 학습된 YOLOv8 포즈 모델을 확인하세요. Detect, Segment 및 Pose 모델은 [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터셋으로 미리 학습되며, Classify 모델은 [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) 데이터셋으로 미리 학습됩니다.
+
+[모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)은 첫 사용 시 Ultralytics [릴리스](https://github.com/ultralytics/assets/releases)에서 자동으로 다운로드됩니다.
+
+| 모델 | 크기<br>(픽셀) | mAP포즈<br>50-95 | mAP포즈<br>50 | 속도<br>CPU ONNX<br>(ms) | 속도<br>A100 TensorRT<br>(ms) | 파라미터<br>(M) | FLOPs<br>(B) |
+|---|---|---|---|---|---|---|---|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- **mAPval** 값은 [COCO Keypoints val2017](http://cocodataset.org) 데이터셋에서 단일 모델 단일 규모를 기준으로 합니다.<br>재현하려면 `yolo val pose data=coco-pose.yaml device=0`을 사용하세요.
+- **속도**는 [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) 인스턴스를 사용하여 COCO val 이미지를 평균한 것입니다.<br>
์žฌํ˜„ํ•˜๋ ค๋ฉด `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu`๋ฅผ ์‚ฌ์šฉํ•˜์„ธ์š”. + +## ํ•™์Šต + +COCO128-pose ๋ฐ์ดํ„ฐ์…‹์—์„œ YOLOv8-pose ๋ชจ๋ธ ํ•™์Šตํ•˜๊ธฐ. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-pose.yaml') # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ ๊ตฌ์ถ• + model = YOLO('yolov8n-pose.pt') # ์‚ฌ์ „ ํ•™์Šต๋œ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ (ํ•™์Šต์— ์ถ”์ฒœ) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # YAML์—์„œ ๊ตฌ์ถ•ํ•˜๊ณ  ๊ฐ€์ค‘์น˜ ์ „๋‹ฌ + + # ๋ชจ๋ธ ํ•™์Šต + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ ๊ตฌ์ถ•ํ•˜๊ณ  ์ฒ˜์Œ๋ถ€ํ„ฐ ํ•™์Šต ์‹œ์ž‘ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # ์‚ฌ์ „ ํ•™์Šต๋œ *.pt ๋ชจ๋ธ๋กœ๋ถ€ํ„ฐ ํ•™์Šต ์‹œ์ž‘ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ ๊ตฌ์ถ•ํ•˜๊ณ  ์‚ฌ์ „ ํ•™์Šต๋œ ๊ฐ€์ค‘์น˜๋ฅผ ์ „๋‹ฌํ•˜์—ฌ ํ•™์Šต ์‹œ์ž‘ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹ + +YOLO ํฌ์ฆˆ ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์— ๋Œ€ํ•œ ์ž์„ธํ•œ ๋‚ด์šฉ์€ [๋ฐ์ดํ„ฐ์…‹ ๊ฐ€์ด๋“œ](../../../datasets/pose/index.md)์—์„œ ์ฐพ์•„๋ณผ ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๊ธฐ์กด ๋ฐ์ดํ„ฐ์…‹์„ ๋‹ค๋ฅธ ํ˜•์‹(์˜ˆ: COCO ๋“ฑ)์—์„œ YOLO ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋ ค๋ฉด Ultralytics์˜ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ๋„๊ตฌ๋ฅผ ์‚ฌ์šฉํ•˜์„ธ์š”. + +## ๊ฒ€์ฆ + +ํ•™์Šต๋œ YOLOv8n-pose ๋ชจ๋ธ์˜ ์ •ํ™•๋„๋ฅผ COCO128-pose ๋ฐ์ดํ„ฐ์…‹์—์„œ ๊ฒ€์ฆํ•˜๊ธฐ. ๋ชจ๋ธ์€ ํ•™์Šต `data` ๋ฐ ์ธ์ˆ˜๋ฅผ ๋ชจ๋ธ ์†์„ฑ์œผ๋กœ ์œ ์ง€ํ•˜๊ธฐ ๋•Œ๋ฌธ์— ์ธ์ˆ˜๋ฅผ ์ „๋‹ฌํ•  ํ•„์š”๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-pose.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ๋ชจ๋ธ ๊ฒ€์ฆ + metrics = model.val() # ๋ฐ์ดํ„ฐ์…‹ ๋ฐ ์„ค์ •์„ ๊ธฐ์–ตํ•˜๋ฏ€๋กœ ์ธ์ˆ˜ ํ•„์š” ์—†์Œ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๊ฐ ๋ฒ”์ฃผ์˜ map50-95๋ฅผ ํฌํ•จํ•˜๋Š” ๋ฆฌ์ŠคํŠธ + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # ๊ณต์‹ ๋ชจ๋ธ ๊ฒ€์ฆ + yolo pose val model=path/to/best.pt # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๊ฒ€์ฆ + ``` + +## ์˜ˆ์ธก + +ํ•™์Šต๋œ YOLOv8n-pose ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก ์ˆ˜ํ–‰ํ•˜๊ธฐ. + +!!! Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-pose.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ๋ชจ๋ธ๋กœ ์˜ˆ์ธกํ•˜๊ธฐ + results = model('https://ultralytics.com/images/bus.jpg') # ์ด๋ฏธ์ง€์—์„œ ์˜ˆ์ธก + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # ๊ณต์‹ ๋ชจ๋ธ๋กœ ์˜ˆ์ธก + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ์‚ฌ์šฉ์ž ๋ชจ๋ธ๋กœ ์˜ˆ์ธก + ``` + +`predict` ๋ชจ๋“œ์˜ ์ „์ฒด ์„ธ๋ถ€ ์ •๋ณด๋Š” [์˜ˆ์ธก](https://docs.ultralytics.com/modes/predict/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. + +## ๋‚ด๋ณด๋‚ด๊ธฐ + +YOLOv8n ํฌ์ฆˆ ๋ชจ๋ธ์„ ONNX, CoreML ๋“ฑ ๋‹ค๋ฅธ ํ˜•์‹์œผ๋กœ ๋‚ด๋ณด๋‚ด๊ธฐ. + +!!! 
Example "์˜ˆ์ œ" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('yolov8n-pose.pt') # ๊ณต์‹ ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + model = YOLO('path/to/best.pt') # ์‚ฌ์šฉ์ž ํ•™์Šต ๋ชจ๋ธ ๋ถˆ๋Ÿฌ์˜ค๊ธฐ + + # ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # ๊ณต์‹ ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + yolo export model=path/to/best.pt format=onnx # ์‚ฌ์šฉ์ž ํ•™์Šต ๋ชจ๋ธ ๋‚ด๋ณด๋‚ด๊ธฐ + ``` + +YOLOv8-pose ๋‚ด๋ณด๋‚ด๊ธฐ ๊ฐ€๋Šฅํ•œ ํ˜•์‹์€ ์•„๋ž˜ ํ‘œ์— ๋‚˜์—ด๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค. ๋‚ด๋ณด๋‚ธ ๋ชจ๋ธ์—์„œ ์ง์ ‘ ์˜ˆ์ธก ๋˜๋Š” ๊ฒ€์ฆ์ด ๊ฐ€๋Šฅํ•ฉ๋‹ˆ๋‹ค, ์˜ˆ: `yolo predict model=yolov8n-pose.onnx`. ๋‚ด๋ณด๋‚ด๊ธฐ๊ฐ€ ์™„๋ฃŒ๋œ ํ›„ ๋ชจ๋ธ ์‚ฌ์šฉ ์˜ˆ์ œ๊ฐ€ ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค. + +| ํ˜•์‹ | `format` ์ธ์ˆ˜ | ๋ชจ๋ธ | ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ | ์ธ์ˆ˜ | +|--------------------------------------------------------------------|---------------|--------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +`export`์˜ ์ „์ฒด ์„ธ๋ถ€ ์ •๋ณด๋Š” [๋‚ด๋ณด๋‚ด๊ธฐ](https://docs.ultralytics.com/modes/export/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•˜์„ธ์š”. diff --git a/ultralytics/docs/ko/tasks/pose.md:Zone.Identifier b/ultralytics/docs/ko/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ko/tasks/segment.md b/ultralytics/docs/ko/tasks/segment.md new file mode 100755 index 0000000..82c36b0 --- /dev/null +++ b/ultralytics/docs/ko/tasks/segment.md @@ -0,0 +1,188 @@ +--- +comments: true +description: Ultralytics YOLO๋ฅผ ์ด์šฉํ•œ ์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ๋ชจ๋ธ ์‚ฌ์šฉ๋ฒ• ๋ฐฐ์šฐ๊ธฐ. ํ›ˆ๋ จ, ๊ฒ€์ฆ, ์ด๋ฏธ์ง€ ์˜ˆ์ธก ๋ฐ ๋ชจ๋ธ ์ˆ˜์ถœ์— ๋Œ€ํ•œ ์ง€์นจ. 
+keywords: yolov8, ์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜, Ultralytics, COCO ๋ฐ์ดํ„ฐ์…‹, ์ด๋ฏธ์ง€ ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜, ๊ฐ์ฒด ํƒ์ง€, ๋ชจ๋ธ ํ›ˆ๋ จ, ๋ชจ๋ธ ๊ฒ€์ฆ, ์ด๋ฏธ์ง€ ์˜ˆ์ธก, ๋ชจ๋ธ ์ˆ˜์ถœ +--- + +# ์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ + +์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ์˜ˆ์‹œ + +์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜์€ ๊ฐ์ฒด ํƒ์ง€๋ฅผ ํ•œ ๋‹จ๊ณ„ ๋” ๋ฐœ์ „์‹œ์ผœ ์ด๋ฏธ์ง€์—์„œ ๊ฐ๊ฐ์˜ ๊ฐœ๋ณ„ ๊ฐ์ฒด๋ฅผ ์‹๋ณ„ํ•˜๊ณ  ์ด๋ฏธ์ง€์˜ ๋‚˜๋จธ์ง€ ๋ถ€๋ถ„์—์„œ ๋ถ„๋ฆฌํ•˜๋Š” ๊ธฐ์ˆ ์ž…๋‹ˆ๋‹ค. + +์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ๋ชจ๋ธ์˜ ์ถœ๋ ฅ์€ ์ด๋ฏธ์ง€์˜ ๊ฐ ๊ฐ์ฒด๋ฅผ ์œค๊ณฝํ•˜๋Š” ๋งˆ์Šคํฌ๋‚˜ ์œค๊ณฝ ์„ ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ ๊ฐ ๊ฐ์ฒด์— ๋Œ€ํ•œ ํด๋ž˜์Šค ๋ ˆ์ด๋ธ”๊ณผ ์‹ ๋ขฐ๋„ ์ ์ˆ˜๋กœ ๊ตฌ์„ฑ๋ฉ๋‹ˆ๋‹ค. ๊ฐ์ฒด๋“ค์ด ์ด๋ฏธ์ง€ ์•ˆ์—์„œ ์–ด๋””์— ์žˆ๋Š”์ง€ ๋ฟ๋งŒ ์•„๋‹ˆ๋ผ ๊ทธ๋“ค์˜ ์ •ํ™•ํ•œ ํ˜•ํƒœ๊ฐ€ ๋ฌด์—‡์ธ์ง€ ์•Œ์•„์•ผ ํ•  ๋•Œ ์ธ์Šคํ„ด์Šค ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜์ด ์œ ์šฉํ•ฉ๋‹ˆ๋‹ค. + +

+
+ +
+ ์‹œ์ฒญํ•˜๊ธฐ: Python์—์„œ ์‚ฌ์ „ ํ›ˆ๋ จ๋œ Ultralytics YOLOv8 ๋ชจ๋ธ๋กœ ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ์‹คํ–‰. +

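+
+앞 단락에서 설명한 마스크 출력 구조를 코드에서 확인하는 간단한 스케치입니다(이미지 URL은 아래 예측 예제와 동일하다고 가정합니다):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-seg.pt')
+results = model('https://ultralytics.com/images/bus.jpg')
+
+masks = results[0].masks  # 인스턴스별 세그멘테이션 마스크
+print(masks.data.shape)   # (인스턴스 수, 높이, 너비) 형태의 이진 마스크 텐서
+print(len(masks.xy))      # 인스턴스별 폴리곤(윤곽선) 좌표 목록
+```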
+
+!!! Tip "팁"
+
+    YOLOv8 Segment 모델은 `-seg` 접미사를 사용하며(예: `yolov8n-seg.pt`), [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터셋에서 사전 훈련되어 있습니다.
+
+## [모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+여기에는 사전 훈련된 YOLOv8 Segment 모델들이 나열되어 있습니다. Detect, Segment, Pose 모델들은 [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) 데이터셋에서, Classify 모델들은 [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) 데이터셋에서 사전 훈련되어 있습니다.
+
+[모델](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)은 첫 사용 시 Ultralytics의 최신 [릴리스](https://github.com/ultralytics/assets/releases)에서 자동으로 다운로드됩니다.
+
+| 모델 | 크기<br>(픽셀) | mAP박스<br>50-95 | mAP마스크<br>50-95 | 속도<br>CPU ONNX<br>(밀리초) | 속도<br>A100 TensorRT<br>(밀리초) | 매개변수<br>(M) | FLOPs<br>(B) |
+|---|---|---|---|---|---|---|---|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- **mAPval** 값들은 [COCO val2017](http://cocodataset.org) 데이터셋에서 단일 모델 단일 스케일로 얻은 값입니다.<br>복제는 `yolo val segment data=coco.yaml device=0` 명령어로 실행할 수 있습니다.
+- **속도**는 [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) 인스턴스를 이용하여 COCO 검증 이미지로 평균 내었습니다.<br>
๋ณต์ œ๋Š” `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` ๋ช…๋ น์–ด๋กœ ์‹คํ–‰ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ํ›ˆ๋ จ + +COCO128-seg ๋ฐ์ดํ„ฐ์…‹์—์„œ ์ด๋ฏธ์ง€ ํฌ๊ธฐ 640์œผ๋กœ YOLOv8n-seg์„ 100 ์—ํฌํฌ ๋™์•ˆ ํ›ˆ๋ จํ•ฉ๋‹ˆ๋‹ค. ๊ฐ€๋Šฅํ•œ ๋ชจ๋“  ์ธ์ž ๋ชฉ๋ก์€ [์„ค์ •](/../usage/cfg.md) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค + model = YOLO('yolov8n-seg.yaml') # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ์„ ๊ตฌ์„ฑ + model = YOLO('yolov8n-seg.pt') # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด (ํ›ˆ๋ จ์— ์ถ”์ฒœ) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # YAML์—์„œ ๊ตฌ์„ฑํ•˜๊ณ  ๊ฐ€์ค‘์น˜๋ฅผ ์ „๋‹ฌ + + # ๋ชจ๋ธ์„ ํ›ˆ๋ จ์‹œํ‚ต๋‹ˆ๋‹ค + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ•˜๊ณ  ์ฒ˜์Œ๋ถ€ํ„ฐ ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # ์‚ฌ์ „ ํ›ˆ๋ จ๋œ *.pt ๋ชจ๋ธ๋กœ ๋ถ€ํ„ฐ ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # YAML์—์„œ ์ƒˆ๋กœ์šด ๋ชจ๋ธ์„ ๊ตฌ์„ฑํ•˜๊ณ  ์‚ฌ์ „ ํ›ˆ๋ จ๋œ ๊ฐ€์ค‘์น˜๋ฅผ ์ „๋‹ฌํ•œ ๋’ค ํ›ˆ๋ จ์„ ์‹œ์ž‘ํ•ฉ๋‹ˆ๋‹ค + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹ + +YOLO ์„ธ๊ทธ๋ฉ˜ํ…Œ์ด์…˜ ๋ฐ์ดํ„ฐ์…‹ ํ˜•์‹์€ [๋ฐ์ดํ„ฐ์…‹ ๊ฐ€์ด๋“œ](../../../datasets/segment/index.md)์—์„œ ์ž์„ธํžˆ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. ๊ธฐ์กด ๋ฐ์ดํ„ฐ์…‹ (COCO ๋“ฑ)์„ YOLO ํ˜•์‹์œผ๋กœ ๋ณ€ํ™˜ํ•˜๋ ค๋ฉด Ultralytics์˜ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ๋„๊ตฌ๋ฅผ ์ด์šฉํ•˜์„ธ์š”. + +## ๊ฒ€์ฆ + +COCO128-seg ๋ฐ์ดํ„ฐ์…‹์—์„œ ํ›ˆ๋ จ๋œ YOLOv8n-seg ๋ชจ๋ธ์˜ ์ •ํ™•๋„๋ฅผ ๊ฒ€์ฆํ•ฉ๋‹ˆ๋‹ค. ๋ชจ๋ธ์€ ํ›ˆ๋ จํ•  ๋•Œ์˜ `data`์™€ ์ธ์ž๋ฅผ ๋ชจ๋ธ ์†์„ฑ์œผ๋กœ ๊ธฐ์–ตํ•˜๊ธฐ ๋•Œ๋ฌธ์— ๋ณ„๋„์˜ ์ธ์ž๋ฅผ ์ „๋‹ฌํ•  ํ•„์š”๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค + model = YOLO('yolov8n-seg.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + model = YOLO('path/to/best.pt') # ์ปค์Šคํ…€ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + + # ๋ชจ๋ธ์„ ๊ฒ€์ฆํ•ฉ๋‹ˆ๋‹ค + metrics = model.val() # ๋ฐ์ดํ„ฐ์…‹๊ณผ ์„ค์ •์ด ๊ธฐ์–ต๋˜์–ด ์žˆ์–ด ์ธ์ž๊ฐ€ ํ•„์š” ์—†์Šต๋‹ˆ๋‹ค + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # ๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ๋ณ„ map50-95(B) ๋ฆฌ์ŠคํŠธ + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # ๊ฐ ์นดํ…Œ๊ณ ๋ฆฌ๋ณ„ map50-95(M) ๋ฆฌ์ŠคํŠธ + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # ๊ณต์‹ ๋ชจ๋ธ๋กœ ๊ฒ€์ฆ + yolo segment val model=path/to/best.pt # ์ปค์Šคํ…€ ๋ชจ๋ธ๋กœ ๊ฒ€์ฆ + ``` + +## ์˜ˆ์ธก + +ํ›ˆ๋ จ๋œ YOLOv8n-seg ๋ชจ๋ธ์„ ์‚ฌ์šฉํ•˜์—ฌ ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก์„ ์‹คํ–‰ํ•ฉ๋‹ˆ๋‹ค. + +!!! 
Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค + model = YOLO('yolov8n-seg.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + model = YOLO('path/to/best.pt') # ์ปค์Šคํ…€ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + + # ๋ชจ๋ธ๋กœ ์˜ˆ์ธก์„ ์ง„ํ–‰ํ•ฉ๋‹ˆ๋‹ค + results = model('https://ultralytics.com/images/bus.jpg') # ์ด๋ฏธ์ง€์— ๋Œ€ํ•œ ์˜ˆ์ธก + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # ๊ณต์‹ ๋ชจ๋ธ๋กœ ์˜ˆ์ธก ์‹คํ–‰ + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ์ปค์Šคํ…€ ๋ชจ๋ธ๋กœ ์˜ˆ์ธก ์‹คํ–‰ + ``` + +`predict` ๋ชจ๋“œ์˜ ์ „์ฒด ์„ธ๋ถ€ ์‚ฌํ•ญ์€ [์˜ˆ์ธก](https://docs.ultralytics.com/modes/predict/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +## ์ˆ˜์ถœ + +ONNX, CoreML ๋“ฑ๊ณผ ๊ฐ™์€ ๋‹ค๋ฅธ ํ˜•์‹์œผ๋กœ YOLOv8n-seg ๋ชจ๋ธ์„ ์ˆ˜์ถœํ•ฉ๋‹ˆ๋‹ค. + +!!! Example "์˜ˆ์ œ" + + === "ํŒŒ์ด์ฌ" + + ```python + from ultralytics import YOLO + + # ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค + model = YOLO('yolov8n-seg.pt') # ๊ณต์‹ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + model = YOLO('path/to/best.pt') # ์ปค์Šคํ…€ ํ›ˆ๋ จ ๋ชจ๋ธ์„ ๋ถˆ๋Ÿฌ์˜ด + + # ๋ชจ๋ธ์„ ์ˆ˜์ถœํ•ฉ๋‹ˆ๋‹ค + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # ๊ณต์‹ ๋ชจ๋ธ์„ ์ˆ˜์ถœํ•ฉ๋‹ˆ๋‹ค + yolo export model=path/to/best.pt format=onnx # ์ปค์Šคํ…€ ํ›ˆ๋ จ ๋ชจ๋ธ์„ ์ˆ˜์ถœํ•ฉ๋‹ˆ๋‹ค + ``` + +์•„๋ž˜ ํ‘œ์— ๋‚˜์—ด๋œ ๊ฒƒ์€ ๊ฐ€๋Šฅํ•œ YOLOv8-seg ์ˆ˜์ถœ ํ˜•์‹์ž…๋‹ˆ๋‹ค. ์ˆ˜์ถœ ์™„๋ฃŒ ํ›„ ๋ชจ๋ธ ์‚ฌ์šฉ ์˜ˆ๋Š” ๋ชจ๋ธ์„ ์ง์ ‘ ์˜ˆ์ธกํ•˜๊ฑฐ๋‚˜ ๊ฒ€์ฆํ•  ๋•Œ ์‚ฌ์šฉํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. + +| ํ˜•์‹ | `format` ์ธ์ž | ๋ชจ๋ธ | ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ | ์ธ์ž | +|--------------------------------------------------------------------|---------------|-------------------------------|-------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +`export`์˜ ์ „์ฒด ์„ธ๋ถ€ ์‚ฌํ•ญ์€ [์ˆ˜์ถœ](https://docs.ultralytics.com/modes/export/) ํŽ˜์ด์ง€์—์„œ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค. 
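+
+위에서 설명한 것처럼 내보낸 모델로 바로 예측하거나 검증할 수 있습니다. 다음은 이를 보여주는 간단한 스케치입니다(`export()`가 내보낸 파일 경로를 반환한다고 가정합니다):
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-seg.pt')
+path = model.export(format='onnx')  # 내보낸 파일 경로를 반환한다고 가정
+
+onnx_model = YOLO(path)  # 내보낸 ONNX 모델을 그대로 로드
+results = onnx_model('https://ultralytics.com/images/bus.jpg')
+```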
diff --git a/ultralytics/docs/ko/tasks/segment.md:Zone.Identifier b/ultralytics/docs/ko/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ko/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/mkdocs.yml b/ultralytics/docs/mkdocs.yml new file mode 100755 index 0000000..37323f9 --- /dev/null +++ b/ultralytics/docs/mkdocs.yml @@ -0,0 +1,578 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license + +site_name: Ultralytics YOLOv8 Docs +site_description: Explore Ultralytics YOLOv8, a cutting-edge real-time object detection and image segmentation model for various applications and hardware platforms. +site_url: https://docs.ultralytics.com +site_author: Ultralytics +repo_url: https://github.com/ultralytics/ultralytics +edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/en/ +repo_name: ultralytics/ultralytics +remote_name: https://github.com/ultralytics/docs +docs_dir: 'en/' # where to find the markdown files +site_dir: '../site/' # where to publish to + +theme: + name: material + language: en + custom_dir: overrides/ + logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg + favicon: assets/favicon.ico + icon: + repo: fontawesome/brands/github + # font: # disabled for faster page load times + # text: Helvetica + # code: Roboto Mono + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to system preference + - media: "(prefers-color-scheme: light)" + scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - search.highlight + - search.share + - search.suggest + - toc.follow + - navigation.top + - navigation.tabs + - navigation.tabs.sticky + - navigation.prune + - navigation.footer + - navigation.tracking + - navigation.instant + - navigation.instant.progress + - navigation.indexes + - navigation.sections + - content.tabs.link # all code tabs change simultaneously + +# Customization +copyright: ยฉ 2023 Ultralytics Inc. All rights reserved. 
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - Home: + - Home: index.md + - Quickstart: quickstart.md + - Modes: + - modes/index.md + - Train: modes/train.md + - Val: modes/val.md + - Predict: modes/predict.md + - Export: modes/export.md + - Track: modes/track.md + - Benchmark: modes/benchmark.md + - Tasks: + - tasks/index.md + - Detect: tasks/detect.md + - Segment: tasks/segment.md + - Classify: tasks/classify.md + - Pose: tasks/pose.md + - Languages: + - ๐Ÿ‡ฌ๐Ÿ‡ง  English: https://docs.ultralytics.com/ + - ๐Ÿ‡จ๐Ÿ‡ณ  ็ฎ€ไฝ“ไธญๆ–‡: https://docs.ultralytics.com/zh/ + - ๐Ÿ‡ฐ๐Ÿ‡ท  ํ•œ๊ตญ์–ด: https://docs.ultralytics.com/ko/ + - ๐Ÿ‡ฏ๐Ÿ‡ต  ๆ—ฅๆœฌ่ชž: https://docs.ultralytics.com/ja/ + - ๐Ÿ‡ท๐Ÿ‡บ  ะ ัƒััะบะธะน: https://docs.ultralytics.com/ru/ + - ๐Ÿ‡ฉ๐Ÿ‡ช  Deutsch: https://docs.ultralytics.com/de/ + - ๐Ÿ‡ซ๐Ÿ‡ท  Franรงais: https://docs.ultralytics.com/fr/ + - ๐Ÿ‡ช๐Ÿ‡ธ  Espaรฑol: https://docs.ultralytics.com/es/ + - ๐Ÿ‡ต๐Ÿ‡น  Portuguรชs: https://docs.ultralytics.com/pt/ + - ๐Ÿ‡ฎ๐Ÿ‡ณ  เคนเคฟเคจเฅเคฆเฅ€: https://docs.ultralytics.com/hi/ + - ๐Ÿ‡ธ๐Ÿ‡ฆ  ุงู„ุนุฑุจูŠุฉ: https://docs.ultralytics.com/ar/ + - Quickstart: quickstart.md + - Modes: + - modes/index.md + - Train: modes/train.md + - Val: modes/val.md + - Predict: modes/predict.md + - Export: modes/export.md + - Track: modes/track.md + - Benchmark: modes/benchmark.md + - Tasks: + - tasks/index.md + - Detect: tasks/detect.md + - Segment: tasks/segment.md + - Classify: tasks/classify.md + - Pose: tasks/pose.md + 
- Models: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - Datasets: + - datasets/index.md + - Detection: + - datasets/detect/index.md + - Argoverse: datasets/detect/argoverse.md + - COCO: datasets/detect/coco.md + - COCO8: datasets/detect/coco8.md + - GlobalWheat2020: datasets/detect/globalwheat2020.md + - Objects365: datasets/detect/objects365.md + - OpenImagesV7: datasets/detect/open-images-v7.md + - SKU-110K: datasets/detect/sku-110k.md + - VisDrone: datasets/detect/visdrone.md + - VOC: datasets/detect/voc.md + - xView: datasets/detect/xview.md + - Segmentation: + - datasets/segment/index.md + - COCO: datasets/segment/coco.md + - COCO8-seg: datasets/segment/coco8-seg.md + - Pose: + - datasets/pose/index.md + - COCO: datasets/pose/coco.md + - COCO8-pose: datasets/pose/coco8-pose.md + - Tiger-pose: datasets/pose/tiger-pose.md + - Classification: + - datasets/classify/index.md + - Caltech 101: datasets/classify/caltech101.md + - Caltech 256: datasets/classify/caltech256.md + - CIFAR-10: datasets/classify/cifar10.md + - CIFAR-100: datasets/classify/cifar100.md + - Fashion-MNIST: datasets/classify/fashion-mnist.md + - ImageNet: datasets/classify/imagenet.md + - ImageNet-10: datasets/classify/imagenet10.md + - Imagenette: datasets/classify/imagenette.md + - Imagewoof: datasets/classify/imagewoof.md + - MNIST: datasets/classify/mnist.md + - Oriented Bounding Boxes (OBB): + - datasets/obb/index.md + - DOTAv2: datasets/obb/dota-v2.md + - Multi-Object Tracking: + - datasets/track/index.md + - Guides: + - guides/index.md + - YOLO Common Issues: guides/yolo-common-issues.md + - YOLO Performance Metrics: guides/yolo-performance-metrics.md + - YOLO Thread-Safe Inference: guides/yolo-thread-safe-inference.md + - Model Deployment Options: guides/model-deployment-options.md + - K-Fold Cross Validation: guides/kfold-cross-validation.md + - Hyperparameter Tuning: guides/hyperparameter-tuning.md + - SAHI Tiled Inference: guides/sahi-tiled-inference.md + - AzureML Quickstart: guides/azureml-quickstart.md + - Conda Quickstart: guides/conda-quickstart.md + - Docker Quickstart: guides/docker-quickstart.md + - Raspberry Pi: guides/raspberry-pi.md + - Triton Inference Server: guides/triton-inference-server.md + - Isolating Segmentation Objects: guides/isolating-segmentation-objects.md + - Real-World Projects: + - Object Counting: guides/object-counting.md + - Workouts Monitoring: guides/workouts-monitoring.md + - Objects Counting in Regions: guides/region-counting.md + - Security Alarm System: guides/security-alarm-system.md + - Heatmaps: guides/heatmaps.md + - Instance Segmentation with Object Tracking: guides/instance-segmentation-and-tracking.md + - VisionEye Mapping: guides/vision-eye.md + - Integrations: + - integrations/index.md + - Comet ML: integrations/comet.md + - OpenVINO: integrations/openvino.md + - Ray Tune: integrations/ray-tune.md + - Roboflow: integrations/roboflow.md + - MLflow: integrations/mlflow.md + - ClearML: integrations/clearml.md + - DVC: integrations/dvc.md + - Usage: + - CLI: usage/cli.md + - Python: usage/python.md + - Callbacks: 
usage/callbacks.md + - Configuration: usage/cfg.md + - Advanced Customization: usage/engine.md + - YOLOv5: + - yolov5/index.md + - Quickstart: yolov5/quickstart_tutorial.md + - Environments: + - Amazon Web Services (AWS): yolov5/environments/aws_quickstart_tutorial.md + - Google Cloud (GCP): yolov5/environments/google_cloud_quickstart_tutorial.md + - AzureML: yolov5/environments/azureml_quickstart_tutorial.md + - Docker Image: yolov5/environments/docker_image_quickstart_tutorial.md + - Tutorials: + - Train Custom Data: yolov5/tutorials/train_custom_data.md + - Tips for Best Training Results: yolov5/tutorials/tips_for_best_training_results.md + - Multi-GPU Training: yolov5/tutorials/multi_gpu_training.md + - PyTorch Hub: yolov5/tutorials/pytorch_hub_model_loading.md + - TFLite, ONNX, CoreML, TensorRT Export: yolov5/tutorials/model_export.md + - NVIDIA Jetson Nano Deployment: yolov5/tutorials/running_on_jetson_nano.md + - Test-Time Augmentation (TTA): yolov5/tutorials/test_time_augmentation.md + - Model Ensembling: yolov5/tutorials/model_ensembling.md + - Pruning/Sparsity Tutorial: yolov5/tutorials/model_pruning_and_sparsity.md + - Hyperparameter evolution: yolov5/tutorials/hyperparameter_evolution.md + - Transfer learning with frozen layers: yolov5/tutorials/transfer_learning_with_frozen_layers.md + - Architecture Summary: yolov5/tutorials/architecture_description.md + - Roboflow Datasets: yolov5/tutorials/roboflow_datasets_integration.md + - Neural Magic's DeepSparse: yolov5/tutorials/neural_magic_pruning_quantization.md + - Comet Logging: yolov5/tutorials/comet_logging_integration.md + - Clearml Logging: yolov5/tutorials/clearml_logging_integration.md + - HUB: + - hub/index.md + - Quickstart: hub/quickstart.md + - Datasets: hub/datasets.md + - Projects: hub/projects.md + - Models: hub/models.md + - Integrations: hub/integrations.md + - Ultralytics HUB App: + - hub/app/index.md + - 'iOS': hub/app/ios.md + - 'Android': hub/app/android.md + - Inference API: hub/inference_api.md + - Reference: + - cfg: + - __init__: reference/cfg/__init__.md + - data: + - annotator: reference/data/annotator.md + - augment: reference/data/augment.md + - base: reference/data/base.md + - build: reference/data/build.md + - converter: reference/data/converter.md + - dataset: reference/data/dataset.md + - loaders: reference/data/loaders.md + - utils: reference/data/utils.md + - engine: + - exporter: reference/engine/exporter.md + - model: reference/engine/model.md + - predictor: reference/engine/predictor.md + - results: reference/engine/results.md + - trainer: reference/engine/trainer.md + - tuner: reference/engine/tuner.md + - validator: reference/engine/validator.md + - hub: + - __init__: reference/hub/__init__.md + - auth: reference/hub/auth.md + - session: reference/hub/session.md + - utils: reference/hub/utils.md + - models: + - fastsam: + - model: reference/models/fastsam/model.md + - predict: reference/models/fastsam/predict.md + - prompt: reference/models/fastsam/prompt.md + - utils: reference/models/fastsam/utils.md + - val: reference/models/fastsam/val.md + - nas: + - model: reference/models/nas/model.md + - predict: reference/models/nas/predict.md + - val: reference/models/nas/val.md + - rtdetr: + - model: reference/models/rtdetr/model.md + - predict: reference/models/rtdetr/predict.md + - train: reference/models/rtdetr/train.md + - val: reference/models/rtdetr/val.md + - sam: + - amg: reference/models/sam/amg.md + - build: reference/models/sam/build.md + - model: reference/models/sam/model.md + - 
modules: + - decoders: reference/models/sam/modules/decoders.md + - encoders: reference/models/sam/modules/encoders.md + - sam: reference/models/sam/modules/sam.md + - tiny_encoder: reference/models/sam/modules/tiny_encoder.md + - transformer: reference/models/sam/modules/transformer.md + - predict: reference/models/sam/predict.md + - utils: + - loss: reference/models/utils/loss.md + - ops: reference/models/utils/ops.md + - yolo: + - classify: + - predict: reference/models/yolo/classify/predict.md + - train: reference/models/yolo/classify/train.md + - val: reference/models/yolo/classify/val.md + - detect: + - predict: reference/models/yolo/detect/predict.md + - train: reference/models/yolo/detect/train.md + - val: reference/models/yolo/detect/val.md + - model: reference/models/yolo/model.md + - pose: + - predict: reference/models/yolo/pose/predict.md + - train: reference/models/yolo/pose/train.md + - val: reference/models/yolo/pose/val.md + - segment: + - predict: reference/models/yolo/segment/predict.md + - train: reference/models/yolo/segment/train.md + - val: reference/models/yolo/segment/val.md + - nn: + - autobackend: reference/nn/autobackend.md + - modules: + - block: reference/nn/modules/block.md + - conv: reference/nn/modules/conv.md + - head: reference/nn/modules/head.md + - transformer: reference/nn/modules/transformer.md + - utils: reference/nn/modules/utils.md + - tasks: reference/nn/tasks.md + - solutions: + - ai_gym: reference/solutions/ai_gym.md + - object_counter: reference/solutions/object_counter.md + - heatmap: reference/solutions/heatmap.md + - trackers: + - basetrack: reference/trackers/basetrack.md + - bot_sort: reference/trackers/bot_sort.md + - byte_tracker: reference/trackers/byte_tracker.md + - track: reference/trackers/track.md + - utils: + - gmc: reference/trackers/utils/gmc.md + - kalman_filter: reference/trackers/utils/kalman_filter.md + - matching: reference/trackers/utils/matching.md + - utils: + - __init__: reference/utils/__init__.md + - autobatch: reference/utils/autobatch.md + - benchmarks: reference/utils/benchmarks.md + - callbacks: + - base: reference/utils/callbacks/base.md + - clearml: reference/utils/callbacks/clearml.md + - comet: reference/utils/callbacks/comet.md + - dvc: reference/utils/callbacks/dvc.md + - hub: reference/utils/callbacks/hub.md + - mlflow: reference/utils/callbacks/mlflow.md + - neptune: reference/utils/callbacks/neptune.md + - raytune: reference/utils/callbacks/raytune.md + - tensorboard: reference/utils/callbacks/tensorboard.md + - wb: reference/utils/callbacks/wb.md + - checks: reference/utils/checks.md + - dist: reference/utils/dist.md + - downloads: reference/utils/downloads.md + - errors: reference/utils/errors.md + - files: reference/utils/files.md + - instance: reference/utils/instance.md + - loss: reference/utils/loss.md + - metrics: reference/utils/metrics.md + - ops: reference/utils/ops.md + - patches: reference/utils/patches.md + - plotting: reference/utils/plotting.md + - tal: reference/utils/tal.md + - torch_utils: reference/utils/torch_utils.md + - triton: reference/utils/triton.md + - tuner: reference/utils/tuner.md + + - Help: + - Help: help/index.md + - Frequently Asked Questions (FAQ): help/FAQ.md + - Contributing Guide: help/contributing.md + - Continuous Integration (CI) Guide: help/CI.md + - Contributor License Agreement (CLA): help/CLA.md + - Minimum Reproducible Example (MRE) Guide: help/minimum_reproducible_example.md + - Code of Conduct: help/code_of_conduct.md + - Environmental, Health and Safety 
(EHS) Policy: help/environmental-health-safety.md + - Security Policy: help/security.md + - Privacy Policy: help/privacy.md + +# Plugins including 301 redirects navigation --------------------------------------------------------------------------- +plugins: + - search: + lang: en + - mkdocstrings: + enabled: true + default_handler: python + handlers: + python: + options: + docstring_style: google + show_root_heading: true + show_source: true + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 + - redirects: + redirect_maps: + callbacks.md: usage/callbacks.md + cfg.md: usage/cfg.md + cli.md: usage/cli.md + config.md: usage/cfg.md + engine.md: usage/engine.md + environments/AWS-Quickstart.md: yolov5/environments/aws_quickstart_tutorial.md + environments/Docker-Quickstart.md: yolov5/environments/docker_image_quickstart_tutorial.md + environments/GCP-Quickstart.md: yolov5/environments/google_cloud_quickstart_tutorial.md + FAQ/augmentation.md: yolov5/tutorials/tips_for_best_training_results.md + package-framework.md: index.md + package-framework/mock_detector.md: index.md + predict.md: modes/predict.md + python.md: usage/python.md + quick-start.md: quickstart.md + app.md: hub/app/index.md + sdk.md: index.md + usage/hyperparameter_tuning.md: integrations/ray-tune.md + reference/base_pred.md: reference/engine/predictor.md + reference/base_trainer.md: reference/engine/trainer.md + reference/exporter.md: reference/engine/exporter.md + reference/model.md: reference/engine/model.md + reference/nn.md: reference/nn/modules/head.md + reference/ops.md: reference/utils/ops.md + reference/results.md: reference/engine/results.md + reference/base_val.md: index.md + tasks/classification.md: tasks/classify.md + tasks/detection.md: tasks/detect.md + tasks/segmentation.md: tasks/segment.md + tasks/keypoints.md: tasks/pose.md + tasks/tracking.md: modes/track.md + SECURITY.md: help/security.md + tutorials/architecture-summary.md: yolov5/tutorials/architecture_description.md + tutorials/clearml-logging.md: yolov5/tutorials/clearml_logging_integration.md + tutorials/comet-logging.md: yolov5/tutorials/comet_logging_integration.md + tutorials/hyperparameter-evolution.md: yolov5/tutorials/hyperparameter_evolution.md + tutorials/model-ensembling.md: yolov5/tutorials/model_ensembling.md + tutorials/multi-gpu-training.md: yolov5/tutorials/multi_gpu_training.md + tutorials/nvidia-jetson.md: yolov5/tutorials/running_on_jetson_nano.md + tutorials/pruning-sparsity.md: yolov5/tutorials/model_pruning_and_sparsity.md + tutorials/pytorch-hub.md: yolov5/tutorials/pytorch_hub_model_loading.md + tutorials/roboflow.md: yolov5/tutorials/roboflow_datasets_integration.md + tutorials/test-time-augmentation.md: yolov5/tutorials/test_time_augmentation.md + tutorials/torchscript-onnx-coreml-export.md: yolov5/tutorials/model_export.md + tutorials/train-custom-datasets.md: yolov5/tutorials/train_custom_data.md + tutorials/training-tips-best-results.md: yolov5/tutorials/tips_for_best_training_results.md + tutorials/transfer-learning-froze-layers.md: yolov5/tutorials/transfer_learning_with_frozen_layers.md + tutorials/weights-and-biasis-logging.md: yolov5/tutorials/comet_logging_integration.md + yolov5/pytorch_hub.md: yolov5/tutorials/pytorch_hub_model_loading.md + yolov5/hyp_evolution.md: yolov5/tutorials/hyperparameter_evolution.md + yolov5/pruning_sparsity.md: 
yolov5/tutorials/model_pruning_and_sparsity.md + yolov5/roboflow.md: yolov5/tutorials/roboflow_datasets_integration.md + yolov5/comet.md: yolov5/tutorials/comet_logging_integration.md + yolov5/clearml.md: yolov5/tutorials/clearml_logging_integration.md + yolov5/tta.md: yolov5/tutorials/test_time_augmentation.md + yolov5/multi_gpu_training.md: yolov5/tutorials/multi_gpu_training.md + yolov5/ensemble.md: yolov5/tutorials/model_ensembling.md + yolov5/jetson_nano.md: yolov5/tutorials/running_on_jetson_nano.md + yolov5/transfer_learn_frozen.md: yolov5/tutorials/transfer_learning_with_frozen_layers.md + yolov5/neural_magic.md: yolov5/tutorials/neural_magic_pruning_quantization.md + yolov5/train_custom_data.md: yolov5/tutorials/train_custom_data.md + yolov5/architecture.md: yolov5/tutorials/architecture_description.md + yolov5/export.md: yolov5/tutorials/model_export.md + yolov5/yolov5_quickstart_tutorial.md: yolov5/quickstart_tutorial.md + yolov5/tips_for_best_training_results.md: yolov5/tutorials/tips_for_best_training_results.md + yolov5/tutorials/yolov5_neural_magic_tutorial.md: yolov5/tutorials/neural_magic_pruning_quantization.md + yolov5/tutorials/model_ensembling_tutorial.md: yolov5/tutorials/model_ensembling.md + yolov5/tutorials/pytorch_hub_tutorial.md: yolov5/tutorials/pytorch_hub_model_loading.md + yolov5/tutorials/yolov5_architecture_tutorial.md: yolov5/tutorials/architecture_description.md + yolov5/tutorials/multi_gpu_training_tutorial.md: yolov5/tutorials/multi_gpu_training.md + yolov5/tutorials/yolov5_pytorch_hub_tutorial.md: yolov5/tutorials/pytorch_hub_model_loading.md + yolov5/tutorials/model_export_tutorial.md: yolov5/tutorials/model_export.md + yolov5/tutorials/jetson_nano_tutorial.md: yolov5/tutorials/running_on_jetson_nano.md + yolov5/tutorials/yolov5_model_ensembling_tutorial.md: yolov5/tutorials/model_ensembling.md + yolov5/tutorials/roboflow_integration.md: yolov5/tutorials/roboflow_datasets_integration.md + yolov5/tutorials/pruning_and_sparsity_tutorial.md: yolov5/tutorials/model_pruning_and_sparsity.md + yolov5/tutorials/yolov5_transfer_learning_with_frozen_layers_tutorial.md: yolov5/tutorials/transfer_learning_with_frozen_layers.md + yolov5/tutorials/transfer_learning_with_frozen_layers_tutorial.md: yolov5/tutorials/transfer_learning_with_frozen_layers.md + yolov5/tutorials/yolov5_model_export_tutorial.md: yolov5/tutorials/model_export.md + yolov5/tutorials/neural_magic_tutorial.md: yolov5/tutorials/neural_magic_pruning_quantization.md + yolov5/tutorials/yolov5_clearml_integration_tutorial.md: yolov5/tutorials/clearml_logging_integration.md + yolov5/tutorials/yolov5_train_custom_data.md: yolov5/tutorials/train_custom_data.md + yolov5/tutorials/comet_integration_tutorial.md: yolov5/tutorials/comet_logging_integration.md + yolov5/tutorials/yolov5_pruning_and_sparsity_tutorial.md: yolov5/tutorials/model_pruning_and_sparsity.md + yolov5/tutorials/yolov5_jetson_nano_tutorial.md: yolov5/tutorials/running_on_jetson_nano.md + yolov5/tutorials/yolov5_roboflow_integration.md: yolov5/tutorials/roboflow_datasets_integration.md + yolov5/tutorials/hyperparameter_evolution_tutorial.md: yolov5/tutorials/hyperparameter_evolution.md + yolov5/tutorials/yolov5_hyperparameter_evolution_tutorial.md: yolov5/tutorials/hyperparameter_evolution.md + yolov5/tutorials/clearml_integration_tutorial.md: yolov5/tutorials/clearml_logging_integration.md + yolov5/tutorials/test_time_augmentation_tutorial.md: yolov5/tutorials/test_time_augmentation.md + 
yolov5/tutorials/yolov5_test_time_augmentation_tutorial.md: yolov5/tutorials/test_time_augmentation.md
+      yolov5/environments/yolov5_amazon_web_services_quickstart_tutorial.md: yolov5/environments/aws_quickstart_tutorial.md
+      yolov5/environments/yolov5_google_cloud_platform_quickstart_tutorial.md: yolov5/environments/google_cloud_quickstart_tutorial.md
+      yolov5/environments/yolov5_docker_image_quickstart_tutorial.md: yolov5/environments/docker_image_quickstart_tutorial.md
diff --git a/ultralytics/docs/mkdocs_ar.yml b/ultralytics/docs/mkdocs_ar.yml
new file mode 100755
index 0000000..966e196
--- /dev/null
+++ b/ultralytics/docs/mkdocs_ar.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: وثائق Ultralytics YOLOv8
+site_description: استكشف Ultralytics YOLOv8، نموذج الكشف عن الأجسام وتقطيع الصورة في الوقت الحقيقي المتطور لمختلف التطبيقات ومنصات الأجهزة.
+site_url: https://docs.ultralytics.com/ar/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/ar/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'ar/' # where to find the markdown files
+site_dir: '../site/ar' # where to publish to
+
+theme:
+  name: material
+  language: ar
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - ุงู„ุตูุญุฉ ุงู„ุฑุฆูŠุณูŠุฉ: + - ุงู„ุตูุญุฉ ุงู„ุฑุฆูŠุณูŠุฉ: index.md + - ุงู„ุจุฏุก ุงู„ุณุฑูŠุน: quickstart.md + - ุงู„ุฃูˆุถุงุน: + - modes/index.md + - ุงู„ุชุฏุฑูŠุจ: modes/train.md + - ุงู„ุชุญู‚ู‚: modes/val.md + - ุงู„ุชู†ุจุค: modes/predict.md + - ุงู„ุชุตุฏูŠุฑ: modes/export.md + - ุงู„ุชุชุจุน: modes/track.md + - ุงู„ู…ุนุงูŠูŠุฑ: modes/benchmark.md + - ุงู„ู…ู‡ุงู…: + - tasks/index.md + - ุงู„ูƒุดู: tasks/detect.md + - ุงู„ุชุฌุฒุฆุฉ: tasks/segment.md + - ุงู„ุชุตู†ูŠู: tasks/classify.md + - ุงู„ูˆุถุนูŠุฉ: tasks/pose.md + - ุงู„ุจุฏุก ุงู„ุณุฑูŠุน: quickstart.md + - ุงู„ุฃูˆุถุงุน: + - modes/index.md + - ุงู„ุชุฏุฑูŠุจ: modes/train.md + - ุงู„ุชุญู‚ู‚: modes/val.md + - ุงู„ุชู†ุจุค: modes/predict.md + - ุงู„ุชุตุฏูŠุฑ: modes/export.md + - ุงู„ุชุชุจุน: modes/track.md + - ุงู„ู…ุนุงูŠูŠุฑ: modes/benchmark.md + - ุงู„ู…ู‡ุงู…: + - tasks/index.md + - ุงู„ูƒุดู: tasks/detect.md + - ุงู„ุชุฌุฒุฆุฉ: tasks/segment.md + - ุงู„ุชุตู†ูŠู: tasks/classify.md + - ุงู„ูˆุถุนูŠุฉ: tasks/pose.md + - ุงู„ู†ู…ุงุฐุฌ: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural 
Architecture Search): models/yolo-nas.md
+  - RT-DETR (Realtime Detection Transformer): models/rtdetr.md
+  - مجموعات البيانات:
+    - datasets/index.md
+
+# Plugins including 301 redirects navigation ---------------------------------------------------------------------------
+plugins:
+  - search:
+      lang: ar
+  - ultralytics:
+      add_desc: False
+      add_image: True
+      add_share_buttons: True
+      default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529
diff --git a/ultralytics/docs/mkdocs_de.yml b/ultralytics/docs/mkdocs_de.yml
new file mode 100755
index 0000000..c63c47d
--- /dev/null
+++ b/ultralytics/docs/mkdocs_de.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: Ultralytics YOLOv8 Dokumentation
+site_description: Entdecken Sie Ultralytics YOLOv8, ein hochmodernes Echtzeit-Objekterkennungs- und Bildsegmentierungsmodell für verschiedene Anwendungen und Hardware-Plattformen.
+site_url: https://docs.ultralytics.com/de/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/de/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'de/' # where to find the markdown files
+site_dir: '../site/de' # where to publish to
+
+theme:
+  name: material
+  language: de
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - Startseite: + - Startseite: index.md + - Schnellstart: quickstart.md + - Modi: + - modes/index.md + - Training: modes/train.md + - Validierung: modes/val.md + - Vorhersage: modes/predict.md + - Exportieren: modes/export.md + - Verfolgen: modes/track.md + - Benchmarking: modes/benchmark.md + - Aufgaben: + - tasks/index.md + - Erkennung: tasks/detect.md + - Segmentierung: tasks/segment.md + - Klassifizierung: tasks/classify.md + - Pose: tasks/pose.md + - Schnellstart: quickstart.md + - Modi: + - modes/index.md + - Training: modes/train.md + - Validierung: modes/val.md + - Vorhersage: modes/predict.md + - Exportieren: modes/export.md + - Verfolgen: modes/track.md + - Benchmarking: modes/benchmark.md + - Aufgaben: + - tasks/index.md + - Erkennung: tasks/detect.md + - Segmentierung: tasks/segment.md + - Klassifizierung: tasks/classify.md + - Pose: tasks/pose.md + - Modelle: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - Datensรคtze: + - datasets/index.md + +# Plugins 
including 301 redirects navigation ---------------------------------------------------------------------------
+plugins:
+  - search:
+      lang: de
+  - ultralytics:
+      add_desc: False
+      add_image: True
+      add_share_buttons: True
+      default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529
diff --git a/ultralytics/docs/mkdocs_es.yml b/ultralytics/docs/mkdocs_es.yml
new file mode 100755
index 0000000..ec29a50
--- /dev/null
+++ b/ultralytics/docs/mkdocs_es.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: Documentación Ultralytics YOLOv8
+site_description: Explore Ultralytics YOLOv8, un modelo avanzado de detección de objetos y segmentación de imágenes en tiempo real para diversas aplicaciones y plataformas de hardware.
+site_url: https://docs.ultralytics.com/es/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/es/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'es/' # where to find the markdown files
+site_dir: '../site/es' # where to publish to
+
+theme:
+  name: material
+  language: es
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - Inicio: + - Inicio: index.md + - Inicio rรกpido: quickstart.md + - Modos: + - modes/index.md + - Entrenamiento: modes/train.md + - Validaciรณn: modes/val.md + - Predicciรณn: modes/predict.md + - Exportar: modes/export.md + - Seguimiento: modes/track.md + - Benchmarking: modes/benchmark.md + - Tareas: + - tasks/index.md + - Detecciรณn: tasks/detect.md + - Segmentaciรณn: tasks/segment.md + - Clasificaciรณn: tasks/classify.md + - Pose: tasks/pose.md + - Inicio rรกpido: quickstart.md + - Modos: + - modes/index.md + - Entrenamiento: modes/train.md + - Validaciรณn: modes/val.md + - Predicciรณn: modes/predict.md + - Exportar: modes/export.md + - Seguimiento: modes/track.md + - Benchmarking: modes/benchmark.md + - Tareas: + - tasks/index.md + - Detecciรณn: tasks/detect.md + - Segmentaciรณn: tasks/segment.md + - Clasificaciรณn: tasks/classify.md + - Pose: tasks/pose.md + - Modelos: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - Conjuntos de datos: + - datasets/index.md + +# 
Plugins including 301 redirects navigation ---------------------------------------------------------------------------
+plugins:
+  - search:
+      lang: es
+  - ultralytics:
+      add_desc: False
+      add_image: True
+      add_share_buttons: True
+      default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529
diff --git a/ultralytics/docs/mkdocs_fr.yml b/ultralytics/docs/mkdocs_fr.yml
new file mode 100755
index 0000000..37f99d8
--- /dev/null
+++ b/ultralytics/docs/mkdocs_fr.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: Documentation Ultralytics YOLOv8
+site_description: Explorez Ultralytics YOLOv8, un modèle de pointe pour la détection d'objets et la segmentation d'image en temps réel, adapté à diverses applications et plateformes matérielles.
+site_url: https://docs.ultralytics.com/fr/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/fr/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'fr/' # where to find the markdown files
+site_dir: '../site/fr' # where to publish to
+
+theme:
+  name: material
+  language: fr
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - Accueil: + - Accueil: index.md + - Dรฉmarrage rapide: quickstart.md + - Modes: + - modes/index.md + - Entraรฎnement: modes/train.md + - Validation: modes/val.md + - Prรฉdiction: modes/predict.md + - Exportation: modes/export.md + - Suivi: modes/track.md + - Benchmarking: modes/benchmark.md + - Tรขches: + - tasks/index.md + - Dรฉtection: tasks/detect.md + - Segmentation: tasks/segment.md + - Classification: tasks/classify.md + - Pose: tasks/pose.md + - Dรฉmarrage rapide: quickstart.md + - Modes: + - modes/index.md + - Entraรฎnement: modes/train.md + - Validation: modes/val.md + - Prรฉdiction: modes/predict.md + - Exportation: modes/export.md + - Suivi: modes/track.md + - Benchmarking: modes/benchmark.md + - Tรขches: + - tasks/index.md + - Dรฉtection: tasks/detect.md + - Segmentation: tasks/segment.md + - Classification: tasks/classify.md + - Pose: tasks/pose.md + - Modรจles: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - Jeux de donnรฉes: + - datasets/index.md + +# 
Plugins including 301 redirects navigation ---------------------------------------------------------------------------
+plugins:
+  - search:
+      lang: fr
+  - ultralytics:
+      add_desc: False
+      add_image: True
+      add_share_buttons: True
+      default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529
diff --git a/ultralytics/docs/mkdocs_hi.yml b/ultralytics/docs/mkdocs_hi.yml
new file mode 100755
index 0000000..45314bb
--- /dev/null
+++ b/ultralytics/docs/mkdocs_hi.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: Ultralytics YOLOv8 दस्तावेज़
+site_description: विभिन्न एप्लिकेशन्स और हार्डवेयर प्लेटफॉर्म्स के लिए Ultralytics YOLOv8 की खोज करें, एक अत्याधुनिक वास्तविक समय वस्तु पहचान और छवि विभाजन मॉडल।
+site_url: https://docs.ultralytics.com/hi/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/hi/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'hi/' # where to find the markdown files
+site_dir: '../site/hi' # where to publish to
+
+theme:
+  name: material
+  language: hi
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - เคนเฅ‹เคฎเคชเฅ‡เคœ: + - เคนเฅ‹เคฎเคชเฅ‡เคœ: index.md + - เคคเฅเคตเคฐเคฟเคค เคชเฅเคฐเคพเคฐเค‚เคญ: quickstart.md + - เคฎเฅ‹เคก: + - modes/index.md + - เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ: modes/train.md + - เคฎเคพเคจเฅเคฏเคคเคพ: modes/val.md + - เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€: modes/predict.md + - เคจเคฟเคฐเฅเคฏเคพเคค: modes/export.md + - เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—: modes/track.md + - เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค—: modes/benchmark.md + - เค•เคพเคฐเฅเคฏ: + - tasks/index.md + - เคชเคนเคšเคพเคจ: tasks/detect.md + - เค–เค‚เคกเคจ: tasks/segment.md + - เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ: tasks/classify.md + - เคฎเฅเคฆเฅเคฐเคพ: tasks/pose.md + - เคคเฅเคตเคฐเคฟเคค เคชเฅเคฐเคพเคฐเค‚เคญ: quickstart.md + - เคฎเฅ‹เคก: + - modes/index.md + - เคชเฅเคฐเคถเคฟเค•เฅเคทเคฃ: modes/train.md + - เคฎเคพเคจเฅเคฏเคคเคพ: modes/val.md + - เคญเคตเคฟเคทเฅเคฏเคตเคพเคฃเฅ€: modes/predict.md + - เคจเคฟเคฐเฅเคฏเคพเคค: modes/export.md + - เคŸเฅเคฐเฅˆเค•เคฟเค‚เค—: modes/track.md + - เคฌเฅ‡เค‚เคšเคฎเคพเคฐเฅเค•เคฟเค‚เค—: modes/benchmark.md + - เค•เคพเคฐเฅเคฏ: + - tasks/index.md + - เคชเคนเคšเคพเคจ: tasks/detect.md + - เค–เค‚เคกเคจ: tasks/segment.md + - เคตเคฐเฅเค—เฅ€เค•เคฐเคฃ: tasks/classify.md + - เคฎเฅเคฆเฅเคฐเคพ: tasks/pose.md + - เคฎเฅ‰เคกเคฒ: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - 
SAM (Segment Anything Model): models/sam.md
+  - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md
+  - FastSAM (Fast Segment Anything Model): models/fast-sam.md
+  - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md
+  - RT-DETR (Realtime Detection Transformer): models/rtdetr.md
+  - डेटासेट्स:
+    - datasets/index.md
+
+# Plugins including 301 redirects navigation ---------------------------------------------------------------------------
+plugins:
+  - search:
+      lang: hi
+  - ultralytics:
+      add_desc: False
+      add_image: True
+      add_share_buttons: True
+      default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529
diff --git a/ultralytics/docs/mkdocs_ja.yml b/ultralytics/docs/mkdocs_ja.yml
new file mode 100755
index 0000000..53bd906
--- /dev/null
+++ b/ultralytics/docs/mkdocs_ja.yml
@@ -0,0 +1,213 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+site_name: Ultralytics YOLOv8 ドキュメント
+site_description: Ultralytics YOLOv8を探求してください。これは、さまざまなアプリケーションおよびハードウェアプラットフォームに適した最先端のリアルタイム物体検出および画像分割モデルです。
+site_url: https://docs.ultralytics.com/ja/
+site_author: Ultralytics
+repo_url: https://github.com/ultralytics/ultralytics
+edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/ja/
+repo_name: ultralytics/ultralytics
+remote_name: https://github.com/ultralytics/docs
+docs_dir: 'ja/' # where to find the markdown files
+site_dir: '../site/ja' # where to publish to
+
+theme:
+  name: material
+  language: ja
+  custom_dir: overrides/
+  logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg
+  favicon: assets/favicon.ico
+  icon:
+    repo: fontawesome/brands/github
+  # font: # disabled for faster page load times
+  #   text: Helvetica
+  #   code: Roboto Mono
+  palette:
+    - media: "(prefers-color-scheme)"
+      toggle:
+        icon: material/brightness-auto
+        name: Switch to light mode
+    - media: "(prefers-color-scheme: dark)"
+      scheme: slate
+      primary: black
+      accent: indigo
+      toggle:
+        icon: material/brightness-4
+        name: Switch to system preference
+    - media: "(prefers-color-scheme: light)"
+      scheme: default
+      primary: indigo
+      accent: indigo
+      toggle:
+        icon: material/brightness-7
+        name: Switch to dark mode
+  features:
+    - announce.dismiss
+    - content.action.edit
+    - content.code.annotate
+    - content.code.copy
+    - content.tooltips
+    - search.highlight
+    - search.share
+    - search.suggest
+    - toc.follow
+    - navigation.top
+    - navigation.tabs
+    - navigation.tabs.sticky
+    - navigation.prune
+    - navigation.footer
+    - navigation.tracking
+    - navigation.instant
+    - navigation.instant.progress
+    - navigation.indexes
+    - navigation.sections
+    - content.tabs.link # all code tabs change simultaneously
+
+# Customization
+copyright: © 2023 Ultralytics Inc. All rights reserved.
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - ใƒ›ใƒผใƒ : + - ใƒ›ใƒผใƒ : index.md + - ใ‚ฏใ‚คใƒƒใ‚ฏใ‚นใ‚ฟใƒผใƒˆ: quickstart.md + - ใƒขใƒผใƒ‰: + - modes/index.md + - ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ: modes/train.md + - ๆคœ่จผ: modes/val.md + - ไบˆๆธฌ: modes/predict.md + - ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ: modes/export.md + - ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ: modes/track.md + - ใƒ™ใƒณใƒใƒžใƒผใ‚ฏ: modes/benchmark.md + - ใ‚ฟใ‚นใ‚ฏ: + - tasks/index.md + - ๆคœๅ‡บ: tasks/detect.md + - ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ: tasks/segment.md + - ๅˆ†้กž: tasks/classify.md + - ใƒใƒผใ‚บ: tasks/pose.md + - ใ‚ฏใ‚คใƒƒใ‚ฏใ‚นใ‚ฟใƒผใƒˆ: quickstart.md + - ใƒขใƒผใƒ‰: + - modes/index.md + - ใƒˆใƒฌใƒผใƒ‹ใƒณใ‚ฐ: modes/train.md + - ๆคœ่จผ: modes/val.md + - ไบˆๆธฌ: modes/predict.md + - ใ‚จใ‚ฏใ‚นใƒใƒผใƒˆ: modes/export.md + - ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐ: modes/track.md + - ใƒ™ใƒณใƒใƒžใƒผใ‚ฏ: modes/benchmark.md + - ใ‚ฟใ‚นใ‚ฏ: + - tasks/index.md + - ๆคœๅ‡บ: tasks/detect.md + - ใ‚ปใ‚ฐใƒกใƒณใƒ†ใƒผใ‚ทใƒงใƒณ: tasks/segment.md + - ๅˆ†้กž: tasks/classify.md + - ใƒใƒผใ‚บ: tasks/pose.md + - ใƒขใƒ‡ใƒซ: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime 
Detection Transformer): models/rtdetr.md + - ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆ: + - datasets/index.md + +# Plugins including 301 redirects navigation --------------------------------------------------------------------------- +plugins: + - search: + lang: ja + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 diff --git a/ultralytics/docs/mkdocs_ja.yml:Zone.Identifier b/ultralytics/docs/mkdocs_ja.yml:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/mkdocs_ja.yml:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/mkdocs_ko.yml b/ultralytics/docs/mkdocs_ko.yml new file mode 100755 index 0000000..649bbc8 --- /dev/null +++ b/ultralytics/docs/mkdocs_ko.yml @@ -0,0 +1,213 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license + +site_name: Ultralytics YOLOv8 ๋ฌธ์„œ +site_description: Ultralytics YOLOv8์„ ํƒ์ƒ‰ํ•˜์„ธ์š”. ์ด๋Š” ๋‹ค์–‘ํ•œ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๋ฐ ํ•˜๋“œ์›จ์–ด ํ”Œ๋žซํผ์„ ์œ„ํ•œ ์ตœ์ฒจ๋‹จ ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ๊ฐ์ง€ ๋ฐ ์ด๋ฏธ์ง€ ๋ถ„ํ•  ๋ชจ๋ธ์ž…๋‹ˆ๋‹ค. +site_url: https://docs.ultralytics.com/ko/ +site_author: Ultralytics +repo_url: https://github.com/ultralytics/ultralytics +edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/ko/ +repo_name: ultralytics/ultralytics +remote_name: https://github.com/ultralytics/docs +docs_dir: 'ko/' # where to find the markdown files +site_dir: '../site/ko' # where to publish to + +theme: + name: material + language: ko + custom_dir: overrides/ + logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg + favicon: assets/favicon.ico + icon: + repo: fontawesome/brands/github + # font: # disabled for faster page load times + # text: Helvetica + # code: Roboto Mono + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to system preference + - media: "(prefers-color-scheme: light)" + scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - search.highlight + - search.share + - search.suggest + - toc.follow + - navigation.top + - navigation.tabs + - navigation.tabs.sticky + - navigation.prune + - navigation.footer + - navigation.tracking + - navigation.instant + - navigation.instant.progress + - navigation.indexes + - navigation.sections + - content.tabs.link # all code tabs change simultaneously + +# Customization +copyright: ยฉ 2023 Ultralytics Inc. All rights reserved. 
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - ํ™ˆ: + - ํ™ˆ: index.md + - ๋น ๋ฅธ ์‹œ์ž‘: quickstart.md + - ๋ชจ๋“œ: + - modes/index.md + - ํ›ˆ๋ จ: modes/train.md + - ๊ฒ€์ฆ: modes/val.md + - ์˜ˆ์ธก: modes/predict.md + - ๋‚ด๋ณด๋‚ด๊ธฐ: modes/export.md + - ์ถ”์ : modes/track.md + - ๋ฒค์น˜๋งˆํฌ: modes/benchmark.md + - ์ž‘์—…: + - tasks/index.md + - ๊ฐ์ง€: tasks/detect.md + - ๋ถ„ํ• : tasks/segment.md + - ๋ถ„๋ฅ˜: tasks/classify.md + - ํฌ์ฆˆ: tasks/pose.md + - ๋น ๋ฅธ ์‹œ์ž‘: quickstart.md + - ๋ชจ๋“œ: + - modes/index.md + - ํ›ˆ๋ จ: modes/train.md + - ๊ฒ€์ฆ: modes/val.md + - ์˜ˆ์ธก: modes/predict.md + - ๋‚ด๋ณด๋‚ด๊ธฐ: modes/export.md + - ์ถ”์ : modes/track.md + - ๋ฒค์น˜๋งˆํฌ: modes/benchmark.md + - ์ž‘์—…: + - tasks/index.md + - ๊ฐ์ง€: tasks/detect.md + - ๋ถ„ํ• : tasks/segment.md + - ๋ถ„๋ฅ˜: tasks/classify.md + - ํฌ์ฆˆ: tasks/pose.md + - ๋ชจ๋ธ: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - ๋ฐ์ดํ„ฐ์…‹: + - datasets/index.md + +# Plugins including 301 redirects navigation 
--------------------------------------------------------------------------- +plugins: + - search: + lang: ko + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 diff --git a/ultralytics/docs/mkdocs_ko.yml:Zone.Identifier b/ultralytics/docs/mkdocs_ko.yml:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/mkdocs_ko.yml:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/mkdocs_pt.yml b/ultralytics/docs/mkdocs_pt.yml new file mode 100755 index 0000000..724afab --- /dev/null +++ b/ultralytics/docs/mkdocs_pt.yml @@ -0,0 +1,213 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license + +site_name: Documentaรงรฃo Ultralytics YOLOv8 +site_description: Explore o Ultralytics YOLOv8, um modelo avanรงado de detecรงรฃo de objetos e segmentaรงรฃo de imagem em tempo real, adequado para vรกrias aplicaรงรตes e plataformas de hardware. +site_url: https://docs.ultralytics.com/pt/ +site_author: Ultralytics +repo_url: https://github.com/ultralytics/ultralytics +edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/pt/ +repo_name: ultralytics/ultralytics +remote_name: https://github.com/ultralytics/docs +docs_dir: 'pt/' # where to find the markdown files +site_dir: '../site/pt' # where to publish to + +theme: + name: material + language: pt + custom_dir: overrides/ + logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg + favicon: assets/favicon.ico + icon: + repo: fontawesome/brands/github + # font: # disabled for faster page load times + # text: Helvetica + # code: Roboto Mono + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to system preference + - media: "(prefers-color-scheme: light)" + scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - search.highlight + - search.share + - search.suggest + - toc.follow + - navigation.top + - navigation.tabs + - navigation.tabs.sticky + - navigation.prune + - navigation.footer + - navigation.tracking + - navigation.instant + - navigation.instant.progress + - navigation.indexes + - navigation.sections + - content.tabs.link # all code tabs change simultaneously + +# Customization +copyright: ยฉ 2023 Ultralytics Inc. All rights reserved. 
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - Pรกgina Inicial: + - Pรกgina Inicial: index.md + - Inรญcio Rรกpido: quickstart.md + - Modos: + - modes/index.md + - Treinamento: modes/train.md + - Validaรงรฃo: modes/val.md + - Previsรฃo: modes/predict.md + - Exportaรงรฃo: modes/export.md + - Rastreamento: modes/track.md + - Benchmarking: modes/benchmark.md + - Tarefas: + - tasks/index.md + - Detecรงรฃo: tasks/detect.md + - Segmentaรงรฃo: tasks/segment.md + - Classificaรงรฃo: tasks/classify.md + - Pose: tasks/pose.md + - Inรญcio Rรกpido: quickstart.md + - Modos: + - modes/index.md + - Treinamento: modes/train.md + - Validaรงรฃo: modes/val.md + - Previsรฃo: modes/predict.md + - Exportaรงรฃo: modes/export.md + - Rastreamento: modes/track.md + - Benchmarking: modes/benchmark.md + - Tarefas: + - tasks/index.md + - Detecรงรฃo: tasks/detect.md + - Segmentaรงรฃo: tasks/segment.md + - Classificaรงรฃo: tasks/classify.md + - Pose: tasks/pose.md + - Modelos: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - Conjuntos de Dados: 
+ - datasets/index.md + +# Plugins including 301 redirects navigation --------------------------------------------------------------------------- +plugins: + - search: + lang: pt + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 diff --git a/ultralytics/docs/mkdocs_pt.yml:Zone.Identifier b/ultralytics/docs/mkdocs_pt.yml:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/mkdocs_pt.yml:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/mkdocs_ru.yml b/ultralytics/docs/mkdocs_ru.yml new file mode 100755 index 0000000..b097c13 --- /dev/null +++ b/ultralytics/docs/mkdocs_ru.yml @@ -0,0 +1,213 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license + +site_name: ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธั Ultralytics YOLOv8 +site_description: ะ˜ะทัƒั‡ะธั‚ะต Ultralytics YOLOv8, ะฟะตั€ะตะดะพะฒัƒัŽ ะผะพะดะตะปัŒ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะฟะพะดั…ะพะดัั‰ัƒัŽ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธะน ะธ ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ั… ะฟะปะฐั‚ั„ะพั€ะผ. +site_url: https://docs.ultralytics.com/ru/ +site_author: Ultralytics +repo_url: https://github.com/ultralytics/ultralytics +edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/ru/ +repo_name: ultralytics/ultralytics +remote_name: https://github.com/ultralytics/docs +docs_dir: 'ru/' # where to find the markdown files +site_dir: '../site/ru' # where to publish to + +theme: + name: material + language: ru + custom_dir: overrides/ + logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg + favicon: assets/favicon.ico + icon: + repo: fontawesome/brands/github + # font: # disabled for faster page load times + # text: Helvetica + # code: Roboto Mono + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to system preference + - media: "(prefers-color-scheme: light)" + scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - search.highlight + - search.share + - search.suggest + - toc.follow + - navigation.top + - navigation.tabs + - navigation.tabs.sticky + - navigation.prune + - navigation.footer + - navigation.tracking + - navigation.instant + - navigation.instant.progress + - navigation.indexes + - navigation.sections + - content.tabs.link # all code tabs change simultaneously + +# Customization +copyright: ยฉ 2023 Ultralytics Inc. All rights reserved. 
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - ะ“ะปะฐะฒะฝะฐั: + - ะ“ะปะฐะฒะฝะฐั: index.md + - ะ‘ั‹ัั‚ั€ั‹ะน ัั‚ะฐั€ั‚: quickstart.md + - ะ ะตะถะธะผั‹: + - modes/index.md + - ะžะฑัƒั‡ะตะฝะธะต: modes/train.md + - ะ’ะฐะปะธะดะฐั†ะธั: modes/val.md + - ะŸั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะต: modes/predict.md + - ะญะบัะฟะพั€ั‚: modes/export.md + - ะžั‚ัะปะตะถะธะฒะฐะฝะธะต: modes/track.md + - ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ: modes/benchmark.md + - ะ—ะฐะดะฐั‡ะธ: + - tasks/index.md + - ะžะฑะฝะฐั€ัƒะถะตะฝะธะต: tasks/detect.md + - ะกะตะณะผะตะฝั‚ะฐั†ะธั: tasks/segment.md + - ะšะปะฐััะธั„ะธะบะฐั†ะธั: tasks/classify.md + - ะŸะพะทะฐ: tasks/pose.md + - ะ‘ั‹ัั‚ั€ั‹ะน ัั‚ะฐั€ั‚: quickstart.md + - ะ ะตะถะธะผั‹: + - modes/index.md + - ะžะฑัƒั‡ะตะฝะธะต: modes/train.md + - ะ’ะฐะปะธะดะฐั†ะธั: modes/val.md + - ะŸั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะต: modes/predict.md + - ะญะบัะฟะพั€ั‚: modes/export.md + - ะžั‚ัะปะตะถะธะฒะฐะฝะธะต: modes/track.md + - ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ: modes/benchmark.md + - ะ—ะฐะดะฐั‡ะธ: + - tasks/index.md + - ะžะฑะฝะฐั€ัƒะถะตะฝะธะต: tasks/detect.md + - ะกะตะณะผะตะฝั‚ะฐั†ะธั: tasks/segment.md + - ะšะปะฐััะธั„ะธะบะฐั†ะธั: tasks/classify.md + - ะŸะพะทะฐ: tasks/pose.md + - ะœะพะดะตะปะธ: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): 
models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - ะ”ะฐะฝะฝั‹ะต: + - datasets/index.md + +# Plugins including 301 redirects navigation --------------------------------------------------------------------------- +plugins: + - search: + lang: ru + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 diff --git a/ultralytics/docs/mkdocs_ru.yml:Zone.Identifier b/ultralytics/docs/mkdocs_ru.yml:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/mkdocs_ru.yml:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/mkdocs_zh.yml b/ultralytics/docs/mkdocs_zh.yml new file mode 100755 index 0000000..393d749 --- /dev/null +++ b/ultralytics/docs/mkdocs_zh.yml @@ -0,0 +1,213 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license + +site_name: Ultralytics YOLOv8 ๆ–‡ๆกฃ +site_description: ๆŽข็ดขUltralytics YOLOv8๏ผŒ่ฟ™ๆ˜ฏไธ€ๆฌพๅฐ–็ซฏ็š„ๅฎžๆ—ถๅฏน่ฑกๆฃ€ๆต‹ๅ’Œๅ›พๅƒๅˆ†ๅ‰ฒๆจกๅž‹๏ผŒ้€‚็”จไบŽๅ„็งๅบ”็”จๅ’Œ็กฌไปถๅนณๅฐใ€‚ +site_url: https://docs.ultralytics.com/zh/ +site_author: Ultralytics +repo_url: https://github.com/ultralytics/ultralytics +edit_uri: https://github.com/ultralytics/ultralytics/tree/main/docs/zh/ +repo_name: ultralytics/ultralytics +remote_name: https://github.com/ultralytics/docs +docs_dir: 'zh/' # where to find the markdown files +site_dir: '../site/zh' # where to publish to + +theme: + name: material + language: zh + custom_dir: overrides/ + logo: https://github.com/ultralytics/assets/raw/main/logo/Ultralytics_Logotype_Reverse.svg + favicon: assets/favicon.ico + icon: + repo: fontawesome/brands/github + # font: # disabled for faster page load times + # text: Helvetica + # code: Roboto Mono + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to system preference + - media: "(prefers-color-scheme: light)" + scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + features: + - announce.dismiss + - content.action.edit + - content.code.annotate + - content.code.copy + - content.tooltips + - search.highlight + - search.share + - search.suggest + - toc.follow + - navigation.top + - navigation.tabs + - navigation.tabs.sticky + - navigation.prune + - navigation.footer + - navigation.tracking + - navigation.instant + - navigation.instant.progress + - navigation.indexes + - navigation.sections + - content.tabs.link # all code tabs change simultaneously + +# Customization +copyright: ยฉ 2023 Ultralytics Inc. All rights reserved. 
+extra: + # version: + # provider: mike # version drop-down menu + robots: robots.txt + analytics: + provider: google + property: G-2M5EHKC0BH + alternate: # language drop-down + - name: ๐Ÿ‡ฌ๐Ÿ‡ง English + link: / + lang: en + - name: ๐Ÿ‡จ๐Ÿ‡ณ ็ฎ€ไฝ“ไธญๆ–‡ + link: /zh/ + lang: zh + - name: ๐Ÿ‡ฐ๐Ÿ‡ท ํ•œ๊ตญ์–ด + link: /ko/ + lang: ko + - name: ๐Ÿ‡ฏ๐Ÿ‡ต ๆ—ฅๆœฌ่ชž + link: /ja/ + lang: ja + - name: ๐Ÿ‡ท๐Ÿ‡บ ะ ัƒััะบะธะน + link: /ru/ + lang: ru + - name: ๐Ÿ‡ฉ๐Ÿ‡ช Deutsch + link: /de/ + lang: de + - name: ๐Ÿ‡ซ๐Ÿ‡ท Franรงais + link: /fr/ + lang: fr + - name: ๐Ÿ‡ช๐Ÿ‡ธ Espaรฑol + link: /es/ + lang: es + - name: ๐Ÿ‡ต๐Ÿ‡น Portuguรชs + link: /pt/ + lang: pt + - name: ๐Ÿ‡ฎ๐Ÿ‡ณ เคนเคฟเคจเฅเคฆเฅ€ + link: /hi/ + lang: hi + - name: ๐Ÿ‡ธ๐Ÿ‡ฆ ุงู„ุนุฑุจูŠุฉ + link: /ar/ + lang: ar + social: + - icon: fontawesome/brands/github + link: https://github.com/ultralytics + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/ultralytics/ + - icon: fontawesome/brands/twitter + link: https://twitter.com/ultralytics + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/ultralytics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/r/ultralytics/ultralytics/ + - icon: fontawesome/brands/python + link: https://pypi.org/project/ultralytics/ + - icon: fontawesome/brands/discord + link: https://ultralytics.com/discord + +extra_css: + - stylesheets/style.css +extra_javascript: + - javascript/extra.js + +markdown_extensions: + - admonition + - md_in_html + - tables + - attr_list + - def_list + - pymdownx.critic + - pymdownx.caret + - pymdownx.keys + - pymdownx.mark + - pymdownx.tilde + - pymdownx.details + - pymdownx.superfences + - pymdownx.inlinehilite + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.snippets: + base_path: ./ + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + - pymdownx.tabbed: + alternate_style: true + + +# Primary navigation --------------------------------------------------------------------------------------------------- +nav: + - ไธป้กต: + - ไธป้กต: index.md + - ๅฟซ้€Ÿๅผ€ๅง‹: quickstart.md + - ๆจกๅผ: + - modes/index.md + - ่ฎญ็ปƒ: modes/train.md + - ้ชŒ่ฏ: modes/val.md + - ้ข„ๆต‹: modes/predict.md + - ๅฏผๅ‡บ: modes/export.md + - ่ทŸ่ธช: modes/track.md + - ๅŸบๅ‡†ๆต‹่ฏ•: modes/benchmark.md + - ไปปๅŠก: + - tasks/index.md + - ๆฃ€ๆต‹: tasks/detect.md + - ๅˆ†ๅ‰ฒ: tasks/segment.md + - ๅˆ†็ฑป: tasks/classify.md + - ๅงฟๆ€: tasks/pose.md + - ๅฟซ้€Ÿๅผ€ๅง‹: quickstart.md + - ๆจกๅผ: + - modes/index.md + - ่ฎญ็ปƒ: modes/train.md + - ้ชŒ่ฏ: modes/val.md + - ้ข„ๆต‹: modes/predict.md + - ๅฏผๅ‡บ: modes/export.md + - ่ทŸ่ธช: modes/track.md + - ๅŸบๅ‡†ๆต‹่ฏ•: modes/benchmark.md + - ไปปๅŠก: + - tasks/index.md + - ๆฃ€ๆต‹: tasks/detect.md + - ๅˆ†ๅ‰ฒ: tasks/segment.md + - ๅˆ†็ฑป: tasks/classify.md + - ๅงฟๆ€: tasks/pose.md + - ๆจกๅž‹: + - models/index.md + - YOLOv3: models/yolov3.md + - YOLOv4: models/yolov4.md + - YOLOv5: models/yolov5.md + - YOLOv6: models/yolov6.md + - YOLOv7: models/yolov7.md + - YOLOv8: models/yolov8.md + - SAM (Segment Anything Model): models/sam.md + - MobileSAM (Mobile Segment Anything Model): models/mobile-sam.md + - FastSAM (Fast Segment Anything Model): models/fast-sam.md + - YOLO-NAS (Neural Architecture Search): models/yolo-nas.md + - RT-DETR (Realtime Detection Transformer): models/rtdetr.md + - ๆ•ฐๆฎ้›†: + - datasets/index.md + +# Plugins including 301 redirects navigation 
--------------------------------------------------------------------------- +plugins: + - search: + lang: zh + - ultralytics: + add_desc: False + add_image: True + add_share_buttons: True + default_image: https://github.com/ultralytics/ultralytics/assets/26833433/6d09221c-c52a-4234-9a5d-b862e93c6529 diff --git a/ultralytics/docs/mkdocs_zh.yml:Zone.Identifier b/ultralytics/docs/mkdocs_zh.yml:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/mkdocs_zh.yml:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/overrides/assets/favicon.ico b/ultralytics/docs/overrides/assets/favicon.ico new file mode 100755 index 0000000..7aa5066 Binary files /dev/null and b/ultralytics/docs/overrides/assets/favicon.ico differ diff --git a/ultralytics/docs/overrides/assets/favicon.ico:Zone.Identifier b/ultralytics/docs/overrides/assets/favicon.ico:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/overrides/assets/favicon.ico:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/overrides/javascript/extra.js b/ultralytics/docs/overrides/javascript/extra.js new file mode 100755 index 0000000..d67fece --- /dev/null +++ b/ultralytics/docs/overrides/javascript/extra.js @@ -0,0 +1,69 @@ +// Function that applies light/dark theme based on the user's preference +const applyAutoTheme = () => { + // Determine the user's preferred color scheme + const prefersLight = window.matchMedia("(prefers-color-scheme: light)").matches; + const prefersDark = window.matchMedia("(prefers-color-scheme: dark)").matches; + + // Apply the appropriate attributes based on the user's preference + if (prefersLight) { + document.body.setAttribute("data-md-color-scheme", "default"); + document.body.setAttribute("data-md-color-primary", "indigo"); + } else if (prefersDark) { + document.body.setAttribute("data-md-color-scheme", "slate"); + document.body.setAttribute("data-md-color-primary", "black"); + } +}; + +// Function that checks and applies light/dark theme based on the user's preference (if auto theme is enabled) +function checkAutoTheme() { + // Array of supported language codes -> each language has its own palette (stored in local storage) + const supportedLangCodes = ["en", "zh", "ko", "ja", "ru", "de", "fr", "es", "pt"]; + // Get the URL path + const path = window.location.pathname; + // Extract the language code from the URL (assuming it's in the format /xx/...) + const langCode = path.split("/")[1]; + // Check if the extracted language code is in the supported languages + const isValidLangCode = supportedLangCodes.includes(langCode); + // Construct the local storage key based on the language code if valid, otherwise default to the root key + const localStorageKey = isValidLangCode ? 
`/${langCode}/.__palette` : "/.__palette"; + // Retrieve the palette from local storage using the constructed key + const palette = localStorage.getItem(localStorageKey); + if (palette) { + // Check if the palette's index is 0 (auto theme) + const paletteObj = JSON.parse(palette); + if (paletteObj && paletteObj.index === 0) { + applyAutoTheme(); + } + } +} + +// Run function when the script loads +checkAutoTheme(); + +// Re-run the function when the user's preference changes (when the user changes their system theme) +window.matchMedia("(prefers-color-scheme: light)").addEventListener("change", checkAutoTheme); +window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change", checkAutoTheme); + +// Re-run the function when the palette changes (e.g. user switched from dark theme to auto theme) +// ! We can't use window.addEventListener("storage", checkAutoTheme) because it will NOT be triggered on the current tab +// ! So we have to use the following workaround: +// Get the palette input for auto theme +var autoThemeInput = document.getElementById("__palette_1"); +if (autoThemeInput) { + // Add a click event listener to the input + autoThemeInput.addEventListener("click", function () { + // Check if the auto theme is selected + if (autoThemeInput.checked) { + // Re-run the function after a short delay (to ensure that the palette has been updated) + setTimeout(applyAutoTheme); + } + }); +} + +// Add iframe navigation +window.onhashchange = function() { + window.parent.postMessage({ + type: 'navigation', + hash: window.location.pathname + window.location.search + window.location.hash + }, '*'); +}; diff --git a/ultralytics/docs/overrides/javascript/extra.js:Zone.Identifier b/ultralytics/docs/overrides/javascript/extra.js:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/overrides/javascript/extra.js:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/overrides/partials/comments.html b/ultralytics/docs/overrides/partials/comments.html new file mode 100755 index 0000000..57050a1 --- /dev/null +++ b/ultralytics/docs/overrides/partials/comments.html @@ -0,0 +1,50 @@ +{% if page.meta.comments %} +
<h2 id="__comments">{{ lang.t("meta.comments") }}</h2>
+  <!-- Insert generated comment snippet here -->
+{% endif %}
diff --git a/ultralytics/docs/overrides/partials/comments.html:Zone.Identifier b/ultralytics/docs/overrides/partials/comments.html:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/overrides/partials/comments.html:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/overrides/partials/source-file.html b/ultralytics/docs/overrides/partials/source-file.html
new file mode 100755
index 0000000..84e2ab1
--- /dev/null
+++ b/ultralytics/docs/overrides/partials/source-file.html
@@ -0,0 +1,26 @@
+{% import "partials/language.html" as lang with context %}
+
+<hr />
+<div class="md-source-file">
+  <small>
+
+    {% if page.meta.git_revision_date_localized %}
+      📅 {{ lang.t("source.file.date.updated") }}:
+      {{ page.meta.git_revision_date_localized }}
+      {% if page.meta.git_creation_date_localized %}
+        <br />
+        🎂 {{ lang.t("source.file.date.created") }}:
+        {{ page.meta.git_creation_date_localized }}
+      {% endif %}
+
+    {% elif page.meta.revision_date %}
+      📅 {{ lang.t("source.file.date.updated") }}:
+      {{ page.meta.revision_date }}
+    {% endif %}
+  </small>
+</div>
diff --git a/ultralytics/docs/overrides/partials/source-file.html:Zone.Identifier b/ultralytics/docs/overrides/partials/source-file.html:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/overrides/partials/source-file.html:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/overrides/stylesheets/style.css b/ultralytics/docs/overrides/stylesheets/style.css new file mode 100755 index 0000000..7fe3511 --- /dev/null +++ b/ultralytics/docs/overrides/stylesheets/style.css @@ -0,0 +1,50 @@ +/* Table format like GitHub ----------------------------------------------------------------------------------------- */ +th, +td { + border: 1px solid var(--md-typeset-table-color); + border-spacing: 0; + border-bottom: none; + border-left: none; + border-top: none; +} + +.md-typeset__table { + line-height: 1; +} + +.md-typeset__table table:not([class]) { + font-size: 0.74rem; + border-right: none; +} + +.md-typeset__table table:not([class]) td, +.md-typeset__table table:not([class]) th { + padding: 9px; +} + +/* light mode alternating table bg colors */ +.md-typeset__table tr:nth-child(2n) { + background-color: #f6f8fa; +} + +/* dark mode alternating table bg colors */ +[data-md-color-scheme="slate"] .md-typeset__table tr:nth-child(2n) { + background-color: #161b22; +} +/* Table format like GitHub ----------------------------------------------------------------------------------------- */ + +/* Code block vertical scroll */ +div.highlight { + max-height: 20rem; + overflow-y: auto; /* for adding a scrollbar when needed */ +} + +/* Set content width */ +.md-grid { + max-width: 1440px; +} + +/* Set language dropdown maximum height to screen height */ +.md-header .md-select:hover .md-select__inner { + max-height: 75vh; +} diff --git a/ultralytics/docs/overrides/stylesheets/style.css:Zone.Identifier b/ultralytics/docs/overrides/stylesheets/style.css:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/overrides/stylesheets/style.css:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/index.md b/ultralytics/docs/pt/index.md new file mode 100755 index 0000000..3c9296f --- /dev/null +++ b/ultralytics/docs/pt/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: Explore um guia completo do Ultralytics YOLOv8, um modelo de detecรงรฃo de objetos e segmentaรงรฃo de imagens de alta velocidade e precisรฃo. Tutoriais de instalaรงรฃo, previsรฃo, treinamento e muito mais. +keywords: Ultralytics, YOLOv8, detecรงรฃo de objetos, segmentaรงรฃo de imagens, aprendizado de mรกquina, aprendizado profundo, visรฃo computacional, instalaรงรฃo do YOLOv8, previsรฃo do YOLOv8, treinamento do YOLOv8, histรณria do YOLO, licenรงas do YOLO +--- + +
+<div align="center">
+  <!-- Banner Ultralytics YOLO -->
+  <!-- GitHub da Ultralytics · LinkedIn da Ultralytics · Twitter da Ultralytics · YouTube da Ultralytics · TikTok da Ultralytics · Instagram da Ultralytics · Discord da Ultralytics -->
+  <!-- Integração Contínua da Ultralytics · Cobertura de Código da Ultralytics · Citação do YOLOv8 · Contagem de Pulls no Docker · Discord -->
+  <!-- Executar no Gradient · Abrir no Colab · Abrir no Kaggle -->
+</div>
+ +Apresentamos o [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics), a mais recente versรฃo do aclamado modelo de detecรงรฃo de objetos em tempo real e segmentaรงรฃo de imagens. O YOLOv8 รฉ baseado nos mais recentes avanรงos do aprendizado profundo e visรฃo computacional, oferecendo um desempenho sem paralelo em termos de velocidade e precisรฃo. Seu design simplificado o torna adequado para vรกrias aplicaรงรตes e facilmente adaptรกvel a diferentes plataformas de hardware, desde dispositivos de borda atรฉ APIs na nuvem. + +Explore os Documentos do YOLOv8, um recurso abrangente projetado para ajudรก-lo a entender e utilizar suas caracterรญsticas e capacidades. Seja vocรช um praticante experiente de aprendizado de mรกquina ou novo no campo, este hub tem como objetivo maximizar o potencial do YOLOv8 em seus projetos + +!!! Note "Nota" + + ๐Ÿšง Nossa documentaรงรฃo em vรกrios idiomas estรก atualmente em construรงรฃo e estamos trabalhando arduamente para aprimorรก-la. Agradecemos sua paciรชncia! ๐Ÿ™ + +## Por Onde Comeรงar + +- **Instalar** `ultralytics` com pip e comeรงar a funcionar em minutos   [:material-clock-fast: Comeรงar](quickstart.md){ .md-button } +- **Prever** novas imagens e vรญdeos com o YOLOv8   [:octicons-image-16: Prever em Imagens](modes/predict.md){ .md-button } +- **Treinar** um novo modelo YOLOv8 em seu prรณprio conjunto de dados personalizado   [:fontawesome-solid-brain: Treinar um Modelo](modes/train.md){ .md-button } +- **Explorar** tarefas do YOLOv8 como segmentar, classificar, estimar pose e rastrear   [:material-magnify-expand: Explorar Tarefas](tasks/index.md){ .md-button } + +
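+Como referência rápida, segue um esboço mínimo em Python do fluxo acima (os nomes de arquivo e o número de épocas são apenas ilustrativos; pressupõe `pip install ultralytics`):
+
+```python
+from ultralytics import YOLO
+
+# Carregar um modelo YOLOv8n pré-treinado no COCO
+model = YOLO('yolov8n.pt')
+
+# Executar inferência em uma imagem
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# Treinar no conjunto de dados de exemplo COCO8 (ajuste 'data' e 'epochs' ao seu caso)
+model.train(data='coco8.yaml', epochs=100, imgsz=640)
+```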
+<p align="center">
+  <!-- vídeo do YouTube incorporado -->
+  <br>
+  <strong>Assistir:</strong> Como Treinar um Modelo YOLOv8 em Seu Conjunto de Dados Personalizado no Google Colab.
+</p>
+ +## YOLO: Uma Breve Histรณria + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once), um popular modelo de detecรงรฃo de objetos e segmentaรงรฃo de imagens, foi desenvolvido por Joseph Redmon e Ali Farhadi na Universidade de Washington. Lanรงado em 2015, o YOLO rapidamente ganhou popularidade por sua alta velocidade e precisรฃo. + +- [YOLOv2](https://arxiv.org/abs/1612.08242), lanรงado em 2016, aprimorou o modelo original incorporando normalizaรงรฃo em lote, caixas รขncora e aglomerados dimensionais. +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), lanรงado em 2018, melhorou ainda mais o desempenho do modelo usando uma rede dorsal mais eficiente, mรบltiplas รขncoras e pooling piramidal espacial. +- [YOLOv4](https://arxiv.org/abs/2004.10934) foi lanรงado em 2020, introduzindo inovaรงรตes como a ampliaรงรฃo de dados Mosaic, uma nova cabeรงa de detecรงรฃo sem รขncoras e uma nova funรงรฃo de perda. +- [YOLOv5](https://github.com/ultralytics/yolov5) melhorou ainda mais o desempenho do modelo e adicionou novos recursos, como otimizaรงรฃo de hiperparรขmetros, rastreamento integrado de experimentos e exportaรงรฃo automรกtica para formatos de exportaรงรฃo populares. +- [YOLOv6](https://github.com/meituan/YOLOv6) foi disponibilizado em cรณdigo aberto por [Meituan](https://about.meituan.com/) em 2022 e estรก em uso em muitos dos robรดs autรดnomos de entrega da empresa. +- [YOLOv7](https://github.com/WongKinYiu/yolov7) adicionou tarefas adicionais, como estimativa de pose no conjunto de dados de keypoints COCO. +- [YOLOv8](https://github.com/ultralytics/ultralytics), a mais recente versรฃo do YOLO pela Ultralytics. Como um modelo de รบltima geraรงรฃo, o YOLOv8 baseia-se no sucesso das versรตes anteriores, introduzindo novos recursos e melhorias para desempenho, flexibilidade e eficiรชncia aprimorados. O YOLOv8 suporta uma gama completa de tarefas de IA de visรฃo, incluindo [detecรงรฃo](tasks/detect.md), [segmentaรงรฃo](tasks/segment.md), [estimativa de pose](tasks/pose.md), [rastreamento](modes/track.md) e [classificaรงรฃo](tasks/classify.md). Essa versatilidade permite que os usuรกrios aproveitem as capacidades do YOLOv8 em diversas aplicaรงรตes e domรญnios. + +## Licenรงas YOLO: Como o YOLO da Ultralytics รฉ licenciado? + +A Ultralytics oferece duas opรงรตes de licenรงa para acomodar casos de uso diversos: + +- **Licenรงa AGPL-3.0**: Essa licenรงa de cรณdigo aberto [aprovada pela OSI](https://opensource.org/licenses/) รฉ ideal para estudantes e entusiastas, promovendo colaboraรงรฃo aberta e compartilhamento de conhecimento. Veja o arquivo [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) para mais detalhes. +- **Licenรงa Empresarial**: Projetada para uso comercial, esta licenรงa permite a integraรงรฃo perfeita do software Ultralytics e modelos de IA em bens e serviรงos comerciais, contornando os requisitos de cรณdigo aberto da AGPL-3.0. Se o seu cenรกrio envolver a incorporaรงรฃo de nossas soluรงรตes em uma oferta comercial, entre em contato atravรฉs do [Licenciamento da Ultralytics](https://ultralytics.com/license). + +Nossa estratรฉgia de licenciamento รฉ projetada para garantir que qualquer melhoria em nossos projetos de cรณdigo aberto retorne ร  comunidade. Mantemos os princรญpios de cรณdigo aberto prรณximos ao nosso coraรงรฃo โค๏ธ, e nossa missรฃo รฉ garantir que nossas contribuiรงรตes possam ser utilizadas e expandidas de formas que beneficiem todos. 
diff --git a/ultralytics/docs/pt/index.md:Zone.Identifier b/ultralytics/docs/pt/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/fast-sam.md b/ultralytics/docs/pt/models/fast-sam.md new file mode 100755 index 0000000..54543ec --- /dev/null +++ b/ultralytics/docs/pt/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: Explore o FastSAM, uma soluรงรฃo baseada em CNN para segmentaรงรฃo em tempo real de objetos em imagens. Melhor interaรงรฃo do usuรกrio, eficiรชncia computacional e adaptabilidade em tarefas de visรฃo. +keywords: FastSAM, aprendizado de mรกquina, soluรงรฃo baseada em CNN, segmentaรงรฃo de objetos, soluรงรฃo em tempo real, Ultralytics, tarefas de visรฃo, processamento de imagens, aplicaรงรตes industriais, interaรงรฃo do usuรกrio +--- + +# Fast Segment Anything Model (FastSAM) + +O Fast Segment Anything Model (FastSAM) รฉ uma soluรงรฃo inovadora baseada em CNN em tempo real para a tarefa de Segmentar Qualquer Coisa. Essa tarefa foi projetada para segmentar qualquer objeto dentro de uma imagem com base em vรกrias possรญveis instruรงรตes de interaรงรฃo do usuรกrio. O FastSAM reduz significativamente as demandas computacionais, mantendo um desempenho competitivo, tornando-o uma escolha prรกtica para uma variedade de tarefas de visรฃo. + +![Visรฃo geral da arquitetura do Fast Segment Anything Model (FastSAM)](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## Visรฃo Geral + +O FastSAM รฉ projetado para abordar as limitaรงรตes do [Segment Anything Model (SAM)](sam.md), um modelo Transformer pesado com requisitos substanciais de recursos computacionais. O FastSAM divide a tarefa de segmentar qualquer coisa em duas etapas sequenciais: segmentaรงรฃo de todas as instรขncias e seleรงรฃo guiada por instruรงรตes. A primeira etapa usa o [YOLOv8-seg](../tasks/segment.md) para produzir as mรกscaras de segmentaรงรฃo de todas as instรขncias na imagem. Na segunda etapa, ele gera a regiรฃo de interesse correspondente ร  instruรงรฃo. + +## Recursos Principais + +1. **Soluรงรฃo em Tempo Real:** Aproveitando a eficiรชncia computacional das CNNs, o FastSAM fornece uma soluรงรฃo em tempo real para a tarefa de segmentar qualquer coisa, tornando-o valioso para aplicaรงรตes industriais que exigem resultados rรกpidos. + +2. **Eficiรชncia e Desempenho:** O FastSAM oferece uma reduรงรฃo significativa nas demandas computacionais e de recursos sem comprometer a qualidade do desempenho. Ele alcanรงa um desempenho comparรกvel ao SAM, mas com recursos computacionais drasticamente reduzidos, permitindo aplicaรงรตes em tempo real. + +3. **Segmentaรงรฃo Guiada por Instruรงรตes:** O FastSAM pode segmentar qualquer objeto dentro de uma imagem com base em vรกrias possรญveis instruรงรตes de interaรงรฃo do usuรกrio, proporcionando flexibilidade e adaptabilidade em diferentes cenรกrios. + +4. **Baseado em YOLOv8-seg:** O FastSAM รฉ baseado no [YOLOv8-seg](../tasks/segment.md), um detector de objetos equipado com um ramo de segmentaรงรฃo de instรขncias. Isso permite que ele produza efetivamente as mรกscaras de segmentaรงรฃo de todas as instรขncias em uma imagem. + +5. 
**Resultados Competitivos em Bancos de Dados de Referรชncia:** Na tarefa de proposta de objetos no MS COCO, o FastSAM alcanรงa pontuaรงรตes altas em uma velocidade significativamente mais rรกpida do que o [SAM](sam.md) em uma รบnica NVIDIA RTX 3090, demonstrando sua eficiรชncia e capacidade. + +6. **Aplicaรงรตes Prรกticas:** A abordagem proposta fornece uma nova soluรงรฃo prรกtica para um grande nรบmero de tarefas de visรฃo em alta velocidade, dezenas ou centenas de vezes mais rรกpido do que os mรฉtodos atuais. + +7. **Viabilidade de Compressรฃo do Modelo:** O FastSAM demonstra a viabilidade de um caminho que pode reduzir significativamente o esforรงo computacional, introduzindo uma prioridade artificial ร  estrutura, abrindo assim novas possibilidades para arquiteturas de modelos grandes para tarefas gerais de visรฃo. + +## Modelos Disponรญveis, Tarefas Suportadas e Modos de Operaรงรฃo + +Esta tabela apresenta os modelos disponรญveis com seus pesos prรฉ-treinados especรญficos, as tarefas que eles suportam e sua compatibilidade com diferentes modos de operaรงรฃo, como [Inferรชncia](../modes/predict.md), [Validaรงรฃo](../modes/val.md), [Treinamento](../modes/train.md) e [Exportaรงรฃo](../modes/export.md), indicados por emojis โœ… para modos suportados e emojis โŒ para modos nรฃo suportados. + +| Tipo de Modelo | Pesos Prรฉ-treinados | Tarefas Suportadas | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|----------------|---------------------|--------------------------------------------------|------------|-----------|-------------|------------| +| FastSAM-s | `FastSAM-s.pt` | [Segmentaรงรฃo de Instรขncias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [Segmentaรงรฃo de Instรขncias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## Exemplos de Uso + +Os modelos FastSAM sรฃo fรกceis de integrar em suas aplicaรงรตes Python. A Ultralytics fornece uma API Python amigรกvel ao usuรกrio e comandos de linha de comando (CLI) para facilitar o desenvolvimento. + +### Uso de Prediรงรฃo + +Para realizar detecรงรฃo de objetos em uma imagem, use o mรฉtodo `predict` conforme mostrado abaixo: + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # Definir uma fonte de inferรชncia + source = 'caminho/para/onibus.jpg' + + # Criar um modelo FastSAM + model = FastSAM('FastSAM-s.pt') # ou FastSAM-x.pt + + # Executar inferรชncia em uma imagem + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # Preparar um objeto de Processo de Instruรงรตes + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # Instruรงรฃo: tudo + ann = prompt_process.everything_prompt() + + # Forma padrรฃo (bbox) [0,0,0,0] -> [x1,y1,x2,y2] + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # Instruรงรฃo: texto + ann = prompt_process.text_prompt(text='uma foto de um cachorro') + + # Instruรงรฃo: ponto + # pontos padrรฃo [[0,0]] [[x1,y1],[x2,y2]] + # ponto_label padrรฃo [0] [1,0] 0:fundo, 1:frente + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # Carregar um modelo FastSAM e segmentar tudo com ele + yolo segment predict model=FastSAM-s.pt source=caminho/para/onibus.jpg imgsz=640 + ``` + +Este trecho de cรณdigo demonstra a simplicidade de carregar um modelo prรฉ-treinado e executar uma prediรงรฃo em uma imagem. 
+ +### Uso de Validaรงรฃo + +A validaรงรฃo do modelo em um conjunto de dados pode ser feita da seguinte forma: + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics import FastSAM + + # Criar um modelo FastSAM + model = FastSAM('FastSAM-s.pt') # ou FastSAM-x.pt + + # Validar o modelo + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # Carregar um modelo FastSAM e validรก-lo no conjunto de dados de exemplo COCO8 com tamanho de imagem 640 + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +Observe que o FastSAM suporta apenas detecรงรฃo e segmentaรงรฃo de uma รบnica classe de objeto. Isso significa que ele reconhecerรก e segmentarรก todos os objetos como a mesma classe. Portanto, ao preparar o conjunto de dados, vocรช precisarรก converter todos os IDs de categoria de objeto para 0. + +## Uso Oficial do FastSAM + +O FastSAM tambรฉm estรก disponรญvel diretamente no repositรณrio [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM). Aqui estรก uma visรฃo geral breve das etapas tรญpicas que vocรช pode seguir para usar o FastSAM: + +### Instalaรงรฃo + +1. Clone o repositรณrio do FastSAM: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. Crie e ative um ambiente Conda com Python 3.9: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. Navegue atรฉ o repositรณrio clonado e instale os pacotes necessรกrios: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. Instale o modelo CLIP: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### Exemplo de Uso + +1. Baixe um [checkpoint do modelo](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing). + +2. Use o FastSAM para inferรชncia. Exemplos de comandos: + + - Segmentar tudo em uma imagem: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - Segmentar objetos especรญficos usando uma instruรงรฃo de texto: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "o cachorro amarelo" + ``` + + - Segmentar objetos dentro de uma caixa delimitadora (fornecer coordenadas da caixa no formato xywh): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - Segmentar objetos prรณximos a pontos especรญficos: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +Alรฉm disso, vocรช pode experimentar o FastSAM atravรฉs de um [demo no Colab](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) ou no [demo web do HuggingFace](https://huggingface.co/spaces/An-619/FastSAM) para ter uma experiรชncia visual. + +## Citaรงรตes e Reconhecimentos + +Gostarรญamos de reconhecer os autores do FastSAM por suas contribuiรงรตes significativas no campo da segmentaรงรฃo de instรขncias em tempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +O artigo original do FastSAM pode ser encontrado no [arXiv](https://arxiv.org/abs/2306.12156). 
Os autores disponibilizaram seu trabalho publicamente, e o cรณdigo pode ser acessado no [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). Agradecemos seus esforรงos em avanรงar o campo e tornar seu trabalho acessรญvel ร  comunidade em geral. diff --git a/ultralytics/docs/pt/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/pt/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/index.md b/ultralytics/docs/pt/models/index.md new file mode 100755 index 0000000..c7958f3 --- /dev/null +++ b/ultralytics/docs/pt/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Explore a ampla gama de modelos da famรญlia YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS e RT-DETR suportados pela Ultralytics. Comece com exemplos para uso tanto em CLI quanto em Python. +keywords: Ultralytics, documentaรงรฃo, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, modelos, arquiteturas, Python, CLI +--- + +# Modelos Suportados pela Ultralytics + +Bem-vindo ร  documentaรงรฃo de modelos da Ultralytics! Oferecemos suporte para uma ampla variedade de modelos, cada um adaptado para tarefas especรญficas como [detecรงรฃo de objetos](../tasks/detect.md), [segmentaรงรฃo de instรขncias](../tasks/segment.md), [classificaรงรฃo de imagens](../tasks/classify.md), [estimativa de pose](../tasks/pose.md), e [rastreamento de mรบltiplos objetos](../modes/track.md). Se vocรช tem interesse em contribuir com sua arquitetura de modelo para a Ultralytics, confira nosso [Guia de Contribuiรงรฃo](../../help/contributing.md). + +!!! Note "Nota" + + ๐Ÿšง Nossa documentaรงรฃo em vรกrios idiomas estรก atualmente em construรงรฃo, e estamos trabalhando arduamente para melhorรก-la. Agradecemos sua paciรชncia! ๐Ÿ™ + +## Modelos em Destaque + +Aqui estรฃo alguns dos principais modelos suportados: + +1. **[YOLOv3](yolov3.md)**: A terceira iteraรงรฃo da famรญlia de modelos YOLO, originalmente por Joseph Redmon, conhecida por suas capacidades eficientes de detecรงรฃo de objetos em tempo real. +2. **[YOLOv4](yolov4.md)**: Uma atualizaรงรฃo nativa para o darknet do YOLOv3, lanรงada por Alexey Bochkovskiy em 2020. +3. **[YOLOv5](yolov5.md)**: Uma versรฃo aprimorada da arquitetura YOLO pela Ultralytics, oferecendo melhor desempenho e compensaรงรตes de velocidade em comparaรงรฃo com as versรตes anteriores. +4. **[YOLOv6](yolov6.md)**: Lanรงado pela [Meituan](https://about.meituan.com/) em 2022, e em uso em muitos dos robรดs autรดnomos de entregas da empresa. +5. **[YOLOv7](yolov7.md)**: Modelos YOLO atualizados lanรงados em 2022 pelos autores do YOLOv4. +6. **[YOLOv8](yolov8.md) NOVO ๐Ÿš€**: A versรฃo mais recente da famรญlia YOLO, apresentando capacidades aprimoradas, como segmentaรงรฃo de instรขncias, estimativa de pose/pontos-chave e classificaรงรฃo. +7. **[Segment Anything Model (SAM)](sam.md)**: Modelo Segment Anything (SAM) da Meta. +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM para aplicaรงรตes mรณveis, pela Universidade Kyung Hee. +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM pelo Grupo de Anรกlise de Imagem e Vรญdeo, Instituto de Automaรงรฃo, Academia Chinesa de Ciรชncias. +10. **[YOLO-NAS](yolo-nas.md)**: Modelos de Pesquisa de Arquitetura Neural YOLO (NAS). +11. 
**[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Modelos de Transformador de Detecรงรฃo em Tempo Real (RT-DETR) do PaddlePaddle da Baidu. + +
+<p align="center">
+  <!-- vídeo do YouTube incorporado -->
+  <br>
+  <strong>Assista:</strong> Execute modelos YOLO da Ultralytics em apenas algumas linhas de código.
+</p>
+ +## Introduรงรฃo: Exemplos de Uso + +Este exemplo oferece exemplos simples de treinamento e inferรชncia com YOLO. Para uma documentaรงรฃo completa sobre estes e outros [modos](../modes/index.md), veja as pรกginas de documentaรงรฃo de [Previsรฃo](../modes/predict.md), [Treinamento](../modes/train.md), [Validaรงรฃo](../modes/val.md) e [Exportaรงรฃo](../modes/export.md). + +Note que o exemplo abaixo รฉ para modelos YOLOv8 [Detect](../tasks/detect.md) para detecรงรฃo de objetos. Para tarefas suportadas adicionais, veja as documentaรงรตes de [Segmentaรงรฃo](../tasks/segment.md), [Classificaรงรฃo](../tasks/classify.md) e [Pose](../tasks/pose.md). + +!!! Example "Exemplo" + + === "Python" + + Modelos `*.pt` prรฉ-treinados em PyTorch, bem como arquivos de configuraรงรฃo `*.yaml`, podem ser passados para as classes `YOLO()`, `SAM()`, `NAS()` e `RTDETR()` para criar uma instรขncia de modelo em Python: + + ```python + from ultralytics import YOLO + + # Carregar um modelo YOLOv8n prรฉ-treinado no COCO + modelo = YOLO('yolov8n.pt') + + # Exibir informaรงรตes do modelo (opcional) + modelo.info() + + # Treinar o modelo no conjunto de dados de exemplo COCO8 por 100 รฉpocas + resultados = modelo.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Executar inferรชncia com o modelo YOLOv8n na imagem 'bus.jpg' + resultados = modelo('path/to/bus.jpg') + ``` + + === "CLI" + + Comandos CLI estรฃo disponรญveis para executar diretamente os modelos: + + ```bash + # Carregar um modelo YOLOv8n prรฉ-treinado no COCO e treinรก-lo no conjunto de dados de exemplo COCO8 por 100 รฉpocas + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Carregar um modelo YOLOv8n prรฉ-treinado no COCO e executar inferรชncia na imagem 'bus.jpg' + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## Contribuindo com Novos Modelos + +Interessado em contribuir com seu modelo para a Ultralytics? ร“timo! Estamos sempre abertos a expandir nosso portfรณlio de modelos. + +1. **Fork do Repositรณrio**: Comece fazendo um fork do [repositรณrio no GitHub da Ultralytics](https://github.com/ultralytics/ultralytics). + +2. **Clone Seu Fork**: Clone seu fork para a sua mรกquina local e crie uma nova branch para trabalhar. + +3. **Implemente Seu Modelo**: Adicione seu modelo seguindo as normas e diretrizes de codificaรงรฃo fornecidas no nosso [Guia de Contribuiรงรฃo](../../help/contributing.md). + +4. **Teste Cuidadosamente**: Assegure-se de testar seu modelo rigorosamente, tanto isoladamente quanto como parte do pipeline. + +5. **Crie um Pull Request**: Uma vez que estiver satisfeito com seu modelo, crie um pull request para o repositรณrio principal para revisรฃo. + +6. **Revisรฃo de Cรณdigo & Mesclagem**: Apรณs a revisรฃo, se seu modelo atender aos nossos critรฉrios, ele serรก integrado ao repositรณrio principal. + +Para etapas detalhadas, consulte nosso [Guia de Contribuiรงรฃo](../../help/contributing.md). 
diff --git a/ultralytics/docs/pt/models/index.md:Zone.Identifier b/ultralytics/docs/pt/models/index.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/models/index.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/models/mobile-sam.md b/ultralytics/docs/pt/models/mobile-sam.md
new file mode 100755
index 0000000..e4ffdc5
--- /dev/null
+++ b/ultralytics/docs/pt/models/mobile-sam.md
@@ -0,0 +1,116 @@
+---
+comments: true
+description: Saiba mais sobre o MobileSAM, sua implementação, comparação com o SAM original e como baixá-lo e testá-lo no framework Ultralytics. Melhore suas aplicações móveis hoje.
+keywords: MobileSAM, Ultralytics, SAM, aplicações móveis, Arxiv, GPU, API, codificador de imagens, decodificador de máscaras, download do modelo, método de teste
+---
+
+![Logotipo do MobileSAM](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true)
+
+# Segmentação Móvel de Qualquer Coisa (MobileSAM)
+
+O artigo do MobileSAM agora está disponível no [arXiv](https://arxiv.org/pdf/2306.14289.pdf).
+
+Uma demonstração do MobileSAM executando em uma CPU pode ser acessada neste [link de demonstração](https://huggingface.co/spaces/dhkim2810/MobileSAM). Em uma CPU de um Mac i5, a inferência leva aproximadamente 3 segundos. Na demonstração do Hugging Face, a interface e as CPUs de menor desempenho tornam a resposta mais lenta, mas ela continua funcionando efetivamente.
+
+O MobileSAM é implementado em vários projetos, incluindo [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling) e [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D).
+
+O MobileSAM é treinado em uma única GPU com um conjunto de dados de 100 mil imagens (1% das imagens originais) em menos de um dia. O código para esse treinamento será disponibilizado no futuro.
+
+## Modelos Disponíveis, Tarefas Suportadas e Modos de Operação
+
+Esta tabela apresenta os modelos disponíveis com seus pesos pré-treinados específicos, as tarefas que eles suportam e sua compatibilidade com diferentes modos de operação, como [Inferência](../modes/predict.md), [Validação](../modes/val.md), [Treinamento](../modes/train.md) e [Exportação](../modes/export.md), indicados pelos emojis ✅ para os modos suportados e ❌ para os modos não suportados.
+
+| Tipo de Modelo | Pesos Pré-treinados | Tarefas Suportadas                               | Inferência | Validação | Treinamento | Exportação |
+|----------------|---------------------|--------------------------------------------------|------------|-----------|-------------|------------|
+| MobileSAM      | `mobile_sam.pt`     | [Segmentação de Instâncias](../tasks/segment.md) | ✅          | ❌         | ❌           | ✅          |
+
+## Adaptação de SAM para MobileSAM
+
+Como o MobileSAM mantém o mesmo pipeline do SAM original, incorporamos o pré-processamento original, o pós-processamento e todas as outras interfaces. Consequentemente, quem usa o SAM original hoje pode fazer a transição para o MobileSAM com um esforço mínimo.
+
+O MobileSAM tem um desempenho comparável ao SAM original e mantém o mesmo pipeline, exceto por uma mudança no codificador de imagens. Especificamente, substituímos o codificador de imagens ViT-H original (632M) por um ViT menor (5M). Em uma única GPU, o MobileSAM opera em cerca de 12 ms por imagem: 8 ms no codificador de imagens e 4 ms no decodificador de máscaras.
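+Como ilustração, segue um esboço mínimo de como medir essa latência com a interface `SAM` do pacote `ultralytics` (esboço hipotético: os números dependem do hardware, e a medição inclui pré e pós-processamento, portanto não corresponde exatamente aos 12 ms citados, que se referem apenas ao codificador e ao decodificador):
+
+```python
+import time
+
+from ultralytics import SAM
+
+# Carregar o MobileSAM pela interface SAM do Ultralytics
+model = SAM('mobile_sam.pt')
+
+# Aquecimento, para excluir custos de inicialização da medição
+model('ultralytics/assets/zidane.jpg')
+
+# Latência média de inferência ao longo de algumas execuções
+n = 10
+inicio = time.perf_counter()
+for _ in range(n):
+    model('ultralytics/assets/zidane.jpg')
+print(f'Latência média: {(time.perf_counter() - inicio) / n * 1000:.1f} ms/imagem')
+```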
+ +A tabela a seguir fornece uma comparaรงรฃo dos codificadores de imagens baseados em ViT: + +| Codificador de Imagens | SAM Original | MobileSAM | +|------------------------|--------------|-----------| +| Parรขmetros | 611M | 5M | +| Velocidade | 452ms | 8ms | + +Tanto o SAM original quanto o MobileSAM utilizam o mesmo decodificador de mรกscaras baseado em prompt: + +| Decodificador de Mรกscaras | SAM Original | MobileSAM | +|---------------------------|--------------|-----------| +| Parรขmetros | 3,876M | 3,876M | +| Velocidade | 4ms | 4ms | + +Aqui estรก a comparaรงรฃo de todo o pipeline: + +| Pipeline Completo (Enc+Dec) | SAM Original | MobileSAM | +|-----------------------------|--------------|-----------| +| Parรขmetros | 615M | 9,66M | +| Velocidade | 456ms | 12ms | + +O desempenho do MobileSAM e do SAM original รฉ demonstrado usando tanto um ponto quanto uma caixa como prompts. + +![Imagem com Ponto como Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![Imagem com Caixa como Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +Com seu desempenho superior, o MobileSAM รฉ aproximadamente 5 vezes menor e 7 vezes mais rรกpido que o FastSAM atual. Mais detalhes estรฃo disponรญveis na [pรกgina do projeto MobileSAM](https://github.com/ChaoningZhang/MobileSAM). + +## Testando o MobileSAM no Ultralytics + +Assim como o SAM original, oferecemos um mรฉtodo de teste simples no Ultralytics, incluindo modos para prompts de Ponto e Caixa. + +### Download do Modelo + +Vocรช pode baixar o modelo [aqui](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt). + +### Prompt de Ponto + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics import SAM + + # Carregar o modelo + model = SAM('mobile_sam.pt') + + # Prever um segmento com base em um prompt de ponto + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### Prompt de Caixa + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics import SAM + + # Carregar o modelo + model = SAM('mobile_sam.pt') + + # Prever um segmento com base em um prompt de caixa + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +Implementamos `MobileSAM` e `SAM` usando a mesma API. Para obter mais informaรงรตes sobre o uso, consulte a [pรกgina do SAM](sam.md). + +## Citaรงรตes e Agradecimentos + +Se vocรช achar o MobileSAM รบtil em sua pesquisa ou trabalho de desenvolvimento, considere citar nosso artigo: + +!!! 
Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{mobile_sam,
+          title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications},
+          author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon},
+          journal={arXiv preprint arXiv:2306.14289},
+          year={2023}
+        }
+        ```
diff --git a/ultralytics/docs/pt/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/pt/models/mobile-sam.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/models/mobile-sam.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/models/rtdetr.md b/ultralytics/docs/pt/models/rtdetr.md
new file mode 100755
index 0000000..1d8a252
--- /dev/null
+++ b/ultralytics/docs/pt/models/rtdetr.md
@@ -0,0 +1,93 @@
+---
+comments: true
+description: Descubra as características e benefícios do RT-DETR da Baidu, um detector de objetos em tempo real eficiente e adaptável baseado em Vision Transformers, incluindo modelos pré-treinados.
+keywords: RT-DETR, Baidu, Vision Transformers, detecção de objetos, desempenho em tempo real, CUDA, TensorRT, seleção de consulta IoU, Ultralytics, API Python, PaddlePaddle
+---
+
+# RT-DETR da Baidu: Um Detector de Objetos em Tempo Real Baseado em Vision Transformers
+
+## Visão Geral
+
+O Real-Time Detection Transformer (RT-DETR), desenvolvido pela Baidu, é um detector de objetos de última geração que proporciona desempenho em tempo real mantendo alta precisão. Ele utiliza a potência dos Vision Transformers (ViT) para processar eficientemente recursos multiescala, separando a interação intra-escala e a fusão entre escalas. O RT-DETR é altamente adaptável, com suporte para ajuste flexível da velocidade de inferência usando diferentes camadas de decodificador sem a necessidade de retreinamento. O modelo se destaca em backends acelerados como o CUDA com o TensorRT, superando muitos outros detectores de objetos em tempo real.
+
+![Exemplo de imagem do modelo](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Visão geral do RT-DETR da Baidu.** O diagrama da arquitetura do modelo RT-DETR mostra as últimas três etapas da espinha dorsal {S3, S4, S5} como entrada para o codificador. O codificador híbrido eficiente transforma recursos multiescala em uma sequência de recursos de imagem por meio da interação de recursos intra-escala (AIFI) e do módulo de fusão de recursos entre escalas (CCFM). A seleção de consulta, consciente da IoU, é utilizada para selecionar um número fixo de recursos de imagem para servir como consultas de objeto iniciais para o decodificador. Por fim, o decodificador com cabeçotes de previsão auxiliares otimiza iterativamente as consultas de objeto para gerar caixas e pontuações de confiança ([fonte](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Características Principais
+
+- **Codificador Híbrido Eficiente:** O RT-DETR da Baidu utiliza um codificador híbrido eficiente para processar recursos multiescala por meio da separação da interação intra-escala e da fusão entre escalas. Esse design exclusivo baseado em Vision Transformers reduz os custos computacionais e permite a detecção de objetos em tempo real.
+- **Seleção de Consulta Consciente de IoU:** O RT-DETR da Baidu melhora a inicialização das consultas de objeto ao utilizar seleção de consulta consciente de IoU.
Isso permite que o modelo foque nos objetos mais relevantes na cena, aprimorando a precisão da detecção.
+- **Velocidade de Inferência Adaptável:** O RT-DETR da Baidu suporta ajustes flexíveis da velocidade de inferência ao utilizar diferentes camadas de decodificador sem a necessidade de retreinamento. Essa adaptabilidade facilita a aplicação prática em diversos cenários de detecção de objetos em tempo real.
+
+## Modelos Pré-Treinados
+
+A API Python do Ultralytics fornece modelos pré-treinados do RT-DETR do PaddlePaddle com diferentes escalas:
+
+- RT-DETR-L: 53,0% de AP em COCO val2017, 114 FPS em GPU T4
+- RT-DETR-X: 54,8% de AP em COCO val2017, 74 FPS em GPU T4
+
+## Exemplos de Uso
+
+Esta seção fornece exemplos simples de treinamento e inferência com o RT-DETR. Para obter documentação completa sobre esses e outros [modos](../modes/index.md), consulte as páginas de documentação [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) e [Export](../modes/export.md).
+
+!!! Example "Exemplo"
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Carregue um modelo RT-DETR-l pré-treinado no COCO
+        model = RTDETR('rtdetr-l.pt')
+
+        # Exiba informações do modelo (opcional)
+        model.info()
+
+        # Treine o modelo com o conjunto de dados de exemplo COCO8 por 100 épocas
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Execute a inferência com o modelo RT-DETR-l na imagem 'bus.jpg'
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Carregue um modelo RT-DETR-l pré-treinado no COCO e treine-o com o conjunto de dados de exemplo COCO8 por 100 épocas
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Carregue um modelo RT-DETR-l pré-treinado no COCO e execute a inferência na imagem 'bus.jpg'
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Tarefas e Modos Suportados
+
+Esta tabela apresenta os tipos de modelo, os pesos pré-treinados específicos, as tarefas suportadas por cada modelo e os vários modos ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) que são suportados, indicados por emojis ✅.
+
+| Tipo de Modelo       | Pesos Pré-treinados | Tarefas Suportadas                        | Inferência | Validação | Treinamento | Exportação |
+|----------------------|---------------------|-------------------------------------------|------------|-----------|-------------|------------|
+| RT-DETR Grande       | `rtdetr-l.pt`       | [Detecção de Objetos](../tasks/detect.md) | ✅          | ✅         | ✅           | ✅          |
+| RT-DETR Extra-Grande | `rtdetr-x.pt`       | [Detecção de Objetos](../tasks/detect.md) | ✅          | ✅         | ✅           | ✅          |
+
+## Citações e Reconhecimentos
+
+Se você utilizar o RT-DETR da Baidu em seu trabalho de pesquisa ou desenvolvimento, por favor cite o [artigo original](https://arxiv.org/abs/2304.08069):
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{lv2023detrs,
+              title={DETRs Beat YOLOs on Real-time Object Detection},
+              author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
+              year={2023},
+              eprint={2304.08069},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+Gostaríamos de agradecer à Baidu e à equipe do [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) por criar e manter esse recurso valioso para a comunidade de visão computacional.
Sua contribuiรงรฃo para o campo com o desenvolvimento do detector de objetos em tempo real baseado em Vision Transformers, RT-DETR, รฉ muito apreciada. + +*keywords: RT-DETR, Transformer, ViT, Vision Transformers, RT-DETR da Baidu, PaddlePaddle, modelos prรฉ-treinados PaddlePaddle RT-DETR, uso do RT-DETR da Baidu, API Python do Ultralytics* diff --git a/ultralytics/docs/pt/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/pt/models/rtdetr.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/rtdetr.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/sam.md b/ultralytics/docs/pt/models/sam.md new file mode 100755 index 0000000..408af95 --- /dev/null +++ b/ultralytics/docs/pt/models/sam.md @@ -0,0 +1,226 @@ +--- +comments: true +description: Explore o Modelo de Segmentaรงรฃo de Qualquer Coisa (SAM) de รบltima geraรงรฃo da Ultralytics que permite a segmentaรงรฃo de imagens em tempo real. Aprenda sobre a segmentaรงรฃo baseada em prompts, o desempenho de transferรชncia zero e como utilizรก-lo. +keywords: Ultralytics, segmentaรงรฃo de imagem, Modelo de Segmentaรงรฃo de Qualquer Coisa, SAM, conjunto de dados SA-1B, desempenho em tempo real, transferรชncia zero, detecรงรฃo de objetos, anรกlise de imagens, aprendizado de mรกquina +--- + +# Modelo de Segmentaรงรฃo de Qualquer Coisa (SAM) + +Bem-vindo ร  fronteira da segmentaรงรฃo de imagem com o Modelo de Segmentaรงรฃo de Qualquer Coisa, ou SAM. Este modelo revolucionรกrio mudou o jogo ao introduzir a segmentaรงรฃo de imagem baseada em prompts com desempenho em tempo real, estabelecendo novos padrรตes no campo. + +## Introduรงรฃo ao SAM: O Modelo de Segmentaรงรฃo de Qualquer Coisa + +O Modelo de Segmentaรงรฃo de Qualquer Coisa, ou SAM, รฉ um modelo de segmentaรงรฃo de imagem de ponta que permite a segmentaรงรฃo baseada em prompts, proporcionando uma versatilidade incomparรกvel em tarefas de anรกlise de imagem. O SAM รฉ o cerne da iniciativa Segment Anything, um projeto inovador que introduz um modelo, tarefa e conjunto de dados novos para a segmentaรงรฃo de imagem. + +O design avanรงado do SAM permite que ele se adapte a novas distribuiรงรตes de imagem e tarefas sem conhecimento prรฉvio, um recurso conhecido como transferรชncia zero. Treinado no abrangente [conjunto de dados SA-1B](https://ai.facebook.com/datasets/segment-anything/), que contรฉm mais de 1 bilhรฃo de mรกscaras espalhadas por 11 milhรตes de imagens cuidadosamente selecionadas, o SAM tem demonstrado um impressionante desempenho de transferรชncia zero, superando os resultados totalmente supervisionados anteriores em muitos casos. + +![Exemplo de imagem do conjunto de dados](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +Imagens de exemplo com mรกscaras sobrepostas do nosso conjunto de dados recรฉm-introduzido, SA-1B. O SA-1B contรฉm 11 milhรตes de imagens diversas, de alta resoluรงรฃo, licenciadas e com proteรงรฃo de privacidade, e 1,1 bilhรฃo de mรกscaras de segmentaรงรฃo de alta qualidade. Essas mรกscaras foram anotadas totalmente automaticamente pelo SAM, e, como verificado por classificaรงรตes humanas e inรบmeros experimentos, sรฃo de alta qualidade e diversidade. As imagens sรฃo agrupadas pelo nรบmero de mรกscaras por imagem para visualizaรงรฃo (em mรฉdia, hรก โˆผ100 mรกscaras por imagem). 
+ +## Recursos Principais do Modelo de Segmentaรงรฃo de Qualquer Coisa (SAM) + +- **Tarefa de Segmentaรงรฃo Baseada em Prompts:** O SAM foi projetado com uma tarefa de segmentaรงรฃo baseada em prompts em mente, permitindo que ele gere mรกscaras de segmentaรงรฃo vรกlidas a partir de qualquer prompt fornecido, como dicas espaciais ou textuais que identifiquem um objeto. +- **Arquitetura Avanรงada:** O Modelo de Segmentaรงรฃo de Qualquer Coisa utiliza um poderoso codificador de imagens, um codificador de prompts e um decodificador de mรกscaras leve. Essa arquitetura รบnica possibilita o uso flexรญvel de prompts, cรกlculo de mรกscaras em tempo real e consciรชncia de ambiguidade em tarefas de segmentaรงรฃo. +- **O Conjunto de Dados SA-1B:** Introduzido pelo projeto Segment Anything, o conjunto de dados SA-1B apresenta mais de 1 bilhรฃo de mรกscaras em 11 milhรตes de imagens. Como o maior conjunto de dados de segmentaรงรฃo atรฉ o momento, ele fornece ao SAM uma fonte diversificada e em grande escala de dados de treinamento. +- **Desempenho de Transferรชncia Zero:** O SAM apresenta um desempenho de transferรชncia zero excepcional em diversas tarefas de segmentaรงรฃo, tornando-se uma ferramenta pronta para uso em aplicaรงรตes diversas com necessidade mรญnima de engenharia de prompts. + +Para obter uma visรฃo mais aprofundada do Modelo de Segmentaรงรฃo de Qualquer Coisa e do conjunto de dados SA-1B, visite o [site do Segment Anything](https://segment-anything.com) e consulte o artigo de pesquisa [Segment Anything](https://arxiv.org/abs/2304.02643). + +## Modelos Disponรญveis, Tarefas Suportadas e Modos de Operaรงรฃo + +Esta tabela apresenta os modelos disponรญveis com seus pesos prรฉ-treinados especรญficos, as tarefas suportadas por eles e sua compatibilidade com diferentes modos de operaรงรฃo, como [Inferรชncia](../modes/predict.md), [Validaรงรฃo](../modes/val.md), [Treinamento](../modes/train.md) e [Exportaรงรฃo](../modes/export.md), indicados pelos emojis โœ… para modos suportados e โŒ para modos nรฃo suportados. + +| Tipo de Modelo | Pesos Prรฉ-Treinados | Tarefas Suportadas | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|----------------|---------------------|--------------------------------------------------|------------|-----------|-------------|------------| +| SAM base | `sam_b.pt` | [Segmentaรงรฃo de Instรขncias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| SAM large | `sam_l.pt` | [Segmentaรงรฃo de Instรขncias](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## Como Usar o SAM: Versatilidade e Poder na Segmentaรงรฃo de Imagens + +O Modelo de Segmentaรงรฃo de Qualquer Coisa pode ser utilizado para uma variedade de tarefas secundรกrias que vรฃo alรฉm dos dados de treinamento. Isso inclui detecรงรฃo de bordas, geraรงรฃo de propostas de objeto, segmentaรงรฃo de instรขncias e prediรงรฃo preliminar de texto para mรกscara. Com a engenharia de prompts, o SAM pode se adaptar rapidamente a novas tarefas e distribuiรงรตes de dados de maneira inovadora, estabelecendo-se como uma ferramenta versรกtil e poderosa para todas as suas necessidades de segmentaรงรฃo de imagem. + +### Exemplo de prediรงรฃo do SAM + +!!! Example "Segmentar com prompts" + + Segmenta a imagem com prompts fornecidos. 
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Carregar o modelo
+        modelo = SAM('sam_b.pt')
+
+        # Exibir informações do modelo (opcional)
+        modelo.info()
+
+        # Executar inferência com prompt de bboxes
+        modelo('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+
+        # Executar inferência com prompt de pontos
+        modelo('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+!!! Example "Segmentar tudo"
+
+    Segmenta toda a imagem.
+
+    === "Python"
+
+        ```python
+        from ultralytics import SAM
+
+        # Carregar o modelo
+        modelo = SAM('sam_b.pt')
+
+        # Exibir informações do modelo (opcional)
+        modelo.info()
+
+        # Executar inferência
+        modelo('caminho/para/imagem.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Executar inferência com um modelo SAM
+        yolo predict model=sam_b.pt source=caminho/para/imagem.jpg
+        ```
+
+- A lógica aqui é segmentar toda a imagem se nenhum prompt (bboxes/pontos/máscaras) for especificado.
+
+!!! Example "Exemplo do SAMPredictor"
+
+    Desta forma, você pode definir a imagem uma vez e executar inferência de prompts várias vezes sem executar o codificador de imagem várias vezes.
+
+    === "Inferência com prompt"
+
+        ```python
+        import cv2
+
+        from ultralytics.models.sam import Predictor as SAMPredictor
+
+        # Criar o SAMPredictor (o argumento da API chama-se `overrides`)
+        substituições = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
+        predictor = SAMPredictor(overrides=substituições)
+
+        # Definir imagem (use apenas uma das duas formas)
+        predictor.set_image("ultralytics/assets/zidane.jpg")  # definir com arquivo de imagem
+        predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg"))  # definir com np.ndarray
+        results = predictor(bboxes=[439, 437, 524, 709])
+        results = predictor(points=[900, 370], labels=[1])
+
+        # Redefinir imagem
+        predictor.reset_image()
+        ```
+
+    Segmentar tudo com argumentos adicionais.
+
+    === "Segmentar tudo"
+
+        ```python
+        from ultralytics.models.sam import Predictor as SAMPredictor
+
+        # Criar o SAMPredictor (o argumento da API chama-se `overrides`)
+        substituições = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
+        predictor = SAMPredictor(overrides=substituições)
+
+        # Segmentar com argumentos adicionais
+        results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64)
+        ```
+
+- Para mais argumentos adicionais de `Segmentar tudo`, consulte a [Referência do `Predictor/generate`](../../../reference/models/sam/predict.md).
+
+## Comparação SAM vs. YOLOv8
+
+Aqui, comparamos o menor modelo SAM-b da Meta com o menor modelo de segmentação da Ultralytics, [YOLOv8n-seg](../tasks/segment.md):
+
+| Modelo                                        | Tamanho                       | Parâmetros                     | Velocidade (CPU)                     |
+|-----------------------------------------------|-------------------------------|--------------------------------|--------------------------------------|
+| SAM-b da Meta                                 | 358 MB                        | 94,7 M                         | 51096 ms/im                          |
+| [MobileSAM](mobile-sam.md)                    | 40,7 MB                       | 10,1 M                         | 46122 ms/im                          |
+| [FastSAM-s](fast-sam.md) com YOLOv8 como base | 23,7 MB                       | 11,8 M                         | 115 ms/im                            |
+| YOLOv8n-seg da Ultralytics                    | **6,7 MB** (53,4 vezes menor) | **3,4 M** (27,9 vezes a menos) | **59 ms/im** (866 vezes mais rápido) |
+
+Essa comparação mostra as diferenças de ordem de magnitude nos tamanhos e velocidades dos modelos. Enquanto o SAM apresenta capacidades exclusivas para segmentação automática, ele não é um concorrente direto dos modelos de segmentação YOLOv8, que são menores, mais rápidos e mais eficientes.
+ +Os testes foram executados em um MacBook Apple M2 de 2023 com 16GB de RAM. Para reproduzir este teste: + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # Perfil do SAM-b + modelo = SAM('sam_b.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil do MobileSAM + modelo = SAM('mobile_sam.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil do FastSAM-s + modelo = FastSAM('FastSAM-s.pt') + modelo.info() + modelo('ultralytics/assets') + + # Perfil do YOLOv8n-seg + modelo = YOLO('yolov8n-seg.pt') + modelo.info() + modelo('ultralytics/assets') + ``` + +## Autoanotaรงรฃo: Um Caminho Rรกpido para Conjuntos de Dados de Segmentaรงรฃo + +A autoanotaรงรฃo รฉ um recurso-chave do SAM que permite aos usuรกrios gerar um [conjunto de dados de segmentaรงรฃo](https://docs.ultralytics.com/datasets/segment) usando um modelo de detecรงรฃo prรฉ-treinado. Esse recurso permite a anotaรงรฃo rรกpida e precisa de um grande nรบmero de imagens, contornando a necessidade de anotaรงรฃo manual demorada. + +### Gere seu Conjunto de Dados de Segmentaรงรฃo Usando um Modelo de Detecรงรฃo + +Para fazer a autoanotaรงรฃo do seu conjunto de dados com o framework Ultralytics, use a funรงรฃo `auto_annotate` conforme mostrado abaixo: + +!!! Example "Exemplo" + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="caminho/para/imagens", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| Argumento | Tipo | Descriรงรฃo | Padrรฃo | +|------------|---------------------|-----------------------------------------------------------------------------------------------------------|--------------| +| data | str | Caminho para uma pasta que contรฉm as imagens a serem anotadas. | | +| det_model | str, opcional | Modelo de detecรงรฃo YOLO prรฉ-treinado. O padrรฃo รฉ 'yolov8x.pt'. | 'yolov8x.pt' | +| sam_model | str, opcional | Modelo de segmentaรงรฃo SAM prรฉ-treinado. O padrรฃo รฉ 'sam_b.pt'. | 'sam_b.pt' | +| device | str, opcional | Dispositivo no qual executar os modelos. O padrรฃo รฉ uma string vazia (CPU ou GPU, se disponรญvel). | | +| output_dir | str, None, opcional | Diretรณrio para salvar os resultados anotados. O padrรฃo รฉ uma pasta 'labels' no mesmo diretรณrio de 'data'. | None | + +A funรงรฃo `auto_annotate` recebe o caminho para suas imagens, com argumentos opcionais para especificar os modelos de detecรงรฃo prรฉ-treinados e de segmentaรงรฃo SAM, o dispositivo onde executar os modelos e o diretรณrio de saรญda para salvar os resultados anotados. + +A autoanotaรงรฃo com modelos prรฉ-treinados pode reduzir drasticamente o tempo e o esforรงo necessรกrios para criar conjuntos de dados de segmentaรงรฃo de alta qualidade. Esse recurso รฉ especialmente benรฉfico para pesquisadores e desenvolvedores que lidam com grandes coleรงรตes de imagens, pois permite que eles se concentrem no desenvolvimento e na avaliaรงรฃo do modelo, em vez de na anotaรงรฃo manual. + +## Citaรงรตes e Reconhecimentos + +Se vocรช encontrar o SAM รบtil em seu trabalho de pesquisa ou desenvolvimento, considere citar nosso artigo: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. 
Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +Gostarรญamos de expressar nossa gratidรฃo ร  Meta AI por criar e manter esse recurso valioso para a comunidade de visรฃo computacional. + +*keywords: Segment Anything, Modelo de Segmentaรงรฃo de Qualquer Coisa, SAM, SAM da Meta, segmentaรงรฃo de imagem, segmentaรงรฃo baseada em prompts, desempenho de transferรชncia zero, conjunto de dados SA-1B, arquitetura avanรงada, autoanotaรงรฃo, Ultralytics, modelos prรฉ-treinados, SAM base, SAM large, segmentaรงรฃo de instรขncias, visรฃo computacional, IA, inteligรชncia artificial, aprendizado de mรกquina, anotaรงรฃo de dados, mรกscaras de segmentaรงรฃo, modelo de detecรงรฃo, modelo de detecรงรฃo YOLO, bibtex, Meta AI.* diff --git a/ultralytics/docs/pt/models/sam.md:Zone.Identifier b/ultralytics/docs/pt/models/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolo-nas.md b/ultralytics/docs/pt/models/yolo-nas.md new file mode 100755 index 0000000..2a10705 --- /dev/null +++ b/ultralytics/docs/pt/models/yolo-nas.md @@ -0,0 +1,121 @@ +--- +comments: true +description: Explore a documentaรงรฃo detalhada do YOLO-NAS, um modelo superior de detecรงรฃo de objetos. Saiba mais sobre suas funcionalidades, modelos prรฉ-treinados, uso com a API do Ultralytics Python e muito mais. +keywords: YOLO-NAS, Deci AI, detecรงรฃo de objetos, aprendizado profundo, busca de arquitetura neural, API do Ultralytics Python, modelo YOLO, modelos prรฉ-treinados, quantizaรงรฃo, otimizaรงรฃo, COCO, Objects365, Roboflow 100 +--- + +# YOLO-NAS + +## Visรฃo Geral + +Desenvolvido pela Deci AI, o YOLO-NAS รฉ um modelo de detecรงรฃo de objetos inovador. ร‰ o produto da tecnologia avanรงada de Busca de Arquitetura Neural, meticulosamente projetado para superar as limitaรงรตes dos modelos YOLO anteriores. Com melhorias significativas no suporte ร  quantizaรงรฃo e compromisso entre precisรฃo e latรชncia, o YOLO-NAS representa um grande avanรงo na detecรงรฃo de objetos. + +![Exemplo de imagem do modelo](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**Visรฃo geral do YOLO-NAS.** O YOLO-NAS utiliza blocos que suportam quantizaรงรฃo e quantizaรงรฃo seletiva para obter um desempenho ideal. O modelo, quando convertido para sua versรฃo quantizada INT8, apresenta uma queda mรญnima na precisรฃo, uma melhoria significativa em relaรงรฃo a outros modelos. Esses avanรงos culminam em uma arquitetura superior com capacidades de detecรงรฃo de objetos sem precedentes e desempenho excepcional. + +### Principais Caracterรญsticas + +- **Bloco Bรกsico Amigรกvel para Quantizaรงรฃo:** O YOLO-NAS introduz um novo bloco bรกsico que รฉ amigo da quantizaรงรฃo, abordando uma das limitaรงรตes significativas dos modelos YOLO anteriores. +- **Treinamento e Quantizaรงรฃo Sofisticados:** O YOLO-NAS utiliza esquemas avanรงados de treinamento e quantizaรงรฃo pรณs-treinamento para melhorar o desempenho. +- **Otimizaรงรฃo AutoNAC e Prรฉ-Treinamento:** O YOLO-NAS utiliza a otimizaรงรฃo AutoNAC e รฉ prรฉ-treinado em conjuntos de dados proeminentes, como COCO, Objects365 e Roboflow 100. Esse prรฉ-treinamento torna o modelo extremamente adequado para tarefas de detecรงรฃo de objetos em ambientes de produรงรฃo. 
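+Como ilustração do fluxo de produção mencionado acima, segue um esboço mínimo e hipotético (assumindo o pacote `ultralytics` instalado e o peso `yolo_nas_s.pt` disponível) de exportação de um modelo YOLO-NAS pré-treinado para ONNX, um formato comum de implantação; a quantização INT8 propriamente dita é realizada pela ferramenta de implantação escolhida:
+
+```python
+from ultralytics import NAS
+
+# Carregar um modelo YOLO-NAS-s pré-treinado no COCO
+model = NAS('yolo_nas_s.pt')
+
+# Exportar para ONNX (modo de exportação suportado, conforme a tabela de compatibilidade mais adiante)
+model.export(format='onnx')
+```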
+ +## Modelos Prรฉ-Treinados + +Experimente o poder da detecรงรฃo de objetos de รบltima geraรงรฃo com os modelos prรฉ-treinados do YOLO-NAS fornecidos pela Ultralytics. Esses modelos foram projetados para oferecer um desempenho excelente em termos de velocidade e precisรฃo. Escolha entre vรกrias opรงรตes adaptadas ร s suas necessidades especรญficas: + +| Modelo | mAP | Latรชncia (ms) | +|------------------|-------|---------------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +Cada variante do modelo foi projetada para oferecer um equilรญbrio entre Precisรฃo Mรฉdia Mรฉdia (mAP) e latรชncia, ajudando vocรช a otimizar suas tarefas de detecรงรฃo de objetos em termos de desempenho e velocidade. + +## Exemplos de Uso + +A Ultralytics tornou os modelos YOLO-NAS fรกceis de serem integrados em suas aplicaรงรตes Python por meio de nosso pacote `ultralytics`. O pacote fornece uma API Python de fรกcil utilizaรงรฃo para simplificar o processo. + +Os seguintes exemplos mostram como usar os modelos YOLO-NAS com o pacote `ultralytics` para inferรชncia e validaรงรฃo: + +### Exemplos de Inferรชncia e Validaรงรฃo + +Neste exemplo, validamos o YOLO-NAS-s no conjunto de dados COCO8. + +!!! Example "Exemplo" + + Este exemplo fornece um cรณdigo simples de inferรชncia e validaรงรฃo para o YOLO-NAS. Para lidar com os resultados da inferรชncia, consulte o modo [Predict](../modes/predict.md). Para usar o YOLO-NAS com modos adicionais, consulte [Val](../modes/val.md) e [Export](../modes/export.md). O YOLO-NAS no pacote `ultralytics` nรฃo suporta treinamento. + + === "Python" + + Arquivos de modelos prรฉ-treinados `*.pt` do PyTorch podem ser passados para a classe `NAS()` para criar uma instรขncia do modelo em Python: + + ```python + from ultralytics import NAS + + # Carrega um modelo YOLO-NAS-s prรฉ-treinado no COCO + model = NAS('yolo_nas_s.pt') + + # Exibe informaรงรตes do modelo (opcional) + model.info() + + # Valida o modelo no conjunto de dados de exemplo COCO8 + results = model.val(data='coco8.yaml') + + # Executa inferรชncia com o modelo YOLO-NAS-s na imagem 'bus.jpg' + results = model('caminho/para/bus.jpg') + ``` + + === "CLI" + + Comandos de CLI estรฃo disponรญveis para executar diretamente os modelos: + + ```bash + # Carrega um modelo YOLO-NAS-s prรฉ-treinado no COCO e valida seu desempenho no conjunto de dados de exemplo COCO8 + yolo val model=yolo_nas_s.pt data=coco8.yaml + + # Carrega um modelo YOLO-NAS-s prรฉ-treinado no COCO e executa inferรชncia na imagem 'bus.jpg' + yolo predict model=yolo_nas_s.pt source=caminho/para/bus.jpg + ``` + +## Tarefas e Modos Compatรญveis + +Oferecemos trรชs variantes dos modelos YOLO-NAS: Pequeno (s), Mรฉdio (m) e Grande (l). Cada variante foi projetada para atender a diferentes necessidades computacionais e de desempenho: + +- **YOLO-NAS-s**: Otimizado para ambientes com recursos computacionais limitados, mas eficiรชncia รฉ fundamental. +- **YOLO-NAS-m**: Oferece uma abordagem equilibrada, adequada para detecรงรฃo de objetos em geral com maior precisรฃo. +- **YOLO-NAS-l**: Adaptado para cenรกrios que requerem a maior precisรฃo, onde os recursos computacionais sรฃo menos restritos. + +Abaixo estรก uma visรฃo geral detalhada de cada modelo, incluindo links para seus pesos prรฉ-treinados, as tarefas que eles suportam e sua compatibilidade com diferentes modos de operaรงรฃo. 
+ +| Tipo de Modelo | Pesos Prรฉ-Treinados | Tarefas Suportadas | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|----------------|-----------------------------------------------------------------------------------------------|-------------------------------------------|------------|-----------|-------------|------------| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## Citaรงรตes e Agradecimentos + +Se vocรช utilizar o YOLO-NAS em seus estudos ou trabalho de desenvolvimento, por favor, cite o SuperGradients: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +Expressamos nossa gratidรฃo ร  equipe [SuperGradients](https://github.com/Deci-AI/super-gradients/) da Deci AI por seus esforรงos na criaรงรฃo e manutenรงรฃo deste recurso valioso para a comunidade de visรฃo computacional. Acreditamos que o YOLO-NAS, com sua arquitetura inovadora e capacidades superiores de detecรงรฃo de objetos, se tornarรก uma ferramenta fundamental para desenvolvedores e pesquisadores. + +*keywords: YOLO-NAS, Deci AI, detecรงรฃo de objetos, aprendizado profundo, busca de arquitetura neural, API do Ultralytics Python, modelo YOLO, SuperGradients, modelos prรฉ-treinados, bloco bรกsico amigรกvel para quantizaรงรฃo, esquemas avanรงados de treinamento, quantizaรงรฃo pรณs-treinamento, otimizaรงรฃo AutoNAC, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/pt/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/pt/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov3.md b/ultralytics/docs/pt/models/yolov3.md new file mode 100755 index 0000000..f706d99 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: Obtenha uma visรฃo geral do YOLOv3, YOLOv3-Ultralytics e YOLOv3u. Saiba mais sobre suas principais caracterรญsticas, uso e tarefas suportadas para detecรงรฃo de objetos. +keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, Detecรงรฃo de Objetos, Inferรชncia, Treinamento, Ultralytics +--- + +# YOLOv3, YOLOv3-Ultralytics, e YOLOv3u + +## Visรฃo Geral + +Este documento apresenta uma visรฃo geral de trรชs modelos de detecรงรฃo de objetos intimamente relacionados, nomeadamente o [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) e [YOLOv3u](https://github.com/ultralytics/ultralytics). + +1. 
**YOLOv3:** Esta รฉ a terceira versรฃo do algoritmo de detecรงรฃo de objetos You Only Look Once (YOLO). Originalmente desenvolvido por Joseph Redmon, o YOLOv3 melhorou seus predecessores ao introduzir recursos como previsรตes em vรกrias escalas e trรชs tamanhos diferentes de kernels de detecรงรฃo. + +2. **YOLOv3-Ultralytics:** Esta รฉ a implementaรงรฃo do YOLOv3 pela Ultralytics. Ela reproduz a arquitetura original do YOLOv3 e oferece funcionalidades adicionais, como suporte para mais modelos prรฉ-treinados e opรงรตes de personalizaรงรฃo mais fรกceis. + +3. **YOLOv3u:** Esta รฉ uma versรฃo atualizada do YOLOv3-Ultralytics que incorpora o cabeรงalho dividido livre de รขncoras e sem "objectness" usado nos modelos YOLOv8. O YOLOv3u mantรฉm a mesma arquitetura de "backbone" e "neck" do YOLOv3, mas com o cabeรงalho de detecรงรฃo atualizado do YOLOv8. + +![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png) + +## Principais Caracterรญsticas + +- **YOLOv3:** Introduziu o uso de trรชs escalas diferentes para detecรงรฃo, aproveitando trรชs tamanhos diferentes de kernels de detecรงรฃo: 13x13, 26x26 e 52x52. Isso melhorou significativamente a precisรฃo da detecรงรฃo para objetos de diferentes tamanhos. Alรฉm disso, o YOLOv3 adicionou recursos como previsรตes multi-rรณtulos para cada caixa delimitadora e uma rede de extraรงรฃo de caracterรญsticas melhor. + +- **YOLOv3-Ultralytics:** A implementaรงรฃo do YOLOv3 pela Ultralytics oferece o mesmo desempenho do modelo original, porรฉm possui suporte adicional para mais modelos prรฉ-treinados, mรฉtodos de treinamento adicionais e opรงรตes de personalizaรงรฃo mais fรกceis. Isso torna o modelo mais versรกtil e fรกcil de usar para aplicaรงรตes prรกticas. + +- **YOLOv3u:** Este modelo atualizado incorpora o cabeรงalho dividido livre de รขncoras e "objectness" do YOLOv8. Ao eliminar a necessidade de caixas de รขncoras prรฉ-definidas e pontuaรงรตes de "objectness", esse design de cabeรงalho de detecรงรฃo pode melhorar a capacidade do modelo de detectar objetos de tamanhos e formatos variados. Isso torna o YOLOv3u mais robusto e preciso para tarefas de detecรงรฃo de objetos. + +## Tarefas e Modos Suportados + +A sรฉrie YOLOv3, incluindo YOLOv3, YOLOv3-Ultralytics e YOLOv3u, foi projetada especificamente para tarefas de detecรงรฃo de objetos. Esses modelos sรฃo conhecidos por sua eficรกcia em vรกrios cenรกrios do mundo real, equilibrando precisรฃo e velocidade. Cada variante oferece recursos e otimizaรงรตes รบnicos, tornando-os adequados para uma variedade de aplicaรงรตes. + +Os trรชs modelos suportam um conjunto abrangente de modos, garantindo versatilidade em vรกrias etapas do desenvolvimento e implantaรงรฃo de modelos. Esses modos incluem [Inferรชncia](../modes/predict.md), [Validaรงรฃo](../modes/val.md), [Treinamento](../modes/train.md) e [Exportaรงรฃo](../modes/export.md), fornecendo aos usuรกrios um conjunto completo de ferramentas para detecรงรฃo eficaz de objetos. 
+ +| Tipo de Modelo | Tarefas Suportadas | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|--------------------|-------------------------------------------|------------|-----------|-------------|------------| +| YOLOv3 | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3-Ultralytics | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3u | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabela fornece uma visรฃo rรกpida das capacidades de cada variante do YOLOv3, destacando sua versatilidade e adequaรงรฃo para vรกrias tarefas e modos operacionais em fluxos de trabalho de detecรงรฃo de objetos. + +## Exemplos de Uso + +Este exemplo apresenta exemplos simples de treinamento e inferรชncia do YOLOv3. Para obter documentaรงรฃo completa sobre esses e outros [modos](../modes/index.md), consulte as pรกginas de documentaรงรฃo do [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) e [Export](../modes/export.md). + +!!! Example "Exemplo" + + === "Python" + + Modelos prรฉ-treinados do PyTorch `*.pt`, bem como arquivos de configuraรงรฃo `*.yaml`, podem ser passados para a classe `YOLO()` para criar uma instรขncia do modelo em Python: + + ```python + from ultralytics import YOLO + + # Carregue um modelo YOLOv3n prรฉ-treinado na COCO + model = YOLO('yolov3n.pt') + + # Exiba informaรงรตes sobre o modelo (opcional) + model.info() + + # Treine o modelo no conjunto de dados de exemplo COCO8 por 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Execute inferรชncia com o modelo YOLOv3n na imagem 'bus.jpg' + results = model('caminho/para/bus.jpg') + ``` + + === "CLI" + + Comandos CLI estรฃo disponรญveis para executar diretamente os modelos: + + ```bash + # Carregue um modelo YOLOv3n prรฉ-treinado na COCO e treine-o no conjunto de dados de exemplo COCO8 por 100 รฉpocas + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Carregue um modelo YOLOv3n prรฉ-treinado na COCO e execute inferรชncia na imagem 'bus.jpg' + yolo predict model=yolov3n.pt source=caminho/para/bus.jpg + ``` + +## Citaรงรตes e Reconhecimentos + +Se vocรช utilizar o YOLOv3 em sua pesquisa, por favor, cite os artigos originais do YOLO e o repositรณrio Ultralytics YOLOv3: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +Agradecemos a Joseph Redmon e Ali Farhadi por desenvolverem o YOLOv3 original. diff --git a/ultralytics/docs/pt/models/yolov3.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov4.md b/ultralytics/docs/pt/models/yolov4.md new file mode 100755 index 0000000..297be59 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: Explore nosso guia detalhado sobre o YOLOv4, um detector de objetos em tempo real de รบltima geraรงรฃo. Entenda seus destaques arquiteturais, recursos inovadores e exemplos de aplicaรงรฃo. 
+keywords: ultralytics, YOLOv4, detecção de objetos, rede neural, detecção em tempo real, detector de objetos, aprendizado de máquina
+---
+
+# YOLOv4: Detecção de Objetos Rápida e Precisa
+
+Bem-vindo à página de documentação do Ultralytics para o YOLOv4, um detector de objetos em tempo real de última geração lançado em 2020 por Alexey Bochkovskiy em [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). O YOLOv4 foi projetado para fornecer o equilíbrio ideal entre velocidade e precisão, tornando-o uma excelente escolha para muitas aplicações.
+
+![Diagrama da arquitetura do YOLOv4](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png)
+**Diagrama da arquitetura do YOLOv4**. Mostra o design intricado da rede do YOLOv4, incluindo os componentes backbone, neck e head, bem como suas camadas interconectadas para uma detecção de objetos em tempo real otimizada.
+
+## Introdução
+
+YOLOv4 significa You Only Look Once versão 4. É um modelo de detecção de objetos em tempo real desenvolvido para superar as limitações de versões anteriores do YOLO, como [YOLOv3](yolov3.md) e outros modelos de detecção de objetos. Ao contrário de outros detectores de objetos baseados em redes neurais convolucionais (CNN), o YOLOv4 é aplicável não apenas a sistemas de recomendação, mas também ao gerenciamento de processos independentes e à redução da entrada humana. Sua operação em unidades de processamento gráfico (GPUs) convencionais permite o uso em massa a um preço acessível, e foi projetado para funcionar em tempo real em uma GPU convencional, exigindo apenas uma GPU para treinamento.
+
+## Arquitetura
+
+O YOLOv4 faz uso de várias características inovadoras que trabalham juntas para otimizar seu desempenho. Estas incluem Conexões Residuais Ponderadas (WRC), Conexões Parciais Cruzadas de Estágio (CSP), Normalização Cruzada em Mini Lote (CmBN), Treinamento Autoadversário (SAT), Ativação Mish, Aumento de Dados Mosaic, Regularização DropBlock e Perda CIoU. Essas características são combinadas para obter resultados de última geração.
+
+Um detector de objetos típico é composto por várias partes, incluindo a entrada, o backbone, o neck e o head. O backbone do YOLOv4 é pré-treinado no ImageNet e é usado para prever as classes e caixas delimitadoras dos objetos. O backbone pode vir de vários modelos, incluindo VGG, ResNet, ResNeXt ou DenseNet. A parte neck do detector é usada para coletar mapas de características de diferentes estágios e geralmente inclui vários caminhos bottom-up e vários caminhos top-down. A parte head é responsável por fazer as detecções e classificações finais dos objetos.
+
+## Bag of Freebies
+
+O YOLOv4 também faz uso de métodos conhecidos como "bag of freebies" (saco de brindes), que são técnicas que melhoram a precisão do modelo durante o treinamento sem aumentar o custo da inferência. O aumento de dados é uma técnica comum de "bag of freebies" usada na detecção de objetos, que aumenta a variabilidade das imagens de entrada para melhorar a robustez do modelo. Alguns exemplos de aumento de dados incluem distorções fotométricas (ajustando o brilho, contraste, matiz, saturação e ruído de uma imagem) e distorções geométricas (adicionando dimensionamento aleatório, recorte, espelhamento e rotação). Essas técnicas ajudam o modelo a generalizar melhor para diferentes tipos de imagens, como ilustra o esboço a seguir.
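+Um esboço mínimo e hipotético dessas distorções fotométricas e geométricas usando a biblioteca Pillow (apenas ilustrativo; não é o pipeline de aumento de dados real do YOLOv4, e os intervalos de valores são arbitrários):
+
+```python
+import random
+
+from PIL import Image, ImageEnhance, ImageOps
+
+
+def aumentar(img: Image.Image) -> Image.Image:
+    # Distorção fotométrica: brilho e contraste aleatórios
+    img = ImageEnhance.Brightness(img).enhance(random.uniform(0.7, 1.3))
+    img = ImageEnhance.Contrast(img).enhance(random.uniform(0.7, 1.3))
+    # Distorção geométrica: espelhamento aleatório e rotação leve
+    if random.random() < 0.5:
+        img = ImageOps.mirror(img)
+    return img.rotate(random.uniform(-10, 10))
+
+
+# 'bus.jpg' é um caminho hipotético, apenas para ilustração
+imagem_aumentada = aumentar(Image.open('bus.jpg'))
+```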
+ +## Recursos e Desempenho + +O YOLOv4 foi projetado para oferecer velocidade e precisรฃo ideais na detecรงรฃo de objetos. A arquitetura do YOLOv4 inclui o CSPDarknet53 como o backbone, o PANet como o neck e o YOLOv3 como a cabeรงa de detecรงรฃo. Esse design permite que o YOLOv4 realize detecรงรฃo de objetos em uma velocidade impressionante, tornando-o adequado para aplicaรงรตes em tempo real. O YOLOv4 tambรฉm se destaca em termos de precisรฃo, alcanรงando resultados de รบltima geraรงรฃo em benchmarks de detecรงรฃo de objetos. + +## Exemplos de Uso + +No momento da escrita, o Ultralytics nรฃo oferece suporte a modelos YOLOv4. Portanto, os usuรกrios interessados em usar o YOLOv4 deverรฃo consultar diretamente o repositรณrio YOLOv4 no GitHub para instruรงรตes de instalaรงรฃo e uso. + +Aqui estรก uma breve visรฃo geral das etapas tรญpicas que vocรช pode seguir para usar o YOLOv4: + +1. Visite o repositรณrio YOLOv4 no GitHub: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. Siga as instruรงรตes fornecidas no arquivo README para a instalaรงรฃo. Isso geralmente envolve clonar o repositรณrio, instalar as dependรชncias necessรกrias e configurar as variรกveis de ambiente necessรกrias. + +3. Uma vez que a instalaรงรฃo esteja completa, vocรช pode treinar e usar o modelo de acordo com as instruรงรตes de uso fornecidas no repositรณrio. Isso geralmente envolve a preparaรงรฃo do seu conjunto de dados, a configuraรงรฃo dos parรขmetros do modelo, o treinamento do modelo e, em seguida, o uso do modelo treinado para realizar a detecรงรฃo de objetos. + +Observe que as etapas especรญficas podem variar dependendo do seu caso de uso especรญfico e do estado atual do repositรณrio YOLOv4. Portanto, รฉ altamente recomendรกvel consultar diretamente as instruรงรตes fornecidas no repositรณrio YOLOv4 do GitHub. + +Lamentamos qualquer inconveniente que isso possa causar e nos esforรงaremos para atualizar este documento com exemplos de uso para o Ultralytics assim que o suporte para o YOLOv4 for implementado. + +## Conclusรฃo + +O YOLOv4 รฉ um modelo poderoso e eficiente de detecรงรฃo de objetos que oferece um equilรญbrio entre velocidade e precisรฃo. O uso de recursos exclusivos e tรฉcnicas "Bag of Freebies" durante o treinamento permite que ele tenha um excelente desempenho em tarefas de detecรงรฃo de objetos em tempo real. O YOLOv4 pode ser treinado e usado por qualquer pessoa com uma GPU convencional, tornando-o acessรญvel e prรกtico para uma ampla variedade de aplicaรงรตes. + +## Referรชncias e Agradecimentos + +Gostarรญamos de agradecer aos autores do YOLOv4 por suas contribuiรงรตes significativas no campo da detecรงรฃo de objetos em tempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +O artigo original do YOLOv4 pode ser encontrado no [arXiv](https://arxiv.org/abs/2004.10934). Os autores disponibilizaram seu trabalho publicamente, e o cรณdigo pode ser acessado no [GitHub](https://github.com/AlexeyAB/darknet). Agradecemos seus esforรงos em avanรงar o campo e tornar seu trabalho acessรญvel ร  comunidade em geral. 
diff --git a/ultralytics/docs/pt/models/yolov4.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov5.md b/ultralytics/docs/pt/models/yolov5.md new file mode 100755 index 0000000..942b1d4 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: Descubra o YOLOv5u, uma versรฃo aprimorada do modelo YOLOv5 com uma relaรงรฃo aprimorada entre precisรฃo e velocidade e vรกrios modelos prรฉ-treinados para vรกrias tarefas de detecรงรฃo de objetos. +keywords: YOLOv5u, detecรงรฃo de objetos, modelos prรฉ-treinados, Ultralytics, Inferรชncia, Validaรงรฃo, YOLOv5, YOLOv8, sem รขncora, sem certeza de objectness, aplicativos em tempo real, machine learning +--- + +# YOLOv5 + +## Visรฃo Geral + +O YOLOv5u representa um avanรงo nas metodologias de detecรงรฃo de objetos. Originรกrio da arquitetura fundamental do modelo [YOLOv5](https://github.com/ultralytics/yolov5) desenvolvido pela Ultralytics, o YOLOv5u integra a divisรฃo da cabeรงa do Ultralytics sem รขncora e sem certeza de objectness, uma formaรงรฃo introduzida anteriormente nos modelos [YOLOv8](yolov8.md). Essa adaptaรงรฃo aprimora a arquitetura do modelo, resultando em uma relaรงรฃo aprimorada entre precisรฃo e velocidade em tarefas de detecรงรฃo de objetos. Com base nos resultados empรญricos e em suas caracterรญsticas derivadas, o YOLOv5u oferece uma alternativa eficiente para aqueles que procuram soluรงรตes robustas tanto na pesquisa quanto em aplicaรงรตes prรกticas. + +![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## Principais Recursos + +- **Cabeรงa do Ultralytics sem ร‚ncora:** Modelos tradicionais de detecรงรฃo de objetos dependem de caixas รขncora predefinidas para prever as localizaรงรตes dos objetos. No entanto, o YOLOv5u moderniza essa abordagem. Ao adotar uma cabeรงa do Ultralytics sem รขncora, ele garante um mecanismo de detecรงรฃo mais flexรญvel e adaptรกvel, melhorando consequentemente o desempenho em cenรกrios diversos. + +- **Equilรญbrio otimizado entre precisรฃo e velocidade:** Velocidade e precisรฃo muitas vezes puxam em direรงรตes opostas. Mas o YOLOv5u desafia esse equilรญbrio. Ele oferece um equilรญbrio calibrado, garantindo detecรงรตes em tempo real sem comprometer a precisรฃo. Esse recurso รฉ particularmente valioso para aplicativos que exigem respostas rรกpidas, como veรญculos autรดnomos, robรณtica e anรกlise de vรญdeo em tempo real. + +- **Variedade de Modelos Prรฉ-Treinados:** Entendendo que diferentes tarefas exigem conjuntos de ferramentas diferentes, o YOLOv5u oferece uma variedade de modelos prรฉ-treinados. Se vocรช estรก focado em Inferรชncia, Validaรงรฃo ou Treinamento, hรก um modelo personalizado esperando por vocรช. Essa variedade garante que vocรช nรฃo esteja apenas usando uma soluรงรฃo genรฉrica, mas sim um modelo ajustado especificamente para o seu desafio รบnico. + +## Tarefas e Modos Suportados + +Os modelos YOLOv5u, com vรกrios pesos prรฉ-treinados, se destacam nas tarefas de [Detecรงรฃo de Objetos](../tasks/detect.md). Eles suportam uma ampla gama de modos, tornando-os adequados para aplicaรงรตes diversas, desde o desenvolvimento atรฉ a implantaรงรฃo. 
| Tipo de Modelo | Pesos Pré-Treinados                                                                                                         | Tarefa                                    | Inferência | Validação | Treinamento | Exportação |
+|----------------|-----------------------------------------------------------------------------------------------------------------------------|-------------------------------------------|------------|-----------|-------------|------------|
+| YOLOv5u        | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Detecção de Objetos](../tasks/detect.md) | ✅          | ✅         | ✅           | ✅          |
+
+Essa tabela oferece uma visão detalhada das variantes do modelo YOLOv5u, destacando sua aplicabilidade em tarefas de detecção de objetos e suporte a diversos modos operacionais, como [Inferência](../modes/predict.md), [Validação](../modes/val.md), [Treinamento](../modes/train.md) e [Exportação](../modes/export.md). Esse suporte abrangente garante que os usuários possam aproveitar totalmente as capacidades dos modelos YOLOv5u em uma ampla gama de cenários de detecção de objetos.
+
+## Métricas de Desempenho
+
+!!! Desempenho
+
+    === "Detecção"
+
+    Consulte a [Documentação de Detecção](https://docs.ultralytics.com/tasks/detect/) para exemplos de uso com esses modelos treinados no conjunto de dados [COCO](https://docs.ultralytics.com/datasets/detect/coco/), que incluem 80 classes pré-treinadas.
+
+    | Modelo                                                                                       | YAML                                                                                                           | tamanho<br><sup>(pixels) | mAP<sup>val<br>50-95 | Velocidade<br><sup>CPU ONNX<br>(ms) | Velocidade<br><sup>A100 TensorRT<br>(ms) | parâmetros<br><sup>(M) | FLOPs<br><sup>(B) |
+    | -------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | ------------------------ | -------------------- | ----------------------------------- | ---------------------------------------- | ---------------------- | ----------------- |
+    | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt)   | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml)    | 640                      | 34.3                 | 73.6                                | 1.06                                     | 2.6                    | 7.7               |
+    | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt)   | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml)    | 640                      | 43.0                 | 120.7                               | 1.27                                     | 9.1                    | 24.0              |
+    | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt)   | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml)    | 640                      | 49.0                 | 233.9                               | 1.86                                     | 25.1                   | 64.2              |
+    | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt)   | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml)    | 640                      | 52.2                 | 408.4                               | 2.50                                     | 53.2                   | 135.0             |
+    | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt)   | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml)    | 640                      | 53.2                 | 763.2                               | 3.81                                     | 97.2                   | 246.4             |
+    |                                                                                              |                                                                                                                |                          |                      |                                     |                                          |                        |                   |
+    | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280                     | 42.1                 | 211.0                               | 1.83                                     | 4.3                    | 7.8               |
+    | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280                     | 48.6                 | 422.6                               | 2.34                                     | 15.3                   | 24.6              |
+    | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280                     | 53.6                 | 810.9                               | 4.36                                     | 41.2                   | 65.7              |
+    | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280                     | 55.7                 | 1470.9                              | 5.47                                     | 86.1                   | 137.4             |
+    | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280                     | 56.8                 | 2436.5                              | 8.98                                     | 155.4                  | 250.7             |
+
+## Exemplos de Uso
+
+Este exemplo fornece exemplos simples de treinamento e inferência do YOLOv5. Para documentação completa sobre esses e outros [modos](../modes/index.md), consulte as páginas de documentação [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) e [Export](../modes/export.md).
+
+!!!
Example "Exemplo" + + === "Python" + + Modelos prรฉ-treinados `*.pt` do PyTorch, assim como os arquivos de configuraรงรฃo `*.yaml`, podem ser passados para a classe `YOLO()` para criar uma instรขncia do modelo em Python: + + ```python + from ultralytics import YOLO + + # Carrega um modelo YOLOv5n prรฉ-treinado no COCO + modelo = YOLO('yolov5n.pt') + + # Mostra informaรงรตes do modelo (opcional) + modelo.info() + + # Treina o modelo no conjunto de dados de exemplo COCO8 por 100 รฉpocas + resultados = modelo.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Executa a inferรชncia com o modelo YOLOv5n na imagem 'bus.jpg' + resultados = modelo('path/to/bus.jpg') + ``` + + === "CLI" + + Comandos CLI estรฃo disponรญveis para executar diretamente os modelos: + + ```bash + # Carrega um modelo YOLOv5n prรฉ-treinado no COCO e o treina no conjunto de dados de exemplo COCO8 por 100 รฉpocas + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Carrega um modelo YOLOv5n prรฉ-treinado no COCO e executa a inferรชncia na imagem 'bus.jpg' + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## Citaรงรตes e Agradecimentos + +Se vocรช usar o YOLOv5 ou YOLOv5u em sua pesquisa, por favor, cite o repositรณrio YOLOv5 da Ultralytics da seguinte forma: + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +Observe que os modelos YOLOv5 sรฃo fornecidos sob licenรงas [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) e [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/pt/models/yolov5.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov6.md b/ultralytics/docs/pt/models/yolov6.md new file mode 100755 index 0000000..4db8e46 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: Explore Meituan YOLOv6, um modelo avanรงado de detecรงรฃo de objetos que alcanรงa um equilรญbrio entre velocidade e precisรฃo. Saiba mais sobre suas caracterรญsticas, modelos prรฉ-treinados e uso em Python. +keywords: Meituan YOLOv6, detecรงรฃo de objetos, Ultralytics, documentaรงรฃo YOLOv6, Concatenaรงรฃo Bidirecional, Treinamento Assistido por ร‚ncora, modelos prรฉ-treinados, aplicaรงรตes em tempo real +--- + +# Meituan YOLOv6 + +## Visรฃo Geral + +O Meituan YOLOv6 รฉ um detector de objetos de ponta que oferece um equilรญbrio notรกvel entre velocidade e precisรฃo, tornando-se uma escolha popular para aplicaรงรตes em tempo real. Este modelo apresenta vรกrias melhorias em sua arquitetura e esquema de treinamento, incluindo a implementaรงรฃo de um mรณdulo de Concatenaรงรฃo Bidirecional (BiC), uma estratรฉgia de treinamento assistido por รขncora (AAT) e um design aprimorado de espinha dorsal e pescoรงo para obter precisรฃo de รบltima geraรงรฃo no conjunto de dados COCO. 
+ +![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![Modelo exemplo de imagem](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**Visรฃo geral do YOLOv6.** Diagrama da arquitetura do modelo mostrando os componentes de rede redesenhados e as estratรฉgias de treinamento que levaram a melhorias significativas no desempenho. (a) O pescoรงo do YOLOv6 (N e S sรฃo mostrados). RepBlocks รฉ substituรญda por CSPStackRep para M/L. (b) A estrutura de um mรณdulo BiC. (c) Um bloco SimCSPSPPF. ([fonte](https://arxiv.org/pdf/2301.05586.pdf)). + +### Principais Caracterรญsticas + +- **Mรณdulo de Concatenaรงรฃo Bidirecional (BiC):** O YOLOv6 introduz um mรณdulo BiC no pescoรงo do detector, aprimorando os sinais de localizaรงรฃo e oferecendo ganhos de desempenho com uma degradaรงรฃo de velocidade insignificante. +- **Estratรฉgia de Treinamento Assistido por ร‚ncora (AAT):** Este modelo propรตe AAT para aproveitar os benefรญcios dos paradigmas baseados em รขncoras e sem รขncoras sem comprometer a eficiรชncia da inferรชncia. +- **Design de Espinha Dorsal e Pescoรงo Aprimorado:** Ao aprofundar o YOLOv6 para incluir mais uma etapa na espinha dorsal e no pescoรงo, este modelo alcanรงa desempenho de รบltima geraรงรฃo no conjunto de dados COCO com entrada de alta resoluรงรฃo. +- **Estratรฉgia de Auto-Destilaรงรฃo:** Uma nova estratรฉgia de auto-destilaรงรฃo รฉ implementada para aumentar o desempenho de modelos menores do YOLOv6, aprimorando o ramo auxiliar de regressรฃo durante o treinamento e removendo-o durante a inferรชncia para evitar uma queda significativa na velocidade. + +## Mรฉtricas de Desempenho + +O YOLOv6 fornece vรกrios modelos prรฉ-treinados com diferentes escalas: + +- YOLOv6-N: 37,5% AP na val2017 do COCO a 1187 FPS com GPU NVIDIA Tesla T4. +- YOLOv6-S: 45,0% de AP a 484 FPS. +- YOLOv6-M: 50,0% de AP a 226 FPS. +- YOLOv6-L: 52,8% de AP a 116 FPS. +- YOLOv6-L6: Precisรฃo de รบltima geraรงรฃo em tempo real. + +O YOLOv6 tambรฉm fornece modelos quantizados para diferentes precisรตes e modelos otimizados para plataformas mรณveis. + +## Exemplos de Uso + +Este exemplo fornece exemplos simples de treinamento e inferรชncia do YOLOv6. Para documentaรงรฃo completa sobre esses e outros [modos](../modes/index.md), consulte as pรกginas de documentaรงรฃo [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) e [Export](../modes/export.md). + +!!! 
Example "Exemplo" + + === "Python" + + Modelos prรฉ-treinados `*.pt` do PyTorch, assim como arquivos de configuraรงรฃo `*.yaml`, podem ser passados ร  classe `YOLO()` para criar uma instรขncia do modelo em Python: + + ```python + from ultralytics import YOLO + + # Constrรณi um modelo YOLOv6n do zero + model = YOLO('yolov6n.yaml') + + # Exibe informaรงรตes do modelo (opcional) + model.info() + + # Treina o modelo no conjunto de dados de exemplo COCO8 por 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Executa inferรชncia com o modelo YOLOv6n na imagem 'bus.jpg' + results = model('caminho/para/onibus.jpg') + ``` + + === "CLI" + + Comandos da CLI estรฃo disponรญveis para executar diretamente os modelos: + + ```bash + # Constrรณi um modelo YOLOv6n do zero e o treina no conjunto de dados de exemplo COCO8 por 100 รฉpocas + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # Constrรณi um modelo YOLOv6n do zero e executa inferรชncia na imagem 'bus.jpg' + yolo predict model=yolov6n.yaml source=caminho/para/onibus.jpg + ``` + +## Tarefas e Modos Suportados + +A sรฉrie YOLOv6 oferece uma variedade de modelos, cada um otimizado para [Detecรงรฃo de Objetos](../tasks/detect.md) de alta performance. Esses modelos atendem a diferentes necessidades computacionais e requisitos de precisรฃo, tornando-os versรกteis para uma ampla variedade de aplicaรงรตes. + +| Tipo de Modelo | Pesos Prรฉ-treinados | Tarefas Suportadas | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|----------------|---------------------|-------------------------------------------|------------|-----------|-------------|------------| +| YOLOv6-N | `yolov6-n.pt` | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [Detecรงรฃo de Objetos](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabela fornece uma visรฃo geral detalhada das variantes do modelo YOLOv6, destacando suas capacidades em tarefas de detecรงรฃo de objetos e sua compatibilidade com vรกrios modos operacionais, como [inferรชncia](../modes/predict.md), [validaรงรฃo](../modes/val.md), [treinamento](../modes/train.md) e [exportaรงรฃo](../modes/export.md). Esse suporte abrangente garante que os usuรกrios possam aproveitar totalmente as capacidades dos modelos YOLOv6 em uma ampla gama de cenรกrios de detecรงรฃo de objetos. + +## Citaรงรตes e Agradecimentos + +Gostarรญamos de agradecer aos autores por suas contribuiรงรตes significativas no campo da detecรงรฃo de objetos em tempo real: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + + O artigo original do YOLOv6 pode ser encontrado no [arXiv](https://arxiv.org/abs/2301.05586). Os autores disponibilizaram publicamente seu trabalho, e o cรณdigo pode ser acessado no [GitHub](https://github.com/meituan/YOLOv6). Agradecemos seus esforรงos em avanรงar no campo e disponibilizar seu trabalho para a comunidade em geral. 
diff --git a/ultralytics/docs/pt/models/yolov6.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov7.md b/ultralytics/docs/pt/models/yolov7.md new file mode 100755 index 0000000..69896b9 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov7.md @@ -0,0 +1,66 @@ +--- +comments: true +description: Explore o YOLOv7, um detector de objetos em tempo real. Entenda sua velocidade superior, impressionante precisรฃo e foco exclusivo em otimizaรงรฃo treinรกvel de recursos gratuitos. +keywords: YOLOv7, detector de objetos em tempo real, state-of-the-art, Ultralytics, conjunto de dados MS COCO, reparametrizaรงรฃo de modelo, atribuiรงรฃo dinรขmica de rรณtulo, escalonamento estendido, escalonamento composto +--- + +# YOLOv7: Treinรกvel Bag-of-Freebies + +O YOLOv7 รฉ um detector de objetos em tempo real state-of-the-art que supera todos os detectores de objetos conhecidos em termos de velocidade e precisรฃo na faixa de 5 FPS a 160 FPS. Ele possui a maior precisรฃo (56,8% de AP) entre todos os detectores de objetos em tempo real conhecidos com 30 FPS ou mais no GPU V100. Alรฉm disso, o YOLOv7 supera outros detectores de objetos, como YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5 e muitos outros em velocidade e precisรฃo. O modelo รฉ treinado no conjunto de dados MS COCO do zero, sem usar outros conjuntos de dados ou pesos prรฉ-treinados. O cรณdigo-fonte para o YOLOv7 estรก disponรญvel no GitHub. + +![Comparaรงรฃo YOLOv7 com outros detectores de objetos](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**Comparaรงรฃo de detectores de objetos state-of-the-art. +** A partir dos resultados na Tabela 2, sabemos que o mรฉtodo proposto tem a melhor relaรงรฃo velocidade-precisรฃo de forma abrangente. Se compararmos o YOLOv7-tiny-SiLU com o YOLOv5-N (r6.1), nosso mรฉtodo รฉ 127 FPS mais rรกpido e 10,7% mais preciso em AP. Alรฉm disso, o YOLOv7 tem 51,4% de AP em uma taxa de quadros de 161 FPS, enquanto o PPYOLOE-L com o mesmo AP tem apenas uma taxa de quadros de 78 FPS. Em termos de uso de parรขmetros, o YOLOv7 รฉ 41% menor do que o PPYOLOE-L. Se compararmos o YOLOv7-X com uma velocidade de inferรชncia de 114 FPS com o YOLOv5-L (r6.1) com uma velocidade de inferรชncia de 99 FPS, o YOLOv7-X pode melhorar o AP em 3,9%. Se o YOLOv7-X for comparado com o YOLOv5-X (r6.1) de escala similar, a velocidade de inferรชncia do YOLOv7-X รฉ 31 FPS mais rรกpida. Alรฉm disso, em termos da quantidade de parรขmetros e cรกlculos, o YOLOv7-X reduz 22% dos parรขmetros e 8% dos cรกlculos em comparaรงรฃo com o YOLOv5-X (r6.1), mas melhora o AP em 2,2% ([Fonte](https://arxiv.org/pdf/2207.02696.pdf)). + +## Visรฃo Geral + +A detecรงรฃo de objetos em tempo real รฉ um componente importante em muitos sistemas de visรฃo computacional, incluindo rastreamento de mรบltiplos objetos, direรงรฃo autรดnoma, robรณtica e anรกlise de imagens mรฉdicas. Nos รบltimos anos, o desenvolvimento de detecรงรฃo de objetos em tempo real tem se concentrado em projetar arquiteturas eficientes e melhorar a velocidade de inferรชncia de vรกrias CPUs, GPUs e unidades de processamento neural (NPUs). O YOLOv7 suporta tanto GPUs mรณveis quanto dispositivos GPU, desde a borda atรฉ a nuvem. 
+ +Ao contrรกrio dos detectores de objetos em tempo real tradicionais que se concentram na otimizaรงรฃo de arquitetura, o YOLOv7 introduz um foco na otimizaรงรฃo do processo de treinamento. Isso inclui mรณdulos e mรฉtodos de otimizaรงรฃo projetados para melhorar a precisรฃo da detecรงรฃo de objetos sem aumentar o custo de inferรชncia, um conceito conhecido como "treinรกvel bag-of-freebies". + +## Recursos Principais + +O YOLOv7 apresenta vรกrios recursos principais: + +1. **Reparametrizaรงรฃo do Modelo**: O YOLOv7 propรตe um modelo reparametrizado planejado, que รฉ uma estratรฉgia aplicรกvel a camadas em diferentes redes com o conceito de caminho de propagaรงรฃo de gradiente. + +2. **Atribuiรงรฃo Dinรขmica de Rรณtulo**: O treinamento do modelo com vรกrias camadas de saรญda apresenta um novo problema: "Como atribuir alvos dinรขmicos para as saรญdas de diferentes ramificaรงรตes?" Para resolver esse problema, o YOLOv7 introduz um novo mรฉtodo de atribuiรงรฃo de rรณtulo chamado atribuiรงรฃo de rรณtulo orientada por lideranรงa de granularidade fina (coarse-to-fine). + +3. **Escalonamento Estendido e Composto**: O YOLOv7 propรตe mรฉtodos de "escalonamento estendido" e "escalonamento composto" para o detector de objetos em tempo real que podem utilizar efetivamente parรขmetros e cรกlculos. + +4. **Eficiรชncia**: O mรฉtodo proposto pelo YOLOv7 pode reduzir efetivamente cerca de 40% dos parรขmetros e 50% dos cรกlculos do detector de objetos em tempo real state-of-the-art, alรฉm de apresentar uma velocidade de inferรชncia mais rรกpida e maior precisรฃo de detecรงรฃo. + +## Exemplos de Uso + +No momento em que este texto foi escrito, a Ultralytics ainda nรฃo oferece suporte aos modelos YOLOv7. Portanto, qualquer usuรกrio interessado em usar o YOLOv7 precisarรก se referir diretamente ao repositรณrio do YOLOv7 no GitHub para obter instruรงรตes de instalaรงรฃo e uso. + +Aqui estรก uma breve visรฃo geral das etapas tรญpicas que vocรช pode seguir para usar o YOLOv7: + +1. Acesse o repositรณrio do YOLOv7 no GitHub: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7). + +2. Siga as instruรงรตes fornecidas no arquivo README para a instalaรงรฃo. Isso normalmente envolve clonar o repositรณrio, instalar as dependรชncias necessรกrias e configurar quaisquer variรกveis de ambiente necessรกrias. + +3. Apรณs a conclusรฃo da instalaรงรฃo, vocรช pode treinar e usar o modelo conforme as instruรงรตes de uso fornecidas no repositรณrio. Isso geralmente envolve a preparaรงรฃo do conjunto de dados, a configuraรงรฃo dos parรขmetros do modelo, o treinamento do modelo e, em seguida, o uso do modelo treinado para realizar a detecรงรฃo de objetos. + +Observe que as etapas especรญficas podem variar dependendo do caso de uso especรญfico e do estado atual do repositรณrio do YOLOv7. Portanto, รฉ altamente recomendรกvel consultar diretamente as instruรงรตes fornecidas no repositรณrio do YOLOv7 no GitHub. + +Lamentamos qualquer inconveniente que isso possa causar e nos esforรงaremos para atualizar este documento com exemplos de uso para a Ultralytics assim que o suporte para o YOLOv7 for implementado. + +## Citaรงรตes e Agradecimentos + +Gostarรญamos de agradecer aos autores do YOLOv7 por suas contribuiรงรตes significativas no campo da detecรงรฃo de objetos em tempo real: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +O artigo original do YOLOv7 pode ser encontrado no [arXiv](https://arxiv.org/pdf/2207.02696.pdf). Os autores disponibilizaram publicamente seu trabalho, e o cรณdigo pode ser acessado no [GitHub](https://github.com/WongKinYiu/yolov7). Agradecemos seus esforรงos em avanรงar o campo e tornar seu trabalho acessรญvel ร  comunidade em geral. diff --git a/ultralytics/docs/pt/models/yolov7.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/models/yolov8.md b/ultralytics/docs/pt/models/yolov8.md new file mode 100755 index 0000000..0240a43 --- /dev/null +++ b/ultralytics/docs/pt/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: Explore as emocionantes caracterรญsticas do YOLOv8, a versรฃo mais recente do nosso detector de objetos em tempo real! Saiba como as arquiteturas avanรงadas, modelos prรฉ-treinados e o equilรญbrio ideal entre precisรฃo e velocidade tornam o YOLOv8 a escolha perfeita para as suas tarefas de detecรงรฃo de objetos. +keywords: YOLOv8, Ultralytics, detector de objetos em tempo real, modelos prรฉ-treinados, documentaรงรฃo, detecรงรฃo de objetos, sรฉrie YOLO, arquiteturas avanรงadas, precisรฃo, velocidade +--- + +# YOLOv8 + +## Visรฃo Geral + +O YOLOv8 รฉ a versรฃo mais recente da sรฉrie YOLO de detectores de objetos em tempo real, oferecendo um desempenho de ponta em termos de precisรฃo e velocidade. Construindo sobre as inovaรงรตes das versรตes anteriores do YOLO, o YOLOv8 introduz novas caracterรญsticas e otimizaรงรตes que o tornam uma escolha ideal para diversas tarefas de detecรงรฃo de objetos em uma ampla variedade de aplicaรงรตes. + +![YOLOv8 da Ultralytics](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## Principais Caracterรญsticas + +- **Arquiteturas Avanรงadas de Backbone e Neck:** O YOLOv8 utiliza arquiteturas avanรงadas de backbone e neck, resultando em uma melhor extraรงรฃo de caracterรญsticas e desempenho na detecรงรฃo de objetos. +- **Anchor-free Split Ultralytics Head:** O YOLOv8 adota um head Ultralytics dividido sem ancoragem, o que contribui para uma melhor precisรฃo e um processo de detecรงรฃo mais eficiente em comparaรงรฃo com abordagens baseadas em รขncoras. +- **Equilรญbrio Otimizado entre Precisรฃo e Velocidade:** Com foco em manter um equilรญbrio ideal entre precisรฃo e velocidade, o YOLOv8 รฉ adequado para tarefas de detecรงรฃo de objetos em tempo real em diversas รกreas de aplicaรงรฃo. +- **Variedade de Modelos Prรฉ-treinados:** O YOLOv8 oferece uma variedade de modelos prรฉ-treinados para atender a diversas tarefas e requisitos de desempenho, tornando mais fรกcil encontrar o modelo adequado para o seu caso de uso especรญfico. + +## Tarefas e Modos Suportados + +A sรฉrie YOLOv8 oferece uma variedade de modelos, cada um especializado em tarefas especรญficas de visรฃo computacional. 
Esses modelos sรฃo projetados para atender a diversos requisitos, desde a detecรงรฃo de objetos atรฉ tarefas mais complexas, como segmentaรงรฃo de instรขncias, detecรงรฃo de poses/pontos-chave e classificaรงรฃo. + +Cada variante da sรฉrie YOLOv8 รฉ otimizada para a respectiva tarefa, garantindo alto desempenho e precisรฃo. Alรฉm disso, esses modelos sรฃo compatรญveis com diversos modos operacionais, incluindo [Inferรชncia](../modes/predict.md), [Validaรงรฃo](../modes/val.md), [Treinamento](../modes/train.md) e [Exportaรงรฃo](../modes/export.md), facilitando o uso em diferentes estรกgios de implantaรงรฃo e desenvolvimento. + +| Modelo | Nomes de Arquivo | Tarefa | Inferรชncia | Validaรงรฃo | Treinamento | Exportaรงรฃo | +|-------------|----------------------------------------------------------------------------------------------------------------|--------------------------------------------------|------------|-----------|-------------|------------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [Detecรงรฃo](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [Segmentaรงรฃo de Instรขncias](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [Pose/Pontos-chave](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [Classificaรงรฃo](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +Esta tabela fornece uma visรฃo geral das variantes de modelos YOLOv8, destacando suas aplicaรงรตes em tarefas especรญficas e sua compatibilidade com diversos modos operacionais, como inferรชncia, validaรงรฃo, treinamento e exportaรงรฃo. Ela demonstra a versatilidade e robustez da sรฉrie YOLOv8, tornando-os adequados para diversas aplicaรงรตes em visรฃo computacional. + +## Mรฉtricas de Desempenho + +!!! Desempenho + + === "Detecรงรฃo (COCO)" + + Consulte a [Documentaรงรฃo de Detecรงรฃo](https://docs.ultralytics.com/tasks/detect/) para exemplos de uso com esses modelos treinados no conjunto de dados [COCO](https://docs.ultralytics.com/datasets/detect/coco/), que inclui 80 classes prรฉ-treinadas. + + | Modelo | tamanho
(pixels) | mAPval
50-95 | Velocidade
CPU ONNX
(ms) | Velocidade
A100 TensorRT
(ms) | parรขmetros
(M) | FLOPs
(B) | + | --------------------------------------------------------------------------------------- | ----------------------- | -------------------- | ----------------------------------- | --------------------------------------- | ---------------------- | ------------------ | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37,3 | 80,4 | 0,99 | 3,2 | 8,7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44,9 | 128,4 | 1,20 | 11,2 | 28,6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50,2 | 234,7 | 1,83 | 25,9 | 78,9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52,9 | 375,2 | 2,39 | 43,7 | 165,2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53,9 | 479,1 | 3,53 | 68,2 | 257,8 | + + === "Detecรงรฃo (Open Images V7)" + + Consulte a [Documentaรงรฃo de Detecรงรฃo](https://docs.ultralytics.com/tasks/detect/) para exemplos de uso com esses modelos treinados no conjunto de dados [Open Images V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), que inclui 600 classes prรฉ-treinadas. + + | Modelo | tamanho
(pixels) | mAPval
50-95 | Velocidade
CPU ONNX
(ms) | Velocidade
A100 TensorRT
(ms) | parรขmetros
(M) | FLOPs
(B) | + | ----------------------------------------------------------------------------------------- | ----------------------- | -------------------- | ----------------------------------- | --------------------------------------- | ---------------------- | ------------------ | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18,4 | 142,4 | 1,21 | 3,5 | 10,5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27,7 | 183,1 | 1,40 | 11,4 | 29,7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33,6 | 408,5 | 2,26 | 26,2 | 80,6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34,9 | 596,9 | 2,43 | 44,1 | 167,4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36,3 | 860,6 | 3,56 | 68,7 | 260,6 | + + === "Segmentaรงรฃo (COCO)" + + Consulte a [Documentaรงรฃo de Segmentaรงรฃo](https://docs.ultralytics.com/tasks/segment/) para exemplos de uso com esses modelos treinados no conjunto de dados [COCO](https://docs.ultralytics.com/datasets/segment/coco/), que inclui 80 classes prรฉ-treinadas. + + | Modelo | tamanho
(pixels) | mAPbox
50-95 | mAPmรกscara
50-95 | Velocidade
CPU ONNX
(ms) | Velocidade
A100 TensorRT
(ms) | parรขmetros
(M) | FLOPs
(B) | + | ---------------------------------------------------------------------------------------------- | ----------------------- | -------------------- | ------------------------ | ----------------------------------- | --------------------------------------- | ---------------------- | ------------------ | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36,7 | 30,5 | 96,1 | 1,21 | 3,4 | 12,6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44,6 | 36,8 | 155,7 | 1,47 | 11,8 | 42,6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49,9 | 40,8 | 317,0 | 2,18 | 27,3 | 110,2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52,3 | 42,6 | 572,4 | 2,79 | 46,0 | 220,5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53,4 | 43,4 | 712,1 | 4,02 | 71,8 | 344,1 | + + === "Classificaรงรฃo (ImageNet)" + + Consulte a [Documentaรงรฃo de Classificaรงรฃo](https://docs.ultralytics.com/tasks/classify/) para exemplos de uso com esses modelos treinados no conjunto de dados [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), que inclui 1000 classes prรฉ-treinadas. + + | Modelo | tamanho
(pixels) | acurรกcia
top1 | acurรกcia
top5 | Velocidade
CPU ONNX
(ms) | Velocidade
A100 TensorRT
(ms) | parรขmetros
(M) | FLOPs
(B) a 640 |
+    | ------------------------------------------------------------------------------------------------ | ----------------------- | --------------------- | --------------------- | ----------------------------------- | --------------------------------------- | ---------------------- | ------------------------ |
+    | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66,6 | 87,0 | 12,9 | 0,31 | 2,7 | 4,3 |
+    | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72,3 | 91,1 | 23,4 | 0,35 | 6,4 | 13,5 |
+    | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76,4 | 93,2 | 85,4 | 0,62 | 17,0 | 42,7 |
+    | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78,0 | 94,1 | 163,0 | 0,87 | 37,5 | 99,7 |
+    | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78,4 | 94,3 | 232,0 | 1,01 | 57,4 | 154,8 |
+
+    === "Pose (COCO)"
+
+    Consulte a [Documentação de Estimativa de Pose](https://docs.ultralytics.com/tasks/pose/) para exemplos de uso com esses modelos treinados no conjunto de dados [COCO](https://docs.ultralytics.com/datasets/pose/coco/), que inclui 1 classe pré-treinada, 'person'.
+
+    | Modelo | tamanho
(pixels) | mAPpose
50-95 | mAPpose
50 | Velocidade
CPU ONNX
(ms) | Velocidade
A100 TensorRT
(ms) | parรขmetros
(M) | FLOPs
(B) | + | ---------------------------------------------------------------------------------------------- | ----------------------- | --------------------- | ------------------ | ----------------------------------- | --------------------------------------- | ---------------------- | ------------------ | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50,4 | 80,1 | 131,8 | 1,18 | 3,3 | 9,2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60,0 | 86,2 | 233,2 | 1,42 | 11,6 | 30,2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65,0 | 88,8 | 456,3 | 2,00 | 26,4 | 81,0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67,6 | 90,0 | 784,5 | 2,59 | 44,4 | 168,6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69,2 | 90,2 | 1607,1 | 3,73 | 69,4 | 263,2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71,6 | 91,2 | 4088,7 | 10,04 | 99,1 | 1066,4 | + +## Exemplos de Uso + +Este exemplo fornece exemplos simples de treinamento e inferรชncia do YOLOv8. Para a documentaรงรฃo completa desses e outros [modos](../modes/index.md), consulte as pรกginas de documentaรงรฃo [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) e [Export](../modes/export.md). + +Observe que o exemplo abaixo รฉ para modelos YOLOv8 de [Detecรงรฃo](../tasks/detect.md) para detecรงรฃo de objetos. Para outras tarefas suportadas, consulte a documentaรงรฃo de [Segmentaรงรฃo](../tasks/segment.md), [Classificaรงรฃo](../tasks/classify.md) e [Pose](../tasks/pose.md). + +!!! Example "Exemplo" + + === "Python" + + Modelos prรฉ-treinados `*.pt` PyTorch, bem como arquivos de configuraรงรฃo `*.yaml`, podem ser passados para a classe `YOLO()` para criar uma instรขncia do modelo em Python: + + ```python + from ultralytics import YOLO + + # Carregar um modelo YOLOv8n prรฉ-treinado para COCO + model = YOLO('yolov8n.pt') + + # Exibir informaรงรตes do modelo (opcional) + model.info() + + # Treinar o modelo no exemplo de conjunto de dados COCO8 por 100 รฉpocas + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # Executar inferรชncia com o modelo YOLOv8n na imagem 'bus.jpg' + results = model('caminho/para/bus.jpg') + ``` + + === "CLI" + + Comandos da CLI estรฃo disponรญveis para executar os modelos diretamente: + + ```bash + # Carregar um modelo YOLOv8n prรฉ-treinado para COCO e treinรก-lo no exemplo de conjunto de dados COCO8 por 100 รฉpocas + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # Carregar um modelo YOLOv8n prรฉ-treinado para COCO e executar inferรชncia na imagem 'bus.jpg' + yolo predict model=yolov8n.pt source=caminho/para/bus.jpg + ``` + +## Citaรงรตes e Reconhecimentos + +Se vocรช utilizar o modelo YOLOv8 ou qualquer outro software deste repositรณrio em seu trabalho, por favor cite-o utilizando o formato abaixo: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +Observe que o DOI estรก pendente e serรก adicionado ร  citaรงรฃo assim que estiver disponรญvel. Os modelos YOLOv8 sรฃo disponibilizados sob as licenรงas [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) e [Enterprise](https://ultralytics.com/license). diff --git a/ultralytics/docs/pt/models/yolov8.md:Zone.Identifier b/ultralytics/docs/pt/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/modes/benchmark.md b/ultralytics/docs/pt/modes/benchmark.md new file mode 100755 index 0000000..f311481 --- /dev/null +++ b/ultralytics/docs/pt/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: Aprenda a avaliar a velocidade e a precisรฃo do YOLOv8 em diversos formatos de exportaรงรฃo; obtenha informaรงรตes sobre mรฉtricas mAP50-95, accuracy_top5 e mais. +keywords: Ultralytics, YOLOv8, benchmarking, perfilagem de velocidade, perfilagem de precisรฃo, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, formatos de exportaรงรฃo YOLO +--- + +# Benchmarking de Modelos com o Ultralytics YOLO + +Ecossistema Ultralytics YOLO e integraรงรตes + +## Introduรงรฃo + +Uma vez que seu modelo esteja treinado e validado, o prรณximo passo lรณgico รฉ avaliar seu desempenho em diversos cenรกrios do mundo real. O modo de benchmark no Ultralytics YOLOv8 serve a esse propรณsito, oferecendo uma estrutura robusta para avaliar a velocidade e a precisรฃo do seu modelo em uma gama de formatos de exportaรงรฃo. + +## Por Que o Benchmarking รฉ Crucial? + +- **Decisรตes Informadas:** Obtenha insights sobre o equilรญbrio entre velocidade e precisรฃo. +- **Alocaรงรฃo de Recursos:** Entenda como diferentes formatos de exportaรงรฃo se comportam em diferentes hardwares. +- **Otimizaรงรฃo:** Aprenda qual formato de exportaรงรฃo oferece o melhor desempenho para o seu caso especรญfico. +- **Eficiรชncia de Custos:** Faรงa uso mais eficiente dos recursos de hardware com base nos resultados do benchmark. + +### Mรฉtricas Chave no Modo de Benchmark + +- **mAP50-95:** Para detecรงรฃo de objetos, segmentaรงรฃo e estimativa de pose. +- **accuracy_top5:** Para classificaรงรฃo de imagens. +- **Tempo de Inferรชncia:** Tempo levado para cada imagem em milissegundos. + +### Formatos de Exportaรงรฃo Suportados + +- **ONNX:** Para desempenho รณtimo em CPU +- **TensorRT:** Para eficiรชncia mรกxima em GPU +- **OpenVINO:** Para otimizaรงรฃo em hardware Intel +- **CoreML, TensorFlow SavedModel e Mais:** Para uma variedade de necessidades de implantaรงรฃo. + +!!! Tip "Dica" + + * Exporte para ONNX ou OpenVINO para acelerar atรฉ 3x a velocidade em CPU. + * Exporte para TensorRT para acelerar atรฉ 5x em GPU. + +## Exemplos de Uso + +Execute benchmarks do YOLOv8n em todos os formatos de exportaรงรฃo suportados incluindo ONNX, TensorRT etc. Consulte a seรงรฃo Argumentos abaixo para ver uma lista completa de argumentos de exportaรงรฃo. + +!!! 
Example "Exemplo" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # Benchmark na GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## Argumentos + +Argumentos como `model`, `data`, `imgsz`, `half`, `device` e `verbose` proporcionam aos usuรกrios flexibilidade para ajustar os benchmarks ร s suas necessidades especรญficas e comparar o desempenho de diferentes formatos de exportaรงรฃo com facilidade. + +| Chave | Valor | Descriรงรฃo | +|-----------|---------|----------------------------------------------------------------------------------------| +| `model` | `None` | caminho para o arquivo do modelo, ou seja, yolov8n.pt, yolov8n.yaml | +| `data` | `None` | caminho para o YAML com dataset de benchmarking (sob o rรณtulo `val`) | +| `imgsz` | `640` | tamanho da imagem como um escalar ou lista (h, w), ou seja, (640, 480) | +| `half` | `False` | quantizaรงรฃo FP16 | +| `int8` | `False` | quantizaรงรฃo INT8 | +| `device` | `None` | dispositivo para execuรงรฃo, ou seja, dispositivo cuda=0 ou device=0,1,2,3 ou device=cpu | +| `verbose` | `False` | nรฃo continuar em erro (bool), ou limiar mรญnimo para val (float) | + +## Formatos de Exportaรงรฃo + +Os benchmarks tentarรฃo executar automaticamente em todos os possรญveis formatos de exportaรงรฃo listados abaixo. + +| Formato | Argumento `format` | Modelo | Metadados | Argumentos | +|-----------------------------------------------------------------------|--------------------|---------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [Modelo Salvo do TF](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [GraphDef do TF](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Veja os detalhes completos de `exportaรงรฃo` na pรกgina [Export](https://docs.ultralytics.com/modes/export/). 
diff --git a/ultralytics/docs/pt/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/pt/modes/benchmark.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/benchmark.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/modes/export.md b/ultralytics/docs/pt/modes/export.md
new file mode 100755
index 0000000..a6cb313
--- /dev/null
+++ b/ultralytics/docs/pt/modes/export.md
@@ -0,0 +1,108 @@
+---
+comments: true
+description: Guia passo a passo sobre como exportar seus modelos YOLOv8 para vários formatos como ONNX, TensorRT, CoreML e mais para implantação. Explore agora!
+keywords: YOLO, YOLOv8, Ultralytics, Exportação de modelo, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, exportar modelo
+---
+
+# Exportação de Modelo com Ultralytics YOLO
+
+Ecossistema Ultralytics YOLO e integrações
+
+## Introdução
+
+O objetivo final de treinar um modelo é implantá-lo para aplicações no mundo real. O modo de exportação no Ultralytics YOLOv8 oferece uma ampla gama de opções para exportar seu modelo treinado para diferentes formatos, tornando-o implantável em várias plataformas e dispositivos. Este guia abrangente visa orientá-lo através das nuances da exportação de modelos, mostrando como alcançar a máxima compatibilidade e performance.
+
+**Assista:** Como Exportar Modelo Treinado Customizado do Ultralytics YOLOv8 e Executar Inferência ao Vivo na Webcam.
+ +## Por Que Escolher o Modo de Exportaรงรฃo do YOLOv8? + +- **Versatilidade:** Exporte para mรบltiplos formatos incluindo ONNX, TensorRT, CoreML e mais. +- **Performance:** Ganhe atรฉ 5x aceleraรงรฃo em GPU com TensorRT e 3x aceleraรงรฃo em CPU com ONNX ou OpenVINO. +- **Compatibilidade:** Torne seu modelo universalmente implantรกvel em numerosos ambientes de hardware e software. +- **Facilidade de Uso:** Interface de linha de comando simples e API Python para exportaรงรฃo rรกpida e direta de modelos. + +### Principais Recursos do Modo de Exportaรงรฃo + +Aqui estรฃo algumas das funcionalidades de destaque: + +- **Exportaรงรฃo com Um Clique:** Comandos simples para exportaรงรฃo em diferentes formatos. +- **Exportaรงรฃo em Lote:** Exporte modelos capazes de inferรชncia em lote. +- **Inferรชncia Otimizada:** Modelos exportados sรฃo otimizados para tempos de inferรชncia mais rรกpidos. +- **Vรญdeos Tutoriais:** Guias e tutoriais detalhados para uma experiรชncia de exportaรงรฃo tranquila. + +!!! Tip "Dica" + + * Exporte para ONNX ou OpenVINO para atรฉ 3x aceleraรงรฃo em CPU. + * Exporte para TensorRT para atรฉ 5x aceleraรงรฃo em GPU. + +## Exemplos de Uso + +Exporte um modelo YOLOv8n para um formato diferente como ONNX ou TensorRT. Veja a seรงรฃo de Argumentos abaixo para uma lista completa dos argumentos de exportaรงรฃo. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n.pt') # carrega um modelo oficial + model = YOLO('caminho/para/best.pt') # carrega um modelo treinado personalizado + + # Exportar o modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # exporta modelo oficial + yolo export model=caminho/para/best.pt format=onnx # exporta modelo treinado personalizado + ``` + +## Argumentos + +Configuraรงรตes de exportaรงรฃo para modelos YOLO referem-se ร s vรกrias configuraรงรตes e opรงรตes usadas para salvar ou exportar o modelo para uso em outros ambientes ou plataformas. Essas configuraรงรตes podem afetar a performance, tamanho e compatibilidade do modelo com diferentes sistemas. Algumas configuraรงรตes comuns de exportaรงรฃo de YOLO incluem o formato do arquivo de modelo exportado (por exemplo, ONNX, TensorFlow SavedModel), o dispositivo em que o modelo serรก executado (por exemplo, CPU, GPU) e a presenรงa de recursos adicionais como mรกscaras ou mรบltiplos rรณtulos por caixa. Outros fatores que podem afetar o processo de exportaรงรฃo incluem a tarefa especรญfica para a qual o modelo estรก sendo usado e os requisitos ou restriรงรตes do ambiente ou plataforma alvo. ร‰ importante considerar e configurar cuidadosamente essas configuraรงรตes para garantir que o modelo exportado seja otimizado para o caso de uso pretendido e possa ser usado eficazmente no ambiente alvo. 
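+
+Por exemplo, um esboço mínimo (assumindo um modelo `yolov8n.pt` local; os valores escolhidos são apenas ilustrativos) que combina algumas das configurações da tabela abaixo para gerar um modelo ONNX com eixos dinâmicos e grafo simplificado:
+
+```python
+from ultralytics import YOLO
+
+# Carrega um modelo pré-treinado
+model = YOLO('yolov8n.pt')
+
+# Exporta para ONNX com eixos dinâmicos, grafo simplificado e versão do opset fixada
+model.export(format='onnx', imgsz=640, dynamic=True, simplify=True, opset=12)
+```
+
+Em geral, `dynamic=True` permite lotes e resoluções variáveis no modelo exportado, ao custo de parte das otimizações específicas de forma.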
+
+| Chave | Valor | Descrição |
+|-------------|-----------------|---------------------------------------------------------------------|
+| `format` | `'torchscript'` | formato para exportação |
+| `imgsz` | `640` | tamanho da imagem como escalar ou lista (h, w), ou seja, (640, 480) |
+| `keras` | `False` | usar Keras para exportação TF SavedModel |
+| `optimize` | `False` | TorchScript: otimizar para mobile |
+| `half` | `False` | quantização FP16 |
+| `int8` | `False` | quantização INT8 |
+| `dynamic` | `False` | ONNX/TensorRT: eixos dinâmicos |
+| `simplify` | `False` | ONNX/TensorRT: simplificar modelo |
+| `opset` | `None` | ONNX: versão do opset (opcional, padrão para a mais recente) |
+| `workspace` | `4` | TensorRT: tamanho do espaço de trabalho (GB) |
+| `nms` | `False` | CoreML: adicionar NMS |
+
+## Formatos de Exportação
+
+Os formatos de exportação disponíveis para YOLOv8 estão na tabela abaixo. Você pode exportar para qualquer formato usando o argumento `format`, ou seja, `format='onnx'` ou `format='engine'`.
+
+| Formato | Argumento `format` | Modelo | Metadados | Argumentos |
+|--------------------------------------------------------------------|--------------------|---------------------------|-----------|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | ✅ | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | ✅ | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | ✅ | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | ✅ | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | ❌ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | ✅ | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | ✅ | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | ✅ | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | ✅ | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | ✅ | `imgsz`, `half` |
diff --git a/ultralytics/docs/pt/modes/export.md:Zone.Identifier b/ultralytics/docs/pt/modes/export.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/export.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/modes/index.md b/ultralytics/docs/pt/modes/index.md
new file mode 100755
index 0000000..332489b
--- /dev/null
+++ b/ultralytics/docs/pt/modes/index.md
@@ -0,0 +1,73 @@
+---
+comments: true
+description: Do treinamento ao rastreamento, aproveite ao máximo o YOLOv8 da Ultralytics. Obtenha insights e exemplos para cada modo suportado, incluindo validação, exportação e benchmarking. 
keywords: Ultralytics, YOLOv8, Aprendizado de Máquina, Detecção de Objetos, Treinamento, Validação, Predição, Exportação, Rastreamento, Benchmarking
+---
+
+# Modos Ultralytics YOLOv8
+
+Ecossistema e integrações do Ultralytics YOLO
+
+## Introdução
+
+O Ultralytics YOLOv8 não é apenas mais um modelo de detecção de objetos; é um framework versátil projetado para cobrir todo o ciclo de vida dos modelos de aprendizado de máquina — desde a ingestão de dados e treinamento do modelo até a validação, implantação e rastreamento no mundo real. Cada modo serve a um propósito específico e é projetado para oferecer a flexibilidade e eficiência necessárias para diferentes tarefas e casos de uso.
+
+**Assista:** Tutorial dos Modos Ultralytics: Treinar, Validar, Prever, Exportar e Benchmark.
+ +### Visรฃo Geral dos Modos + +Entender os diferentes **modos** que o Ultralytics YOLOv8 suporta รฉ crรญtico para tirar o mรกximo proveito de seus modelos: + +- **Modo Treino**: Ajuste fino do seu modelo em conjuntos de dados personalizados ou prรฉ-carregados. +- **Modo Validaรงรฃo (Val)**: Um checkpoint pรณs-treinamento para validar o desempenho do modelo. +- **Modo Prediรงรฃo (Predict)**: Libere o poder preditivo do seu modelo em dados do mundo real. +- **Modo Exportaรงรฃo (Export)**: Prepare seu modelo para implantaรงรฃo em vรกrios formatos. +- **Modo Rastreamento (Track)**: Estenda seu modelo de detecรงรฃo de objetos para aplicaรงรตes de rastreamento em tempo real. +- **Modo Benchmarking**: Analise a velocidade e precisรฃo do seu modelo em diversos ambientes de implantaรงรฃo. + +Este guia abrangente visa fornecer uma visรฃo geral e insights prรกticos para cada modo, ajudando vocรช a aproveitar o potencial total do YOLOv8. + +## [Treinar](train.md) + +O modo Treinar รฉ utilizado para treinar um modelo YOLOv8 em um conjunto de dados personalizado. Neste modo, o modelo รฉ treinado usando o conjunto de dados especificado e os hiperparรขmetros escolhidos. O processo de treinamento envolve otimizar os parรขmetros do modelo para que ele possa prever com precisรฃo as classes e localizaรงรตes de objetos em uma imagem. + +[Exemplos de Treino](train.md){ .md-button } + +## [Validar](val.md) + +O modo Validar รฉ utilizado para validar um modelo YOLOv8 apรณs ter sido treinado. Neste modo, o modelo รฉ avaliado em um conjunto de validaรงรฃo para medir sua precisรฃo e desempenho de generalizaรงรฃo. Este modo pode ser usado para ajustar os hiperparรขmetros do modelo para melhorar seu desempenho. + +[Exemplos de Validaรงรฃo](val.md){ .md-button } + +## [Prever](predict.md) + +O modo Prever รฉ utilizado para fazer previsรตes usando um modelo YOLOv8 treinado em novas imagens ou vรญdeos. Neste modo, o modelo รฉ carregado de um arquivo de checkpoint, e o usuรกrio pode fornecer imagens ou vรญdeos para realizar a inferรชncia. O modelo prevรช as classes e localizaรงรตes dos objetos nas imagens ou vรญdeos fornecidos. + +[Exemplos de Prediรงรฃo](predict.md){ .md-button } + +## [Exportar](export.md) + +O modo Exportar รฉ utilizado para exportar um modelo YOLOv8 para um formato que possa ser utilizado para implantaรงรฃo. Neste modo, o modelo รฉ convertido para um formato que possa ser utilizado por outras aplicaรงรตes de software ou dispositivos de hardware. Este modo รฉ รบtil ao implantar o modelo em ambientes de produรงรฃo. + +[Exemplos de Exportaรงรฃo](export.md){ .md-button } + +## [Rastrear](track.md) + +O modo Rastrear รฉ utilizado para rastrear objetos em tempo real usando um modelo YOLOv8. Neste modo, o modelo รฉ carregado de um arquivo de checkpoint, e o usuรกrio pode fornecer um fluxo de vรญdeo ao vivo para realizar o rastreamento de objetos em tempo real. Este modo รฉ รบtil para aplicaรงรตes como sistemas de vigilรขncia ou carros autรดnomos. + +[Exemplos de Rastreamento](track.md){ .md-button } + +## [Benchmark](benchmark.md) + +O modo Benchmark รฉ utilizado para fazer um perfil da velocidade e precisรฃo de vรกrios formatos de exportaรงรฃo para o YOLOv8. Os benchmarks fornecem informaรงรตes sobre o tamanho do formato exportado, suas mรฉtricas `mAP50-95` (para detecรงรฃo de objetos, segmentaรงรฃo e pose) ou `accuracy_top5` (para classificaรงรฃo), e o tempo de inferรชncia em milissegundos por imagem em diversos formatos de exportaรงรฃo, como ONNX, OpenVINO, TensorRT e outros. 
Essas informaรงรตes podem ajudar os usuรกrios a escolher o formato de exportaรงรฃo รณtimo para seu caso de uso especรญfico, com base em seus requisitos de velocidade e precisรฃo. + +[Exemplos de Benchmark](benchmark.md){ .md-button } diff --git a/ultralytics/docs/pt/modes/index.md:Zone.Identifier b/ultralytics/docs/pt/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/modes/predict.md b/ultralytics/docs/pt/modes/predict.md new file mode 100755 index 0000000..319f1d2 --- /dev/null +++ b/ultralytics/docs/pt/modes/predict.md @@ -0,0 +1,226 @@ +--- +comments: true +description: Descubra como usar o modo predict do YOLOv8 para diversas tarefas. Aprenda sobre diferentes fontes de inferรชncia, como imagens, vรญdeos e formatos de dados. +keywords: Ultralytics, YOLOv8, modo predict, fontes de inferรชncia, tarefas de previsรฃo, modo de streaming, processamento de imagens, processamento de vรญdeo, aprendizado de mรกquina, IA +--- + +# Prediรงรฃo de Modelo com Ultralytics YOLO + +Ecossistema e integraรงรตes do Ultralytics YOLO + +## Introduรงรฃo + +No mundo do aprendizado de mรกquina e visรฃo computacional, o processo de fazer sentido a partir de dados visuais รฉ chamado de 'inferรชncia' ou 'prediรงรฃo'. O Ultralytics YOLOv8 oferece um recurso poderoso conhecido como **modo predict** que รฉ personalizado para inferรชncia em tempo real de alto desempenho em uma ampla gama de fontes de dados. + +

+
+ +
+ Assista: Como Extrair as Saรญdas do Modelo Ultralytics YOLOv8 para Projetos Personalizados. +

+ +## Aplicaรงรตes no Mundo Real + +| Manufatura | Esportes | Seguranรงa | +|:-----------------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------------------------:| +| ![Detecรงรฃo de Peรงas de Reposiรงรฃo de Veรญculo](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![Detecรงรฃo de Jogador de Futebol](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![Detecรงรฃo de Queda de Pessoas](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| Detecรงรฃo de Peรงas de Reposiรงรฃo de Veรญculo | Detecรงรฃo de Jogador de Futebol | Detecรงรฃo de Queda de Pessoas | + +## Por Que Usar o Ultralytics YOLO para Inferรชncia? + +Aqui estรก o porquรช de vocรช considerar o modo predict do YOLOv8 para suas diversas necessidades de inferรชncia: + +- **Versatilidade:** Capaz de fazer inferรชncias em imagens, vรญdeos e atรฉ transmissรตes ao vivo. +- **Desempenho:** Projetado para processamento em tempo real e de alta velocidade sem sacrificar a precisรฃo. +- **Facilidade de Uso:** Interfaces Python e CLI intuitivas para implantaรงรฃo e testes rรกpidos. +- **Altamente Customizรกvel:** Vรกrias configuraรงรตes e parรขmetros para ajustar o comportamento de inferรชncia do modelo de acordo com suas necessidades especรญficas. + +### Recursos Chave do Modo Predict + +O modo predict do YOLOv8 รฉ projetado para ser robusto e versรกtil, apresentando: + +- **Compatibilidade com Mรบltiplas Fontes de Dados:** Se seus dados estรฃo na forma de imagens individuais, uma coleรงรฃo de imagens, arquivos de vรญdeo ou transmissรตes de vรญdeo em tempo real, o modo predict atende a todas as necessidades. +- **Modo de Streaming:** Use o recurso de streaming para gerar um gerador eficiente de memรณria de objetos `Results`. Ative isso definindo `stream=True` no mรฉtodo de chamada do preditor. +- **Processamento em Lote:** A capacidade de processar vรกrias imagens ou quadros de vรญdeo em um รบnico lote, acelerando ainda mais o tempo de inferรชncia. +- **Integraรงรฃo Amigรกvel:** Integraรงรฃo fรกcil com pipelines de dados existentes e outros componentes de software, graรงas ร  sua API flexรญvel. + +Os modelos Ultralytics YOLO retornam ou uma lista de objetos `Results` em Python, ou um gerador em Python eficiente de memรณria de objetos `Results` quando `stream=True` รฉ passado para o modelo durante a inferรชncia: + +!!! 
Example "Predict" + + === "Retorna uma lista com `stream=False`" + ```python + from ultralytics import YOLO + + # Carrega um modelo + model = YOLO('yolov8n.pt') # modelo YOLOv8n prรฉ-treinado + + # Executa a inferรชncia em lote em uma lista de imagens + results = model(['im1.jpg', 'im2.jpg']) # retorna uma lista de objetos Results + + # Processa a lista de resultados + for result in results: + boxes = result.boxes # Objeto Boxes para saรญdas de bbox + masks = result.masks # Objeto Masks para saรญdas de mรกscaras de segmentaรงรฃo + keypoints = result.keypoints # Objeto Keypoints para saรญdas de pose + probs = result.probs # Objeto Probs para saรญdas de classificaรงรฃo + ``` + + === "Retorna um gerador com `stream=True`" + ```python + from ultralytics import YOLO + + # Carrega um modelo + model = YOLO('yolov8n.pt') # modelo YOLOv8n prรฉ-treinado + + # Executa a inferรชncia em lote em uma lista de imagens + results = model(['im1.jpg', 'im2.jpg'], stream=True) # retorna um gerador de objetos Results + + # Processa o gerador de resultados + for result in results: + boxes = result.boxes # Objeto Boxes para saรญdas de bbox + masks = result.masks # Objeto Masks para saรญdas de mรกscaras de segmentaรงรฃo + keypoints = result.keypoints # Objeto Keypoints para saรญdas de pose + probs = result.probs # Objeto Probs para saรญdas de classificaรงรฃo + ``` + +## Fontes de Inferรชncia + +O YOLOv8 pode processar diferentes tipos de fontes de entrada para inferรชncia, conforme mostrado na tabela abaixo. As fontes incluem imagens estรกticas, transmissรตes de vรญdeo e vรกrios formatos de dados. A tabela tambรฉm indica se cada fonte pode ser usada no modo de streaming com o argumento `stream=True` โœ…. O modo de streaming รฉ benรฉfico para processar vรญdeos ou transmissรตes ao vivo, pois cria um gerador de resultados em vez de carregar todos os quadros na memรณria. + +!!! Tip "Dica" + + Use `stream=True` para processar vรญdeos longos ou grandes conjuntos de dados para gerenciar a memรณria de forma eficiente. Quando `stream=False`, os resultados de todos os quadros ou pontos de dados sรฃo armazenados na memรณria, o que pode aumentar rapidamente e causar erros de falta de memรณria para grandes entradas. Em contraste, `stream=True` utiliza um gerador, que mantรฉm apenas os resultados do quadro atual ou ponto de dados na memรณria, reduzindo significativamente o consumo de memรณria e prevenindo problemas de falta dela. + +| Fonte | Argumento | Tipo | Notas | +|-----------------|--------------------------------------------|-----------------|-------------------------------------------------------------------------------------------------------------------------| +| imagem | `'image.jpg'` | `str` ou `Path` | Arquivo de imagem รบnico. | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL para uma imagem. | +| captura de tela | `'screen'` | `str` | Captura uma captura de tela. | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | Formato HWC com canais RGB. | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | Formato HWC com canais BGR `uint8 (0-255)`. | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | Formato HWC com canais BGR `uint8 (0-255)`. | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | Formato BCHW com canais RGB `float32 (0.0-1.0)`. | +| CSV | `'sources.csv'` | `str` ou `Path` | Arquivo CSV contendo caminhos para imagens, vรญdeos ou diretรณrios. | +| vรญdeo โœ… | `'video.mp4'` | `str` ou `Path` | Arquivo de vรญdeo em formatos como MP4, AVI, etc. 
+
+Below are code examples for using each source type:
+
+!!! Example "Prediction sources"
+
+    === "image"
+        Run inference on an image file.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define path to the image file
+        source = 'path/to/image.jpg'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "screenshot"
+        Run inference on the current screen content as a screenshot.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define current screenshot as source
+        source = 'screen'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "URL"
+        Run inference on an image or video hosted remotely via URL.
+        ```python
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Define remote image or video URL
+        source = 'https://ultralytics.com/images/bus.jpg'
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "PIL"
+        Run inference on an image opened with the Python Imaging Library (PIL).
+        ```python
+        from PIL import Image
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Open an image using PIL
+        source = Image.open('path/to/image.jpg')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "OpenCV"
+        Run inference on an image read with OpenCV.
+        ```python
+        import cv2
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Read an image using OpenCV
+        source = cv2.imread('path/to/image.jpg')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "numpy"
+        Run inference on an image represented as a numpy array.
+        ```python
+        import numpy as np
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random numpy array of HWC shape (640, 640, 3) with values in range [0, 255] and type uint8
+        source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8')
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
+
+    === "torch"
+        Run inference on an image represented as a PyTorch tensor.
+        ```python
+        import torch
+        from ultralytics import YOLO
+
+        # Load a pretrained YOLOv8n model
+        model = YOLO('yolov8n.pt')
+
+        # Create a random torch tensor of BCHW shape (1, 3, 640, 640) with values in range [0, 1] and type float32
+        source = torch.rand(1, 3, 640, 640, dtype=torch.float32)
+
+        # Run inference on the source
+        results = model(source)  # list of Results objects
+        ```
diff --git a/ultralytics/docs/pt/modes/predict.md:Zone.Identifier b/ultralytics/docs/pt/modes/predict.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/predict.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/modes/track.md b/ultralytics/docs/pt/modes/track.md
new file mode 100755
index 0000000..b2148f8
--- /dev/null
+++ b/ultralytics/docs/pt/modes/track.md
@@ -0,0 +1,200 @@
+---
+comments: true
+description: Learn how to use Ultralytics YOLO for object tracking in video streams. Guides for using different trackers and customizing tracker configurations.
+keywords: Ultralytics, YOLO, object tracking, video streams, BoT-SORT, ByteTrack, Python guide, CLI guide
+---
+
+# Multi-Object Tracking with Ultralytics YOLO
+
+Multi-object tracking examples
+
+Object tracking in the realm of video analytics is a crucial task that not only identifies the location and class of objects within a frame but also maintains a unique ID for each detected object as the video progresses. The applications are limitless, ranging from surveillance and security to real-time sports analytics.
+
+## Why Choose Ultralytics YOLO for Object Tracking?
+
+The output from Ultralytics trackers is consistent with standard object detection, but with the added value of object IDs. This makes it easy to track objects in video streams and perform subsequent analytics. Here's why you should consider using Ultralytics YOLO for your object tracking needs:
+
+- **Efficiency:** Process video streams in real time without compromising accuracy.
+- **Flexibility:** Supports multiple tracking algorithms and configurations.
+- **Ease of Use:** Simple Python API and CLI options for quick integration and deployment.
+- **Customizability:** Easy to use with custom-trained YOLO models, allowing integration into domain-specific applications.
+
+Watch: Object Detection and Tracking with Ultralytics YOLOv8.
+
+## Real-World Applications
+
+| Transportation | Retail | Aquaculture |
+|:---:|:---:|:---:|
+| ![Vehicle Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![People Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![Fish Tracking](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) |
+| Vehicle Tracking | People Tracking | Fish Tracking |
+
+## Features at a Glance
+
+Ultralytics YOLO extends its object detection features to provide robust and versatile object tracking:
+
+- **Real-Time Tracking:** Seamlessly track objects in high-frame-rate videos.
+- **Multiple Tracker Support:** Choose from a variety of established tracking algorithms.
+- **Customizable Tracker Configurations:** Tailor the tracking algorithm to meet specific requirements by adjusting various parameters.
+
+## Available Trackers
+
+Ultralytics YOLO supports the following tracking algorithms. They can be enabled by passing the relevant YAML configuration file, such as `tracker=tracker_type.yaml`:
+
+* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - Use `botsort.yaml` to enable this tracker.
+* [ByteTrack](https://github.com/ifzhang/ByteTrack) - Use `bytetrack.yaml` to enable this tracker.
+
+The default tracker is BoT-SORT.
+
+## Tracking
+
+To run the tracker on video streams, use a trained Detect, Segment or Pose model such as YOLOv8n, YOLOv8n-seg and YOLOv8n-pose.
+
+!!! Example "Example"
Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo oficial ou personalizado + model = YOLO('yolov8n.pt') # Carregar um modelo Detect oficial + model = YOLO('yolov8n-seg.pt') # Carregar um modelo Segment oficial + model = YOLO('yolov8n-pose.pt') # Carregar um modelo Pose oficial + model = YOLO('caminho/para/melhor.pt') # Carregar um modelo treinado personalizado + + # Realizar rastreamento com o modelo + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True) # Rastreamento com rastreador padrรฃo + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml") # Rastreamento com o rastreador ByteTrack + ``` + + === "CLI" + + ```bash + # Realizar rastreamento com vรกrios modelos usando a interface de linha de comando + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" # Modelo Detect oficial + yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4" # Modelo Segment oficial + yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4" # Modelo Pose oficial + yolo track model=caminho/para/melhor.pt source="https://youtu.be/LNwODJXcvt4" # Modelo treinado personalizado + + # Rastrear usando o rastreador ByteTrack + yolo track model=caminho/para/melhor.pt tracker="bytetrack.yaml" + ``` + +Como pode ser visto no uso acima, o rastreamento estรก disponรญvel para todos os modelos Detect, Segment e Pose executados em vรญdeos ou fontes de streaming. + +## Configuraรงรฃo + +### Argumentos de Rastreamento + +A configuraรงรฃo de rastreamento compartilha propriedades com o modo Predict, como `conf`, `iou`, e `show`. Para mais configuraรงรตes, consulte a pรกgina de [Predict](https://docs.ultralytics.com/modes/predict/) model page. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Configurar os parรขmetros de rastreamento e executar o rastreador + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True) + ``` + + === "CLI" + + ```bash + # Configurar parรขmetros de rastreamento e executar o rastreador usando a interface de linha de comando + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3, iou=0.5 show + ``` + +### Seleรงรฃo de Rastreador + +A Ultralytics tambรฉm permite que vocรช use um arquivo de configuraรงรฃo de rastreador modificado. Para fazer isso, simplesmente faรงa uma cรณpia de um arquivo de configuraรงรฃo de rastreador (por exemplo, `custom_tracker.yaml`) de [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) e modifique quaisquer configuraรงรตes (exceto `tracker_type`) conforme suas necessidades. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar o modelo e executar o rastreador com um arquivo de configuraรงรฃo personalizado + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # Carregar o modelo e executar o rastreador com um arquivo de configuraรงรฃo personalizado usando a interface de linha de comando + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +Para uma lista completa de argumentos de rastreamento, consulte a pรกgina [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers). 
+
+## Python Examples
+
+### Persisting Tracks Loop
+
+Here is a Python script using OpenCV (`cv2`) and YOLOv8 to run object tracking on video frames. This script assumes you have already installed the necessary packages (`opencv-python` and `ultralytics`). The argument `persist=True` tells the tracker that the current image or frame is the next in a sequence and to expect tracks from the previous image in the current image.
+
+!!! Example "Streaming for-loop with tracking"
+
+    ```python
+    import cv2
+    from ultralytics import YOLO
+
+    # Load the YOLOv8 model
+    model = YOLO('yolov8n.pt')
+
+    # Open the video file
+    video_path = "path/to/video.mp4"
+    cap = cv2.VideoCapture(video_path)
+
+    # Loop through the video frames
+    while cap.isOpened():
+        # Read a frame from the video
+        success, frame = cap.read()
+
+        if success:
+            # Run YOLOv8 tracking on the frame, persisting tracks between frames
+            results = model.track(frame, persist=True)
+
+            # Visualize the results on the frame
+            annotated_frame = results[0].plot()
+
+            # Display the annotated frame
+            cv2.imshow("YOLOv8 Tracking", annotated_frame)
+
+            # Break the loop if 'q' is pressed
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+        else:
+            # Break the loop if the end of the video is reached
+            break
+
+    # Release the video capture object and close the display window
+    cap.release()
+    cv2.destroyAllWindows()
+    ```
+
+Note the change from `model(frame)` to `model.track(frame)`, which enables object tracking instead of simple detection. This modified script will run the tracker on each frame of the video, visualize the results, and display them in a window. The loop can be exited by pressing 'q'.
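+
+If you need the numeric track IDs themselves, for example to count unique objects or log trajectories, they are exposed on the `boxes` attribute of the results. A minimal sketch (note that `boxes.id` can be `None` on frames where no track has been confirmed yet, which the check below guards against):
+
+```python
+# Inside the loop above, after: results = model.track(frame, persist=True)
+if results[0].boxes.id is not None:
+    track_ids = results[0].boxes.id.int().cpu().tolist()  # one ID per tracked box
+    centers = results[0].boxes.xywh.cpu()                 # matching box centers and sizes
+    for track_id, box in zip(track_ids, centers):
+        print(f"track {track_id}: center=({box[0]:.0f}, {box[1]:.0f})")
+```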
+
+## Contribute New Trackers
+
+Are you proficient in multi-object tracking and have successfully implemented or adapted a tracking algorithm with Ultralytics YOLO? We invite you to contribute to our Trackers section in [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! Your real-world applications and solutions could be invaluable for users working on tracking tasks.
+
+By contributing to this section, you help expand the scope of tracking solutions available within the Ultralytics YOLO framework, adding another layer of functionality and utility for the community.
+
+To initiate your contribution, please refer to our [Contributing Guide](https://docs.ultralytics.com/help/contributing) for comprehensive instructions on submitting a Pull Request (PR) 🛠️. We are excited to see what you bring to the table!
+
+Together, let's enhance the tracking capabilities of the Ultralytics YOLO ecosystem 🙏!
diff --git a/ultralytics/docs/pt/modes/track.md:Zone.Identifier b/ultralytics/docs/pt/modes/track.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/track.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/modes/train.md b/ultralytics/docs/pt/modes/train.md
new file mode 100755
index 0000000..b677b4b
--- /dev/null
+++ b/ultralytics/docs/pt/modes/train.md
@@ -0,0 +1,206 @@
+---
+comments: true
+description: Step-by-step guide to training YOLOv8 models with Ultralytics YOLO, including examples of single-GPU and multi-GPU training
+keywords: Ultralytics, YOLOv8, YOLO, object detection, train mode, custom dataset, GPU training, multi-GPU, hyperparameters, CLI examples, Python examples
+---
+
+# Model Training with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Training a deep learning model involves feeding it data and adjusting its parameters so that it can make accurate predictions. Train mode in Ultralytics YOLOv8 is engineered for effective and efficient training of object detection models, fully utilizing modern hardware capabilities. This guide aims to cover all the details you need to get started with training your own models using YOLOv8's robust set of features.
+
+Watch: How to Train a YOLOv8 model on Your Custom Dataset in Google Colab.
+
+## Why Choose Ultralytics YOLO for Training?
+
+Here are some compelling reasons to opt for YOLOv8's Train mode:
+
+- **Efficiency:** Make the most out of your hardware, whether you're on a single-GPU setup or scaling across multiple GPUs.
+- **Versatility:** Train on custom datasets in addition to readily available ones like COCO, VOC, and ImageNet.
+- **User-Friendly:** Simple yet powerful CLI and Python interfaces for a straightforward training experience.
+- **Hyperparameter Flexibility:** A broad range of customizable hyperparameters to fine-tune model performance.
+
+### Key Features of Train Mode
+
+The following are some notable features of YOLOv8's Train mode:
+
+- **Automatic Dataset Download:** Standard datasets like COCO, VOC, and ImageNet are downloaded automatically on first use.
+- **Multi-GPU Support:** Scale your training efforts seamlessly across multiple GPUs to expedite the process.
+- **Hyperparameter Configuration:** The option to modify hyperparameters through YAML configuration files or CLI arguments.
+- **Visualization and Monitoring:** Real-time tracking of training metrics and visualization of the learning process for better insights.
+
+!!! Tip "Tip"
+
+    * YOLOv8 datasets like COCO, VOC, ImageNet and many others are downloaded automatically on first use, i.e. `yolo train data=coco.yaml`
+
+## Usage Examples
+
+Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. The training device can be specified using the `device` argument. If no argument is passed, GPU `device=0` will be used if available, otherwise `device=cpu` will be used. See the Arguments section below for a full list of training arguments.
+
+!!! Example "Single-GPU and CPU Training Example"
+
+    The device is determined automatically. If a GPU is available it will be used, otherwise training will start on CPU.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Start training from a pretrained *.pt model
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### Multi-GPU Training
+
+Multi-GPU training allows for more efficient utilization of available hardware resources by distributing the training load across multiple GPUs. This feature is available through both the Python API and the command-line interface.
+To enable multi-GPU training, specify the GPU device IDs you wish to use.
+
+!!! Example "Multi-GPU Training Example"
+
+    To train with 2 GPUs, CUDA devices 0 and 1, use the following commands. Expand to additional GPUs as required.
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with 2 GPUs
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1])
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using GPUs 0 and 1
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1
+        ```
+
+### Apple M1 and M2 MPS Training
+
+With support for Apple M1 and M2 chips integrated in the Ultralytics YOLO models, it's now possible to train your models on devices utilizing the powerful Metal Performance Shaders (MPS) framework. MPS offers a high-performance way of executing computation and image processing tasks on Apple's custom silicon.
+
+To enable training on Apple M1 and M2 chips, you should specify 'mps' as your device when initiating the training process. Below is an example of how you could do this in Python and via the command line:
+
+!!! Example "MPS Training Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+
+        # Train the model with MPS
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Start training from a pretrained *.pt model using MPS
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps
+        ```
+
+By leveraging the computational power of the M1/M2 chips, this enables more efficient processing of training tasks. For more detailed guidance and advanced configuration options, please refer to the [PyTorch MPS documentation](https://pytorch.org/docs/stable/notes/mps.html).
+
+## Logging
+
+In training a YOLOv8 model, you might find it valuable to keep track of the model's performance over time. This is where logging comes into play. Ultralytics YOLO provides support for three types of loggers: Comet, ClearML, and TensorBoard.
+
+To use a logger, pick one from the sections below and run its snippet. The chosen logger will be installed and initialized.
+
+### Comet
+
+[Comet](https://www.comet.ml/site/) is a platform that allows data scientists and developers to track, compare, explain and optimize experiments and models. It provides functionalities such as real-time metrics, code diffs, and hyperparameter tracking.
+
+To use Comet:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        # pip install comet_ml
+        import comet_ml
+
+        comet_ml.init()
+        ```
+
+Remember to sign in to your Comet account on their website and get your API key. You will need to add this to your environment variables or your script in order to log your experiments.
+
+### ClearML
+
+[ClearML](https://www.clear.ml/) is an open-source platform that automates tracking of experiments and helps with efficient sharing of resources.
+It is designed to help teams manage, execute, and reproduce their ML work more efficiently.
+
+To use ClearML:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        # pip install clearml
+        import clearml
+
+        clearml.browser_login()
+        ```
+
+After running this script, you will need to sign in to your ClearML account in the browser and authenticate your session.
+
+### TensorBoard
+
+[TensorBoard](https://www.tensorflow.org/tensorboard) is a visualization toolkit for TensorFlow. It allows you to visualize your TensorFlow graph, plot quantitative metrics about the execution of your graph, and show additional data like images that pass through it.
+
+To use TensorBoard in [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb):
+
+!!! Example "Example"
+
+    === "CLI"
+        ```bash
+        load_ext tensorboard
+        tensorboard --logdir ultralytics/runs  # replace with your 'runs' directory
+        ```
+
+To use TensorBoard locally, run the command below and view the results at http://localhost:6006/:
+
+!!! Example "Example"
+
+    === "CLI"
+        ```bash
+        tensorboard --logdir ultralytics/runs  # replace with your 'runs' directory
+        ```
+
+This will load TensorBoard and direct it to the directory where your training logs are saved.
+
+After setting up your logger, you can then proceed with your model training. All training metrics will be automatically logged to your chosen platform, and you can access these logs to monitor your model's performance over time, compare different models, and identify areas for improvement.
diff --git a/ultralytics/docs/pt/modes/train.md:Zone.Identifier b/ultralytics/docs/pt/modes/train.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/train.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/modes/val.md b/ultralytics/docs/pt/modes/val.md
new file mode 100755
index 0000000..a482f14
--- /dev/null
+++ b/ultralytics/docs/pt/modes/val.md
@@ -0,0 +1,86 @@
+---
+comments: true
+description: Guide to Validating YOLOv8 Models. Learn how to evaluate the performance of your YOLO models using validation settings and metrics with Python and CLI examples.
+keywords: Ultralytics, YOLO Docs, YOLOv8, validation, model evaluation, hyperparameters, accuracy, metrics, Python, CLI
+---
+
+# Model Validation with Ultralytics YOLO
+
+Ultralytics YOLO ecosystem and integrations
+
+## Introduction
+
+Validation is a critical step in the machine learning pipeline, allowing you to assess the quality of your trained models. Val mode in Ultralytics YOLOv8 provides a robust suite of tools and metrics for evaluating the performance of your object detection models. This guide serves as a complete resource for understanding how to effectively use the Val mode to ensure that your models are both accurate and reliable.
+
+## Why Validate with Ultralytics YOLO?
+
+Here are the advantages of using YOLOv8's Val mode:
+
+- **Precision:** Get accurate metrics like mAP50, mAP75 and mAP50-95 to comprehensively evaluate your model.
+- **Convenience:** Utilize built-in features that remember training settings, simplifying the validation process.
+- **Flexibility:** Validate your model with the same or different datasets and image sizes.
+- **Hyperparameter Tuning:** Use validation metrics to fine-tune your model for better performance.
+
+### Key Features of Val Mode
+
+These are the notable functionalities offered by YOLOv8's Val mode:
+
+- **Automated Settings:** Models remember their training configurations for straightforward validation.
+- **Multi-Metric Support:** Evaluate your model based on a range of accuracy metrics.
+- **CLI and Python API:** Choose from the command-line interface or Python API based on your preference for validation.
+- **Data Compatibility:** Works seamlessly with datasets used during the training phase as well as custom datasets.
+
+!!! Tip "Tip"
+
+    * YOLOv8 models automatically remember their training settings, so you can validate a model at the same image size and on the original dataset easily with just `yolo val model=yolov8n.pt` or `model('yolov8n.pt').val()`
+
+## Usage Examples
+
+Validate trained YOLOv8n model accuracy on the COCO128 dataset. No argument needs to be passed, as the `model` retains its training data and arguments as model attributes. See the Arguments section below for a full list of validation arguments.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Validate the model
+        metrics = model.val()  # no arguments needed, dataset and settings remembered
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # a list containing map50-95 of each category
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # val official model
+        yolo detect val model=path/to/best.pt  # val custom model
+        ```
+
+## Arguments
+
+Validation settings for YOLO models refer to the various hyperparameters and configurations used to evaluate the model's performance on a validation dataset. These settings can affect the model's performance, speed, and accuracy. Some common YOLO validation settings include the batch size, how often validation is performed during training, and the metrics used to evaluate model performance. Other factors that may affect the validation process include the size and composition of the validation dataset and the specific task the model is being used for. It is important to carefully tune and experiment with these settings to ensure that the model performs well on the validation dataset and to detect and prevent overfitting. A short usage sketch follows the table.
+
+| Key           | Value   | Description                                                             |
+|---------------|---------|-------------------------------------------------------------------------|
+| `data`        | `None`  | path to data file, e.g. coco128.yaml                                    |
+| `imgsz`       | `640`   | size of input images as integer                                         |
+| `batch`       | `16`    | number of images per batch (-1 for AutoBatch)                           |
+| `save_json`   | `False` | save results to JSON file                                               |
+| `save_hybrid` | `False` | save hybrid version of labels (labels + additional predictions)         |
+| `conf`        | `0.001` | object confidence threshold for detection                               |
+| `iou`         | `0.6`   | intersection over union (IoU) threshold for NMS                         |
+| `max_det`     | `300`   | maximum number of detections per image                                  |
+| `half`        | `True`  | use half precision (FP16)                                               |
+| `device`      | `None`  | device to run on, e.g. cuda device=0/1/2/3 or device=cpu                |
+| `dnn`         | `False` | use OpenCV DNN for ONNX inference                                       |
+| `plots`       | `False` | show plots during training                                              |
+| `rect`        | `False` | rectangular val with each batch collated for minimum padding            |
+| `split`       | `val`   | dataset split to use for validation, e.g. 'val', 'test' or 'train'      |
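+
+As a quick illustration of how these keys map onto a call, the sketch below overrides a few of the settings from the table. The values are examples only; any key left out falls back to the defaults listed above:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')
+
+# Validate with explicit settings instead of the remembered training configuration
+metrics = model.val(data='coco128.yaml', imgsz=640, batch=16, conf=0.001, iou=0.6, device='cpu')
+```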
diff --git a/ultralytics/docs/pt/modes/val.md:Zone.Identifier b/ultralytics/docs/pt/modes/val.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/modes/val.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/quickstart.md b/ultralytics/docs/pt/quickstart.md
new file mode 100755
index 0000000..242193a
--- /dev/null
+++ b/ultralytics/docs/pt/quickstart.md
@@ -0,0 +1,198 @@
+---
+comments: true
+description: Explore various methods to install Ultralytics using pip, conda, git and Docker. Learn how to use Ultralytics with the command line interface or within your Python projects.
+keywords: Ultralytics installation, pip install Ultralytics, Docker install Ultralytics, Ultralytics command line interface, Ultralytics Python interface
+---
+
+## Install Ultralytics
+
+Ultralytics provides various installation methods, including pip, conda, and Docker. Install YOLOv8 via the `ultralytics` pip package for the latest stable release, or by cloning the [Ultralytics GitHub repository](https://github.com/ultralytics/ultralytics) for the most up-to-date version. Docker can be used to execute the package in an isolated container, avoiding local installation.
+
+!!! Example "Install"
+
+    === "Pip install (recommended)"
+        Install the `ultralytics` package using pip, or update an existing installation by running `pip install -U ultralytics`. Visit the Python Package Index (PyPI) for more details on the `ultralytics` package: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/).
+
+        [![PyPI version](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![Downloads](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics)
+
+        ```bash
+        # Install the ultralytics package from PyPI
+        pip install ultralytics
+        ```
+
+        You can also install the `ultralytics` package directly from the GitHub [repository](https://github.com/ultralytics/ultralytics). This might be useful if you want the latest development version. Make sure to have the Git command-line tool installed on your system. The `@main` command installs the `main` branch and may be modified to another branch, i.e. `@my-branch`, or removed entirely to default to the `main` branch.
+
+        ```bash
+        # Install the ultralytics package from GitHub
+        pip install git+https://github.com/ultralytics/ultralytics.git@main
+        ```
+
+    === "Conda install"
+        Conda is an alternative package manager to pip which may also be used for installation. Visit Anaconda for more details at [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). The Ultralytics feedstock repository for updating the conda package is at [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/).
+
+        [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics)
+
+        ```bash
+        # Install the ultralytics package using conda
+        conda install -c conda-forge ultralytics
+        ```
+
+        !!! Note "Note"
+
+            If you are installing in a CUDA environment, best practice is to install `ultralytics`, `pytorch` and `pytorch-cuda` in the same command to allow the conda package manager to resolve any conflicts, or else to install `pytorch-cuda` last to allow it to override the CPU-specific `pytorch` package if necessary.
+            ```bash
+            # Install all packages together using conda
+            conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics
+            ```
+
+        ### Conda Docker Image
+
+        Ultralytics Conda Docker images are also available from [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). These images are based on [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) and are a simple way to start using `ultralytics` in a Conda environment.
+
+        ```bash
+        # Set image name as a variable
+        t=ultralytics/ultralytics:latest-conda
+
+        # Pull the latest ultralytics image from Docker Hub
+        sudo docker pull $t
+
+        # Run the ultralytics image in a container with GPU support
+        sudo docker run -it --ipc=host --gpus all $t  # all GPUs
+        sudo docker run -it --ipc=host --gpus '"device=2,3"' $t  # specify GPUs
+        ```
+
+    === "Git clone"
+        Clone the `ultralytics` repository if you are interested in contributing to development or wish to experiment with the latest source code. After cloning, navigate into the directory and install the package in editable mode `-e` using pip.
+        ```bash
+        # Clone the ultralytics repository
+        git clone https://github.com/ultralytics/ultralytics
+
+        # Navigate to the cloned directory
+        cd ultralytics
+
+        # Install the package in editable mode for development
+        pip install -e .
+        ```
+
+See the `ultralytics` [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) file for a list of dependencies. Note that all examples above install all required dependencies.
+
+Watch: Ultralytics YOLO Quick Start Guide
+
+!!! Tip "Tip"
+
+    PyTorch requirements vary by operating system and CUDA requirements, so it's recommended to install PyTorch first following the instructions at [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally).
+
+    PyTorch Installation Instructions
+
+## Use Ultralytics with CLI
+
+The Ultralytics command line interface (CLI) allows for simple single-line commands without the need for a Python environment. The CLI requires no customization or Python code. You can simply run all tasks from the terminal with the `yolo` command. Check out the [CLI Guide](/../usage/cli.md) to learn more about using YOLOv8 from the command line.
+
+!!! Example "Example"
+
+    === "Syntax"
+
+        Ultralytics `yolo` commands use the following syntax:
+        ```bash
+        yolo TASK MODE ARGS
+
+        Where   TASK (optional) is one of [detect, segment, classify]
+                MODE (required) is one of [train, val, predict, export, track]
+                ARGS (optional) are any number of custom 'arg=value' pairs like 'imgsz=320' that override defaults.
+        ```
+        See all ARGS in the full [Configuration Guide](/../usage/cfg.md) or with `yolo cfg`
+
+    === "Train"
+
+        Train a detection model for 10 epochs with an initial learning rate of 0.01
+        ```bash
+        yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01
+        ```
+
+    === "Predict"
+
+        Predict a YouTube video using a pretrained segmentation model at image size 320:
+        ```bash
+        yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+        ```
+
+    === "Val"
+
+        Val a pretrained detection model at batch-size 1 and image size 640:
+        ```bash
+        yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640
+        ```
+
+    === "Export"
+
+        Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
+        ```bash
+        yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
+        ```
+
+    === "Special"
+
+        Run special commands to see version, view settings, run checks and more:
+        ```bash
+        yolo help
+        yolo checks
+        yolo version
+        yolo settings
+        yolo copy-cfg
+        yolo cfg
+        ```
+
+!!! Warning "Warning"
+
+    Arguments must be passed as `arg=value` pairs, split by an equals `=` sign and delimited by spaces ` ` between pairs. Do not use `--` argument prefixes or commas `,` between arguments.
+
+    - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`  ✅
+    - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`  ❌
+    - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`  ❌
+
+[CLI Guide](/../usage/cli.md){ .md-button }
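+
+Of the special commands above, `yolo checks` is the quickest way to confirm your installation is healthy. The same diagnostics are available from Python via the `checks()` utility that ships with the package:
+
+```python
+import ultralytics
+
+# Prints environment info (versions, CPU/GPU, RAM, disk) and verifies the setup
+ultralytics.checks()
+```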
+
+## Use Ultralytics with Python
+
+YOLOv8's Python interface allows for seamless integration into your Python projects, making it easy to load, run, and process the model's output. Designed with simplicity and ease of use in mind, the Python interface enables users to quickly implement object detection, segmentation, and classification in their projects. This makes YOLOv8's Python interface an invaluable tool for anyone looking to incorporate these capabilities into their Python projects.
+
+For example, users can load a model, train it, evaluate its performance on a validation set, and even export it to ONNX format with just a few lines of code. Check out the [Python Guide](/../usage/python.md) to learn more about using YOLOv8 within your Python projects.
+
+!!! Example "Example"
+
+    ```python
+    from ultralytics import YOLO
+
+    # Create a new YOLO model from scratch
+    model = YOLO('yolov8n.yaml')
+
+    # Load a pretrained YOLO model (recommended for training)
+    model = YOLO('yolov8n.pt')
+
+    # Train the model using the 'coco128.yaml' dataset for 3 epochs
+    results = model.train(data='coco128.yaml', epochs=3)
+
+    # Evaluate the model's performance on the validation set
+    results = model.val()
+
+    # Perform object detection on an image using the model
+    results = model('https://ultralytics.com/images/bus.jpg')
+
+    # Export the model to ONNX format
+    success = model.export(format='onnx')
+    ```
+
+[Python Guide](/../usage/python.md){.md-button .md-button--primary}
diff --git a/ultralytics/docs/pt/quickstart.md:Zone.Identifier b/ultralytics/docs/pt/quickstart.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/pt/quickstart.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/pt/tasks/classify.md b/ultralytics/docs/pt/tasks/classify.md
new file mode 100755
index 0000000..0e04bec
--- /dev/null
+++ b/ultralytics/docs/pt/tasks/classify.md
@@ -0,0 +1,172 @@
+---
+comments: true
+description: Learn about YOLOv8 Classify models for image classification. Get detailed information on the List of Pretrained Models and how to Train, Validate, Predict and Export models.
+keywords: Ultralytics, YOLOv8, Image Classification, Pretrained Models, YOLOv8n-cls, Training, Validation, Prediction, Model Export
+---
+
+# Image Classification
+
+Image classification examples
+
+Image classification is the simplest of the three tasks and involves classifying an entire image into one of a set of predefined classes.
+
+The output of an image classifier is a single class label and a confidence score. Image classification is useful when you only need to know what class an image belongs to and don't need to know where objects of that class are located or what their exact shape is.
+
+!!! Tip "Tip"
+
+    YOLOv8 Classify models use the `-cls` suffix, i.e. `yolov8n-cls.pt`, and are pretrained on [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+YOLOv8 pretrained Classify models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset.
+
+[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
+
+| Model                                                                                        | size<br>(pixels) | acc<br>top1 | acc<br>top5 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) at 640 |
+|----------------------------------------------------------------------------------------------|------------------|-------------|-------------|---------------------------|--------------------------------|---------------|---------------------|
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224              | 66.6        | 87.0        | 12.9                      | 0.31                           | 2.7           | 4.3                 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224              | 72.3        | 91.1        | 23.4                      | 0.35                           | 6.4           | 13.5                |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224              | 76.4        | 93.2        | 85.4                      | 0.62                           | 17.0          | 42.7                |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224              | 78.0        | 94.1        | 163.0                     | 0.87                           | 37.5          | 99.7                |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224              | 78.4        | 94.3        | 232.0                     | 1.01                           | 57.4          | 154.8               |
+
+- **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set.<br>
+  Reproduce with `yolo val classify data=path/to/ImageNet device=0`
+- **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.<br>
+  Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
+
+## Train
+
+Train YOLOv8n-cls on the MNIST160 dataset for 100 epochs at image size 64. For a full list of available arguments, see the [Configuration](/../usage/cfg.md) page.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n-cls.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n-cls.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='mnist160', epochs=100, imgsz=64)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64
+
+        # Start training from a pretrained *.pt model
+        yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64
+        ```
+
+### Dataset format
+
+The YOLO classification dataset format can be found in detail in the [Dataset Guide](../../../datasets/classify/index.md).
+
+## Val
+
+Validate trained YOLOv8n-cls model accuracy on the MNIST160 dataset. No argument needs to be passed, as the `model` retains its training data and arguments as model attributes.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n-cls.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Validate the model
+        metrics = model.val()  # no arguments needed, dataset and settings remembered
+        metrics.top1  # top1 accuracy
+        metrics.top5  # top5 accuracy
+        ```
+    === "CLI"
+
+        ```bash
+        yolo classify val model=yolov8n-cls.pt  # val official model
+        yolo classify val model=path/to/best.pt  # val custom model
+        ```
+
+## Predict
+
+Use a trained YOLOv8n-cls model to run predictions on images.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n-cls.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Predict with the model
+        results = model('https://ultralytics.com/images/bus.jpg')  # predict on an image
+        ```
+    === "CLI"
+
+        ```bash
+        yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg'  # predict with official model
+        yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'  # predict with custom model
+        ```
+
+See full `predict` mode details on the [Predict](https://docs.ultralytics.com/modes/predict/) page.
+
+## Export
+
+Export a YOLOv8n-cls model to a different format like ONNX, CoreML, etc.
+
+!!! Example "Example"
Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-cls.pt') # carregar um modelo oficial + model = YOLO('path/to/best.pt') # carregar um modelo treinado personalizado + + # Exportar o modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # exportar modelo oficial + yolo export model=path/to/best.pt format=onnx # exportar modelo treinado personalizado + ``` + +Os formatos de exportaรงรฃo YOLOv8-cls disponรญveis estรฃo na tabela abaixo. Vocรช pode prever ou validar diretamente nos modelos exportados, ou seja, `yolo predict model=yolov8n-cls.onnx`. Exemplos de uso sรฃo mostrados para seu modelo apรณs a conclusรฃo da exportaรงรฃo. + +| Formato | Argumento `format` | Modelo | Metadata | Argumentos | +|--------------------------------------------------------------------|--------------------|-------------------------------|----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +Veja detalhes completos da `exportaรงรฃo` na pรกgina [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/pt/tasks/classify.md:Zone.Identifier b/ultralytics/docs/pt/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/tasks/detect.md b/ultralytics/docs/pt/tasks/detect.md new file mode 100755 index 0000000..d3f84c0 --- /dev/null +++ b/ultralytics/docs/pt/tasks/detect.md @@ -0,0 +1,185 @@ +--- +comments: true +description: Documentaรงรฃo oficial do YOLOv8 por Ultralytics. Aprenda como treinar, validar, predizer e exportar modelos em vรกrios formatos. Incluindo estatรญsticas detalhadas de desempenho. 
+keywords: YOLOv8, Ultralytics, object detection, pretrained models, training, validation, prediction, model export, COCO, ImageNet, PyTorch, ONNX, CoreML
+---
+
+# Object Detection
+
+Object detection examples
+
+Object detection is a task that involves identifying the location and class of objects in an image or video stream.
+
+The output of an object detector is a set of bounding boxes that enclose the objects in the image, along with class labels and confidence scores for each box. Object detection is a good choice when you need to identify objects of interest in a scene, but don't need to know exactly where the object is or its exact shape.
+
+Watch: Object Detection with Pre-trained Ultralytics YOLOv8 Model.
+
+!!! Tip "Tip"
+
+    YOLOv8 Detect models are the default YOLOv8 models, i.e. `yolov8n.pt`, and are pretrained on [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+YOLOv8 pretrained Detect models are shown here. Detect, Segment and Pose models are pretrained on the [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) dataset, while Classify models are pretrained on the [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) dataset.
+
+[Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
+
+| Model                                                                                | size<br>(pixels) | mAPval<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+|--------------------------------------------------------------------------------------|------------------|-----------------|---------------------------|--------------------------------|---------------|--------------|
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640              | 37.3            | 80.4                      | 0.99                           | 3.2           | 8.7          |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640              | 44.9            | 128.4                     | 1.20                           | 11.2          | 28.6         |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640              | 50.2            | 234.7                     | 1.83                           | 25.9          | 78.9         |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640              | 52.9            | 375.2                     | 2.39                           | 43.7          | 165.2        |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640              | 53.9            | 479.1                     | 3.53                           | 68.2          | 257.8        |
+
+- **mAPval** values are for single-model single-scale on the [COCO val2017](http://cocodataset.org) dataset.<br>
+  Reproduce using `yolo val detect data=coco.yaml device=0`
+- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance.<br>
+  Reproduce using `yolo val detect data=coco128.yaml batch=1 device=0|cpu`
+
+## Train
+
+Train YOLOv8n on the COCO128 dataset for 100 epochs at image size 640. For a full list of available arguments, see the [Configuration](/../usage/cfg.md) page.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.yaml')  # build a new model from YAML
+        model = YOLO('yolov8n.pt')  # load a pretrained model (recommended for training)
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # build from YAML and transfer weights
+
+        # Train the model
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+    === "CLI"
+
+        ```bash
+        # Build a new model from YAML and start training from scratch
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # Start training from a pretrained *.pt model
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # Build a new model from YAML, transfer pretrained weights to it and start training
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### Dataset format
+
+The YOLO detection dataset format can be found in detail in the [Dataset Guide](../../../datasets/detect/index.md). To convert your existing dataset from other formats (like COCO etc.) to YOLO format, please use the [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) tool by Ultralytics.
+
+## Val
+
+Validate trained YOLOv8n model accuracy on the COCO128 dataset. No argument needs to be passed, as the `model` retains its training `data` and arguments as model attributes.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Validate the model
+        metrics = model.val()  # no arguments needed, dataset and settings remembered
+        metrics.box.map  # map50-95
+        metrics.box.map50  # map50
+        metrics.box.map75  # map75
+        metrics.box.maps  # a list containing map50-95 of each category
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect val model=yolov8n.pt  # val official model
+        yolo detect val model=path/to/best.pt  # val custom model
+        ```
+
+## Predict
+
+Use a trained YOLOv8n model to run predictions on images.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom model
+
+        # Predict with the model
+        results = model('https://ultralytics.com/images/bus.jpg')  # predict on an image
+        ```
+    === "CLI"
+
+        ```bash
+        yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'  # predict with official model
+        yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg'  # predict with custom model
+        ```
+
+See full `predict` mode details on the [Predict](https://docs.ultralytics.com/modes/predict/) page.
+
+## Export
+
+Export a YOLOv8n model to a different format like ONNX, CoreML, etc.
+
+!!! Example "Example"
Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n.pt') # carregar um modelo oficial + model = YOLO('caminho/para/best.pt') # carregar um modelo treinado personalizado + + # Exportar o modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # exportar modelo oficial + yolo export model=caminho/para/best.pt format=onnx # exportar modelo treinado personalizado + ``` + +Os formatos de exportaรงรฃo YOLOv8 disponรญveis estรฃo na tabela abaixo. Vocรช pode fazer prediรงรตes ou validar diretamente em modelos exportados, ou seja, `yolo predict model=yolov8n.onnx`. Exemplos de uso sรฃo mostrados para o seu modelo apรณs a exportaรงรฃo ser concluรญda. + +| Formato | Argumento `format` | Modelo | Metadados | Argumentos | +|--------------------------------------------------------------------|--------------------|---------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +Veja os detalhes completos de `exportar` na pรกgina [Exportaรงรฃo](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/pt/tasks/detect.md:Zone.Identifier b/ultralytics/docs/pt/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/tasks/index.md b/ultralytics/docs/pt/tasks/index.md new file mode 100755 index 0000000..209cac7 --- /dev/null +++ b/ultralytics/docs/pt/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: Aprenda sobre as principais tarefas de visรฃo computacional que o YOLOv8 pode realizar, incluindo detecรงรฃo, segmentaรงรฃo, classificaรงรฃo e estimativa de pose. Entenda seus usos em seus projetos de IA. +keywords: Ultralytics, YOLOv8, Detecรงรฃo, Segmentaรงรฃo, Classificaรงรฃo, Estimativa de Pose, Framework de IA, Tarefas de Visรฃo Computacional +--- + +# Tarefas do Ultralytics YOLOv8 + +
+Tarefas suportadas pelo Ultralytics YOLO + +YOLOv8 รฉ um framework de IA que suporta mรบltiplas tarefas de **visรฃo computacional**. O framework pode ser usado para realizar [detecรงรฃo](detect.md), [segmentaรงรฃo](segment.md), [classificaรงรฃo](classify.md) e estimativa de [pose](pose.md). Cada uma dessas tarefas tem um objetivo e caso de uso diferente. + +!!! Note "Nota" + + ๐Ÿšง Nossa documentaรงรฃo multilรญngue estรก atualmente em construรงรฃo e estamos trabalhando para aprimorรก-la. Agradecemos sua paciรชncia! ๐Ÿ™ + +
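+Como esboรงo mรญnimo (assumindo os pesos oficiais `yolov8n*.pt`, baixados automaticamente no primeiro uso), o trecho abaixo ilustra que a tarefa รฉ selecionada apenas pelo peso carregado; a mesma API atende a todas as tarefas:
+
+```python
+from ultralytics import YOLO
+
+# O sufixo do arquivo de pesos define a tarefa executada
+detector = YOLO('yolov8n.pt')             # detecรงรฃo de objetos
+segmentador = YOLO('yolov8n-seg.pt')      # segmentaรงรฃo de instรขncias
+classificador = YOLO('yolov8n-cls.pt')    # classificaรงรฃo de imagens
+estimador_pose = YOLO('yolov8n-pose.pt')  # estimativa de pose
+
+# O mesmo mรฉtodo de prediรงรฃo serve para qualquer tarefa
+results = detector('https://ultralytics.com/images/bus.jpg')
+```
+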

+
+**Assista (vรญdeo):** Explore as Tarefas do Ultralytics YOLO: Detecรงรฃo de Objetos, Segmentaรงรฃo, Rastreamento e Estimativa de Pose.

+ +## [Detecรงรฃo](detect.md) + +A detecรงรฃo รฉ a principal tarefa suportada pelo YOLOv8. Envolve detectar objetos em uma imagem ou quadro de vรญdeo e desenhar caixas delimitadoras ao redor deles. Os objetos detectados sรฃo classificados em diferentes categorias com base em suas caracterรญsticas. YOLOv8 pode detectar mรบltiplos objetos em uma รบnica imagem ou quadro de vรญdeo com alta precisรฃo e velocidade. + +[Exemplos de Detecรงรฃo](detect.md){ .md-button } + +## [Segmentaรงรฃo](segment.md) + +Segmentaรงรฃo รฉ uma tarefa que envolve segmentar uma imagem em diferentes regiรตes com base no conteรบdo da imagem. Cada regiรฃo recebe um rรณtulo com base em seu conteรบdo. Essa tarefa รฉ รบtil em aplicaรงรตes como segmentaรงรฃo de imagens e imagiologia mรฉdica. YOLOv8 usa uma variante da arquitetura U-Net para realizar a segmentaรงรฃo. + +[Exemplos de Segmentaรงรฃo](segment.md){ .md-button } + +## [Classificaรงรฃo](classify.md) + +Classificaรงรฃo รฉ uma tarefa que envolve classificar uma imagem em diferentes categorias. YOLOv8 pode ser usado para classificar imagens com base em seu conteรบdo. Utiliza uma variante da arquitetura EfficientNet para realizar a classificaรงรฃo. + +[Exemplos de Classificaรงรฃo](classify.md){ .md-button } + +## [Pose](pose.md) + +A detecรงรฃo de pose/pontos-chave รฉ uma tarefa que envolve detectar pontos especรญficos em uma imagem ou quadro de vรญdeo. Esses pontos sรฃo chamados de keypoints e sรฃo usados para rastrear movimento ou estimar poses. YOLOv8 pode detectar keypoints em uma imagem ou quadro de vรญdeo com alta precisรฃo e velocidade. + +[Exemplos de Pose](pose.md){ .md-button } + +## Conclusรฃo + +YOLOv8 suporta mรบltiplas tarefas, incluindo detecรงรฃo, segmentaรงรฃo, classificaรงรฃo e detecรงรฃo de keypoints. Cada uma dessas tarefas tem objetivos e casos de uso diferentes. Ao entender as diferenรงas entre essas tarefas, vocรช pode escolher a tarefa apropriada para sua aplicaรงรฃo de visรฃo computacional. diff --git a/ultralytics/docs/pt/tasks/index.md:Zone.Identifier b/ultralytics/docs/pt/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/tasks/pose.md b/ultralytics/docs/pt/tasks/pose.md new file mode 100755 index 0000000..5bb4505 --- /dev/null +++ b/ultralytics/docs/pt/tasks/pose.md @@ -0,0 +1,188 @@ +--- +comments: true +description: Aprenda a usar o Ultralytics YOLOv8 para tarefas de estimativa de pose. Encontre modelos prรฉ-treinados, aprenda a treinar, validar, prever e exportar seu prรณprio modelo. +keywords: Ultralytics, YOLO, YOLOv8, estimativa de pose, detecรงรฃo de pontos-chave, detecรงรฃo de objetos, modelos prรฉ-treinados, aprendizado de mรกquina, inteligรชncia artificial +--- + +# Estimativa de Pose + +Exemplos de estimativa de pose + +A estimativa de pose รฉ uma tarefa que envolve identificar a localizaรงรฃo de pontos especรญficos em uma imagem, geralmente referidos como pontos-chave. Os pontos-chave podem representar vรกrias partes do objeto como articulaรงรตes, pontos de referรชncia ou outras caracterรญsticas distintas. As localizaรงรตes dos pontos-chave sรฃo geralmente representadas como um conjunto de coordenadas 2D `[x, y]` ou 3D `[x, y, visรญvel]`. + +A saรญda de um modelo de estimativa de pose รฉ um conjunto de pontos que representam os pontos-chave em um objeto na imagem, geralmente junto com os escores de confianรงa para cada ponto. 
A estimativa de pose รฉ uma boa escolha quando vocรช precisa identificar partes especรญficas de um objeto em uma cena, e sua localizaรงรฃo relativa entre si. + +
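+Como ilustraรงรฃo (um esboรงo mรญnimo, assumindo o peso oficial `yolov8n-pose.pt`), os pontos-chave previstos e seus escores de confianรงa podem ser lidos do objeto de resultados:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-pose.pt')  # modelo de pose prรฉ-treinado
+results = model('https://ultralytics.com/images/bus.jpg')
+
+for r in results:
+    kpts = r.keypoints  # pontos-chave de todos os objetos detectados
+    print(kpts.xy)      # coordenadas [x, y] em pixels, por objeto
+    print(kpts.conf)    # confianรงa de cada ponto-chave
+```
+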

+
+**Assista (vรญdeo):** Estimativa de Pose com Ultralytics YOLOv8.

+
+!!! Tip "Dica"
+
+    Modelos YOLOv8 _pose_ usam o sufixo `-pose`, isto รฉ `yolov8n-pose.pt`. Esses modelos sรฃo treinados no conjunto de dados [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) e sรฃo adequados para uma variedade de tarefas de estimativa de pose.
+
+## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Os modelos YOLOv8 Pose prรฉ-treinados sรฃo mostrados aqui. Os modelos Detect, Segment e Pose sรฃo prรฉ-treinados no conjunto de dados [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), enquanto os modelos Classify sรฃo prรฉ-treinados no conjunto de dados [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+[Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) sรฃo baixados automaticamente do รบltimo lanรงamento da Ultralytics [release](https://github.com/ultralytics/assets/releases) no primeiro uso.
+
+| Modelo | tamanho<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Velocidade<br><sup>CPU ONNX<br>(ms) | Velocidade<br><sup>A100 TensorRT<br>(ms) | parรขmetros<br><sup>(M) | FLOPs<br><sup>(B) |
+|------------------------------------------------------------------------------------------------------|--------------------------|-----------------------|--------------------|-------------------------------------|------------------------------------------|------------------------|-------------------|
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
+
+- Os valores de **mAP<sup>val</sup>** sรฃo para um รบnico modelo em escala รบnica no conjunto de dados [COCO Keypoints val2017](http://cocodataset.org).
+
Reproduza `yolo val pose data=coco-pose.yaml device=0`
+- **Velocidade** mรฉdia em imagens COCO val usando uma instรขncia [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/).
+
Reproduza `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## Treinar + +Treine um modelo YOLOv8-pose no conjunto de dados COCO128-pose. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-pose.yaml') # construir um novo modelo a partir do YAML + model = YOLO('yolov8n-pose.pt') # carregar um modelo prรฉ-treinado (recomendado para treinamento) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # construir a partir do YAML e transferir pesos + + # Treinar o modelo + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construir um novo modelo a partir do YAML e comeรงar o treinamento do zero + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # Comeรงar treinamento de um modelo *.pt prรฉ-treinado + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # Construir um novo modelo a partir do YAML, transferir pesos prรฉ-treinados para ele e comeรงar o treinamento + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### Formato do conjunto de dados + +O formato do conjunto de dados de pose YOLO pode ser encontrado em detalhes no [Guia de Conjuntos de Dados](../../../datasets/pose/index.md). Para converter seu conjunto de dados existente de outros formatos (como COCO etc.) para o formato YOLO, por favor, use a ferramenta [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) da Ultralytics. + +## Validar + +Valide a acurรกcia do modelo YOLOv8n-pose treinado no conjunto de dados COCO128-pose. Nรฃo รฉ necessรกrio passar nenhum argumento, pois o `model` +retรฉm seus `data` de treinamento e argumentos como atributos do modelo. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-pose.pt') # carregar um modelo oficial + model = YOLO('caminho/para/melhor.pt') # carregar um modelo personalizado + + # Validar o modelo + metrics = model.val() # nenhum argumento necessรกrio, conjunto de dados e configuraรงรตes lembradas + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # uma lista contรฉm map50-95 de cada categoria + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # validar modelo oficial + yolo pose val model=caminho/para/melhor.pt # validar modelo personalizado + ``` + +## Prever + +Use um modelo YOLOv8n-pose treinado para executar previsรตes em imagens. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-pose.pt') # carregar um modelo oficial + model = YOLO('caminho/para/melhor.pt') # carregar um modelo personalizado + + # Prever com o modelo + results = model('https://ultralytics.com/images/bus.jpg') # prever em uma imagem + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # prever com modelo oficial + yolo pose predict model=caminho/para/melhor.pt source='https://ultralytics.com/images/bus.jpg' # prever com modelo personalizado + ``` + +Veja detalhes completos do modo `predict` na pรกgina [Prever](https://docs.ultralytics.com/modes/predict/). + +## Exportar + +Exporte um modelo YOLOv8n Pose para um formato diferente como ONNX, CoreML, etc. + +!!! 
Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-pose.pt') # carregar um modelo oficial + model = YOLO('caminho/para/melhor.pt') # carregar um modelo treinado personalizado + + # Exportar o modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # exportar modelo oficial + yolo export model=caminho/para/melhor.pt format=onnx # exportar modelo treinado personalizado + ``` + +Os formatos de exportaรงรฃo YOLOv8-pose disponรญveis estรฃo na tabela abaixo. Vocรช pode prever ou validar diretamente em modelos exportados, ou seja, `yolo predict model=yolov8n-pose.onnx`. Exemplos de uso sรฃo mostrados para o seu modelo apรณs a conclusรฃo da exportaรงรฃo. + +| Formato | Argumento `format` | Modelo | Metadados | Argumentos | +|--------------------------------------------------------------------|--------------------|--------------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +Veja detalhes completos da `exportaรงรฃo` na pรกgina [Exportar](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/pt/tasks/pose.md:Zone.Identifier b/ultralytics/docs/pt/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/pt/tasks/segment.md b/ultralytics/docs/pt/tasks/segment.md new file mode 100755 index 0000000..958b4b7 --- /dev/null +++ b/ultralytics/docs/pt/tasks/segment.md @@ -0,0 +1,188 @@ +--- +comments: true +description: Aprenda a usar modelos de segmentaรงรฃo de instรขncias com o Ultralytics YOLO. Instruรงรตes sobre treinamento, validaรงรฃo, previsรฃo de imagem e exportaรงรฃo de modelo. 
+keywords: yolov8, segmentaรงรฃo de instรขncias, Ultralytics, conjunto de dados COCO, segmentaรงรฃo de imagem, detecรงรฃo de objeto, treinamento de modelo, validaรงรฃo de modelo, previsรฃo de imagem, exportaรงรฃo de modelo +--- + +# Segmentaรงรฃo de Instรขncias + +Exemplos de segmentaรงรฃo de instรขncias + +A segmentaรงรฃo de instรขncias vai alรฉm da detecรงรฃo de objetos e envolve a identificaรงรฃo de objetos individuais em uma imagem e a sua segmentaรงรฃo do resto da imagem. + +A saรญda de um modelo de segmentaรงรฃo de instรขncias รฉ um conjunto de mรกscaras ou contornos que delineiam cada objeto na imagem, juntamente com rรณtulos de classe e pontuaรงรตes de confianรงa para cada objeto. A segmentaรงรฃo de instรขncias รฉ รบtil quando vocรช precisa saber nรฃo apenas onde os objetos estรฃo em uma imagem, mas tambรฉm qual รฉ a forma exata deles. + +
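+Como esboรงo mรญnimo (assumindo o peso oficial `yolov8n-seg.pt`), as mรกscaras e os contornos previstos podem ser acessados no objeto de resultados:
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-seg.pt')  # modelo de segmentaรงรฃo prรฉ-treinado
+results = model('https://ultralytics.com/images/bus.jpg')
+
+for r in results:
+    if r.masks is not None:        # masks รฉ None quando nada รฉ detectado
+        print(r.masks.data.shape)  # tensor (N, H, W) com N mรกscaras binรกrias
+        print(r.masks.xy)          # contorno de cada instรขncia como polรญgono [x, y]
+```
+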

+
+**Assista (vรญdeo):** Executar Segmentaรงรฃo com o Modelo Treinado Ultralytics YOLOv8 em Python.

+
+!!! Tip "Dica"
+
+    Modelos YOLOv8 Segment usam o sufixo `-seg`, ou seja, `yolov8n-seg.pt` e sรฃo prรฉ-treinados no [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml).
+
+## [Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8)
+
+Os modelos Segment prรฉ-treinados do YOLOv8 estรฃo mostrados aqui. Os modelos Detect, Segment e Pose sรฃo prรฉ-treinados no conjunto de dados [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), enquanto os modelos Classify sรฃo prรฉ-treinados no conjunto de dados [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml).
+
+[Modelos](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) sรฃo baixados automaticamente do รบltimo lanรงamento da Ultralytics [release](https://github.com/ultralytics/assets/releases) na primeira utilizaรงรฃo.
+
+| Modelo | Tamanho<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mรกscara<br>50-95 | Velocidade<br><sup>CPU ONNX<br>(ms) | Velocidade<br><sup>A100 TensorRT<br>(ms) | Parรขmetros<br><sup>(M) | FLOPs<br><sup>(B) |
+|----------------------------------------------------------------------------------------------|--------------------------|----------------------|--------------------------|-------------------------------------|------------------------------------------|------------------------|-------------------|
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
+
+- Os valores de **mAP<sup>val</sup>** sรฃo para um รบnico modelo em uma รบnica escala no conjunto de dados [COCO val2017](http://cocodataset.org).
+
Reproduza por meio de `yolo val segment data=coco.yaml device=0` +- **Velocidade** mรฉdia em imagens COCO val usando uma instรขncia [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
Reproduza por meio de `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## Treinar + +Treine o modelo YOLOv8n-seg no conjunto de dados COCO128-seg por 100 รฉpocas com tamanho de imagem 640. Para uma lista completa de argumentos disponรญveis, consulte a pรกgina [Configuraรงรฃo](/../usage/cfg.md). + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-seg.yaml') # construir um novo modelo a partir do YAML + model = YOLO('yolov8n-seg.pt') # carregar um modelo prรฉ-treinado (recomendado para treinamento) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # construir a partir do YAML e transferir os pesos + + # Treinar o modelo + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # Construir um novo modelo a partir do YAML e comeรงar o treinamento do zero + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # Comeรงar o treinamento a partir de um modelo *.pt prรฉ-treinado + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # Construir um novo modelo a partir do YAML, transferir pesos prรฉ-treinados para ele e comeรงar o treinamento + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### Formato do conjunto de dados + +O formato do conjunto de dados de segmentaรงรฃo YOLO pode ser encontrado em detalhes no [Guia de Conjuntos de Dados](../../../datasets/segment/index.md). Para converter seu conjunto de dados existente de outros formatos (como COCO etc.) para o formato YOLO, utilize a ferramenta [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) da Ultralytics. + +## Val + +Valide a acurรกcia do modelo YOLOv8n-seg treinado no conjunto de dados COCO128-seg. Nรฃo รฉ necessรกrio passar nenhum argumento, pois o `modelo` retรฉm seus `dados` de treino e argumentos como atributos do modelo. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-seg.pt') # carregar um modelo oficial + model = YOLO('path/to/best.pt') # carregar um modelo personalizado + + # Validar o modelo + metrics = model.val() # sem necessidade de argumentos, conjunto de dados e configuraรงรตes sรฃo lembrados + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # uma lista contendo map50-95(B) de cada categoria + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # uma lista contendo map50-95(M) de cada categoria + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # val modelo oficial + yolo segment val model=path/to/best.pt # val modelo personalizado + ``` + +## Prever + +Use um modelo YOLOv8n-seg treinado para realizar previsรตes em imagens. + +!!! 
Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-seg.pt') # carregar um modelo oficial + model = YOLO('path/to/best.pt') # carregar um modelo personalizado + + # Realizar previsรฃo com o modelo + results = model('https://ultralytics.com/images/bus.jpg') # prever em uma imagem + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # previsรฃo com modelo oficial + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # previsรฃo com modelo personalizado + ``` + +Veja detalhes completos do modo `predict` na pรกgina [Prever](https://docs.ultralytics.com/modes/predict/). + +## Exportar + +Exporte um modelo YOLOv8n-seg para um formato diferente como ONNX, CoreML, etc. + +!!! Example "Exemplo" + + === "Python" + + ```python + from ultralytics import YOLO + + # Carregar um modelo + model = YOLO('yolov8n-seg.pt') # carregar um modelo oficial + model = YOLO('path/to/best.pt') # carregar um modelo treinado personalizado + + # Exportar o modelo + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # exportar modelo oficial + yolo export model=path/to/best.pt format=onnx # exportar modelo treinado personalizado + ``` + +Os formatos de exportaรงรฃo disponรญveis para YOLOv8-seg estรฃo na tabela abaixo. Vocรช pode prever ou validar diretamente em modelos exportados, ou seja, `yolo predict model=yolov8n-seg.onnx`. Exemplos de uso sรฃo mostrados para o seu modelo apรณs a conclusรฃo da exportaรงรฃo. + +| Formato | Argumento `format` | Modelo | Metadados | Argumentos | +|--------------------------------------------------------------------|--------------------|-------------------------------|-----------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +Veja detalhes completos da `exportaรงรฃo` na pรกgina [Exportar](https://docs.ultralytics.com/modes/export/). 
diff --git a/ultralytics/docs/pt/tasks/segment.md:Zone.Identifier b/ultralytics/docs/pt/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/pt/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/index.md b/ultralytics/docs/ru/index.md new file mode 100755 index 0000000..a4ccca1 --- /dev/null +++ b/ultralytics/docs/ru/index.md @@ -0,0 +1,83 @@ +--- +comments: true +description: ะ˜ะทัƒั‡ะธั‚ะต ะฟะพะปะฝะพะต ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ Ultralytics YOLOv8, ะผะพะดะตะปะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน ั ะฒั‹ัะพะบะพะน ัะบะพั€ะพัั‚ัŒัŽ ะธ ั‚ะพั‡ะฝะพัั‚ัŒัŽ. ะฃั‡ะตะฑะฝะธะบะธ ะฟะพ ัƒัั‚ะฐะฝะพะฒะบะต, ะฟั€ะตะดัะบะฐะทะฐะฝะธัะผ, ั‚ั€ะตะฝะธั€ะพะฒะบะต ะธ ะผะฝะพะณะพะต ะดั€ัƒะณะพะต. +keywords: Ultralytics, YOLOv8, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ัะตะณะผะตะฝั‚ะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะผะฐัˆะธะฝะฝะพะต ะพะฑัƒั‡ะตะฝะธะต, ะณะปัƒะฑะพะบะพะต ะพะฑัƒั‡ะตะฝะธะต, ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะต ะทั€ะตะฝะธะต, ัƒัั‚ะฐะฝะพะฒะบะฐ YOLOv8, ะฟั€ะตะดัะบะฐะทะฐะฝะธะต YOLOv8, ั‚ั€ะตะฝะธั€ะพะฒะบะฐ YOLOv8, ะธัั‚ะพั€ะธั YOLO, ะปะธั†ะตะฝะทะธะธ YOLO +--- + +
+

+Ultralytics YOLO banner
+GitHub Ultralytics | LinkedIn Ultralytics | Twitter Ultralytics | YouTube Ultralytics | TikTok Ultralytics | Instagram Ultralytics | Discord Ultralytics
+CI Ultralytics | ะŸะพะบั€ั‹ั‚ะธะต ะบะพะดะฐ Ultralytics | ะฆะธั‚ะธั€ะพะฒะฐะฝะธะต YOLOv8 | ะ—ะฐะณั€ัƒะทะบะธ Docker | Discord
+ะ—ะฐะฟัƒัั‚ะธั‚ัŒ ะฝะฐ Gradient | ะžั‚ะบั€ั‹ั‚ัŒ ะฒ Colab | ะžั‚ะบั€ั‹ั‚ัŒ ะฒ Kaggle
+ +ะŸั€ะตะดัั‚ะฐะฒะปัะตะผ [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) โ€” ะฟะพัะปะตะดะฝัŽัŽ ะฒะตั€ัะธัŽ ะทะฝะฐะผะตะฝะธั‚ะพะน ะผะพะดะตะปะธ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน. YOLOv8 ะพัะฝะพะฒะฐะฝ ะฝะฐ ะฟะตั€ะตะดะพะฒะพะผ ะฟั€ะพะณั€ะตััะต ะฒ ะพะฑะปะฐัั‚ะธ ะณะปัƒะฑะพะบะพะณะพ ะพะฑัƒั‡ะตะฝะธั ะธ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั ะธ ะฟั€ะตะดะปะฐะณะฐะตั‚ ะฝะตะฟั€ะตะฒะทะพะนะดะตะฝะฝัƒัŽ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฒ ะพั‚ะฝะพัˆะตะฝะธะธ ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ. ะ•ะณะพ ะพะฟั‚ะธะผะธะทะธั€ะพะฒะฐะฝะฝะฐั ะบะพะฝัั‚ั€ัƒะบั†ะธั ะดะตะปะฐะตั‚ ะตะณะพ ะฟะพะดั…ะพะดัั‰ะธะผ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธะน ะธ ะปะตะณะบะพ ะฐะดะฐะฟั‚ะธั€ัƒะตะผั‹ะผ ะบ ั€ะฐะทะปะธั‡ะฝั‹ะผ ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ะผ ะฟะปะฐั‚ั„ะพั€ะผะฐะผ, ะพั‚ ัƒัั‚ั€ะพะนัั‚ะฒ ะฝะฐ ะบั€ะฐัŽ ัะตั‚ะธ ะดะพ ะพะฑะปะฐั‡ะฝั‹ั… API. + +ะ˜ััะปะตะดัƒะนั‚ะต ะดะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ YOLOv8 โ€” ะพะฑัˆะธั€ะฝั‹ะน ั€ะตััƒั€ั, ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฝั‹ะน, ั‡ั‚ะพะฑั‹ ะฟะพะผะพั‡ัŒ ะฒะฐะผ ะฟะพะฝัั‚ัŒ ะธ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะตะณะพ ั„ัƒะฝะบั†ะธะธ ะธ ะฒะพะทะผะพะถะฝะพัั‚ะธ. ะะตะทะฐะฒะธัะธะผะพ ะพั‚ ั‚ะพะณะพ, ัะฒะปัะตั‚ะตััŒ ะปะธ ะฒั‹ ะพะฟั‹ั‚ะฝั‹ะผ ะฟั€ะฐะบั‚ะธะบะพะผ ะผะฐัˆะธะฝะฝะพะณะพ ะพะฑัƒั‡ะตะฝะธั ะธะปะธ ะฝะพะฒะธั‡ะบะพะผ ะฒ ัั‚ะพะน ะพะฑะปะฐัั‚ะธ, ัั‚ะพั‚ ั†ะตะฝั‚ั€ ั€ะตััƒั€ัะพะฒ ะฝะฐั†ะตะปะตะฝ ะฝะฐ ะผะฐะบัะธะผะฐะปัŒะฝะพะต ั€ะฐัะบั€ั‹ั‚ะธะต ะฟะพั‚ะตะฝั†ะธะฐะปะฐ YOLOv8 ะฒ ะฒะฐัˆะธั… ะฟั€ะพะตะบั‚ะฐั…. + +!!! Note "ะ—ะฐะผะตั‚ะบะฐ" + + ๐Ÿšง ะะฐัˆะฐ ะผะฝะพะณะพัะทั‹ั‡ะฝะฐั ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั ะฒ ะฝะฐัั‚ะพัั‰ะตะต ะฒั€ะตะผั ะฝะฐั…ะพะดะธั‚ัั ะฒ ัั‚ะฐะดะธะธ ั€ะฐะทั€ะฐะฑะพั‚ะบะธ, ะธ ะผั‹ ัƒัะตั€ะดะฝะพ ั€ะฐะฑะพั‚ะฐะตะผ ะฝะฐะด ะตะต ัƒะปัƒั‡ัˆะตะฝะธะตะผ. ะกะฟะฐัะธะฑะพ ะทะฐ ะฒะฐัˆะต ั‚ะตั€ะฟะตะฝะธะต! ๐Ÿ™ + +## ะก ั‡ะตะณะพ ะฝะฐั‡ะฐั‚ัŒ + +- **ะฃัั‚ะฐะฝะพะฒะธั‚ะต** `ultralytics` ั ะฟะพะผะพั‰ัŒัŽ pip ะธ ะฟั€ะธัั‚ัƒะฟะฐะนั‚ะต ะบ ั€ะฐะฑะพั‚ะต ะทะฐ ะฝะตัะบะพะปัŒะบะพ ะผะธะฝัƒั‚   [:material-clock-fast: ะะฐั‡ะฐั‚ัŒ ั€ะฐะฑะพั‚ัƒ](quickstart.md){ .md-button } +- **ะŸั€ะตะดัะบะฐะถะธั‚ะต** ะฝะพะฒั‹ะต ะธะทะพะฑั€ะฐะถะตะฝะธั ะธ ะฒะธะดะตะพ ั ะฟะพะผะพั‰ัŒัŽ YOLOv8   [:octicons-image-16: ะŸั€ะตะดัะบะฐะทะฐะฝะธั ะดะปั ะธะทะพะฑั€ะฐะถะตะฝะธะน](modes/predict.md){ .md-button } +- **ะขั€ะตะฝะธั€ัƒะนั‚ะต** ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ YOLOv8 ะฝะฐ ัะฒะพะธั… ัะพะฑัั‚ะฒะตะฝะฝั‹ั… ะฝะฐะฑะพั€ะฐั… ะดะฐะฝะฝั‹ั…   [:fontawesome-solid-brain: ะขั€ะตะฝะธั€ะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ](modes/train.md){ .md-button } +- **ะ˜ััะปะตะดัƒะนั‚ะต** ะทะฐะดะฐั‡ะธ YOLOv8, ั‚ะฐะบะธะต ะบะฐะบ ัะตะณะผะตะฝั‚ะฐั†ะธั, ะบะปะฐััะธั„ะธะบะฐั†ะธั, ะพั†ะตะฝะบะฐ ะฟะพะทั‹ ะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต   [:material-magnify-expand: ะ˜ััะปะตะดะพะฒะฐั‚ัŒ ะทะฐะดะฐั‡ะธ](tasks/index.md){ .md-button } + +
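+ะœะธะฝะธะผะฐะปัŒะฝั‹ะน ะฝะฐะฑั€ะพัะพะบ ะฟะตั€ะฒั‹ั… ัˆะฐะณะพะฒ ะฝะฐ Python (ะฟั€ะตะดะฟะพะปะฐะณะฐะตั‚ัั, ั‡ั‚ะพ ะฟะฐะบะตั‚ ัƒัั‚ะฐะฝะพะฒะปะตะฝ ั‡ะตั€ะตะท `pip install ultralytics`; ะฒะตัะฐ `yolov8n.pt` ัะบะฐั‡ะธะฒะฐัŽั‚ัั ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะฟั€ะธ ะฟะตั€ะฒะพะผ ะทะฐะฟัƒัะบะต):
+
+```python
+from ultralytics import YOLO
+
+# ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ ะดะตั‚ะตะบั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ
+model = YOLO('yolov8n.pt')
+
+# ะŸั€ะตะดัะบะฐะทะฐะฝะธะต ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ: ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ัะพะดะตั€ะถะฐั‚ ั€ะฐะผะบะธ, ะบะปะฐััั‹ ะธ ัƒะฒะตั€ะตะฝะฝะพัั‚ะธ
+results = model('https://ultralytics.com/images/bus.jpg')
+```
+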

+
+**ะกะผะพั‚ั€ะธั‚ะต (ะฒะธะดะตะพ):** ะšะฐะบ ั‚ั€ะตะฝะธั€ะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ YOLOv8 ะฝะฐ ัะฒะพะตะผ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะฒ Google Colab.

+
+## YOLO: ะšั€ะฐั‚ะบะฐั ะธัั‚ะพั€ะธั
+
+[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once — ยซะกะผะพั‚ั€ะธัˆัŒ ั‚ะพะปัŒะบะพ ะพะดะธะฝ ั€ะฐะทยป), ะฟะพะฟัƒะปัั€ะฝะฐั ะผะพะดะตะปัŒ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะฑั‹ะปะฐ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ะ”ะถะพะทะตั„ะพะผ ะ ะตะดะผะพะฝะพะผ ะธ ะะปะธ ะคะฐั€ั…ะฐะดะธ ะธะท ะ’ะฐัˆะธะฝะณั‚ะพะฝัะบะพะณะพ ัƒะฝะธะฒะตั€ัะธั‚ะตั‚ะฐ. ะ—ะฐะฟัƒั‰ะตะฝะฝะฐั ะฒ 2015 ะณะพะดัƒ, YOLO ะฑั‹ัั‚ั€ะพ ะพะฑั€ะตะปะฐ ะฟะพะฟัƒะปัั€ะฝะพัั‚ัŒ ะฑะปะฐะณะพะดะฐั€ั ัะฒะพะตะน ะฒั‹ัะพะบะพะน ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ.
+
+- [YOLOv2](https://arxiv.org/abs/1612.08242), ะฒั‹ะฟัƒั‰ะตะฝะฝะฐั ะฒ 2016 ะณะพะดัƒ, ัƒะปัƒั‡ัˆะธะปะฐ ะพั€ะธะณะธะฝะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ, ะฒะบะปัŽั‡ะธะฒ ะฒ ัะตะฑั ะฟะฐะบะตั‚ะฝัƒัŽ ะฝะพั€ะผะฐะปะธะทะฐั†ะธัŽ, ัะบะพั€ะฝั‹ะต ั€ะฐะผะบะธ (anchor boxes) ะธ ะบะปะฐัั‚ะตั€ั‹ ั€ะฐะทะผะตั€ะพะฒ.
+- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf), ะทะฐะฟัƒั‰ะตะฝะฝะฐั ะฒ 2018 ะณะพะดัƒ, ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพ ัƒะปัƒั‡ัˆะธะปะฐ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะผะพะดะตะปะธ, ะธัะฟะพะปัŒะทัƒั ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝัƒัŽ ะพัะฝะพะฒะฝัƒัŽ ัะตั‚ัŒ, ะผะฝะพะถะตัั‚ะฒะพ ัะบะพั€ะตะน ะธ ะฟะธั€ะฐะผะธะดะฐะปัŒะฝั‹ะน ะฟั€ะพัั‚ั€ะฐะฝัั‚ะฒะตะฝะฝั‹ะน ะฟัƒะปะธะฝะณ.
+- [YOLOv4](https://arxiv.org/abs/2004.10934) ะฑั‹ะปะฐ ะฒั‹ะฟัƒั‰ะตะฝะฐ ะฒ 2020 ะณะพะดัƒ, ะฟั€ะตะดัั‚ะฐะฒะธะฒ ั‚ะฐะบะธะต ะธะฝะฝะพะฒะฐั†ะธะธ, ะบะฐะบ ะฐัƒะณะผะตะฝั‚ะฐั†ะธั ะดะฐะฝะฝั‹ั… Mosaic, ะฝะพะฒัƒัŽ ะณะพะปะพะฒัƒ ะดะตั‚ะตะบั‚ะธั€ะพะฒะฐะฝะธั ะฑะตะท ัะบะพั€ะตะน (anchor-free) ะธ ะฝะพะฒัƒัŽ ั„ัƒะฝะบั†ะธัŽ ะฟะพั‚ะตั€ัŒ.
+- [YOLOv5](https://github.com/ultralytics/yolov5) ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพ ัƒะปัƒั‡ัˆะธะปะฐ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะผะพะดะตะปะธ ะธ ะดะพะฑะฐะฒะธะปะฐ ะฝะพะฒั‹ะต ั„ัƒะฝะบั†ะธะธ, ั‚ะฐะบะธะต ะบะฐะบ ะพะฟั‚ะธะผะธะทะฐั†ะธั ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ, ะธะฝั‚ะตะณั€ะธั€ะพะฒะฐะฝะฝะพะต ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ัะบัะฟะตั€ะธะผะตะฝั‚ะพะฒ ะธ ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธะน ัะบัะฟะพั€ั‚ ะฒ ะฟะพะฟัƒะปัั€ะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹.
+- [YOLOv6](https://github.com/meituan/YOLOv6) ะฑั‹ะปะฐ ะพะฟัƒะฑะปะธะบะพะฒะฐะฝะฐ ะฒ ะพั‚ะบั€ั‹ั‚ะพะผ ะดะพัั‚ัƒะฟะต ะบะพะผะฟะฐะฝะธะตะน [Meituan](https://about.meituan.com/) ะฒ 2022 ะณะพะดัƒ ะธ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะฒะพ ะผะฝะพะณะธั… ะฐะฒั‚ะพะฝะพะผะฝั‹ั… ั€ะพะฑะพั‚ะฐั… ะดะพัั‚ะฐะฒะบะธ ะบะพะผะฟะฐะฝะธะธ.
+- [YOLOv7](https://github.com/WongKinYiu/yolov7) ะดะพะฑะฐะฒะธะปะฐ ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ะต ะทะฐะดะฐั‡ะธ, ั‚ะฐะบะธะต ะบะฐะบ ะพั†ะตะฝะบะฐ ะฟะพะทั‹ ะฟะพ ะฝะฐะฑะพั€ัƒ ะดะฐะฝะฝั‹ั… ะบะปัŽั‡ะตะฒั‹ั… ั‚ะพั‡ะตะบ COCO.
+- [YOLOv8](https://github.com/ultralytics/ultralytics) — ะฟะพัะปะตะดะฝัั ะฒะตั€ัะธั YOLO ะพั‚ Ultralytics. ะ‘ัƒะดัƒั‡ะธ ะฟะตั€ะตะดะพะฒะพะน, ัะพะฒั€ะตะผะตะฝะฝะพะน ะผะพะดะตะปัŒัŽ, YOLOv8 ะพัะฝะพะฒะฐะฝะฐ ะฝะฐ ัƒัะฟะตั…ะต ะฟั€ะตะดั‹ะดัƒั‰ะธั… ะฒะตั€ัะธะน, ะฒะฝะตะดั€ัั ะฝะพะฒั‹ะต ั„ัƒะฝะบั†ะธะธ ะธ ัƒัะพะฒะตั€ัˆะตะฝัั‚ะฒะพะฒะฐะฝะธั ะดะปั ะฟะพะฒั‹ัˆะตะฝะธั ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ, ะณะธะฑะบะพัั‚ะธ ะธ ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ะธ. YOLOv8 ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ะฟะพะปะฝั‹ะน ัะฟะตะบั‚ั€ ะทะฐะดะฐั‡ ะฒ ะพะฑะปะฐัั‚ะธ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั, ะฒะบะปัŽั‡ะฐั [ะพะฑะฝะฐั€ัƒะถะตะฝะธะต](tasks/detect.md), [ัะตะณะผะตะฝั‚ะฐั†ะธัŽ](tasks/segment.md), [ะพั†ะตะฝะบัƒ ะฟะพะทั‹](tasks/pose.md), [ะพั‚ัะปะตะถะธะฒะฐะฝะธะต](modes/track.md) ะธ [ะบะปะฐััะธั„ะธะบะฐั†ะธัŽ](tasks/classify.md).
ะญั‚ะพ ะผะฝะพะณะพัั‚ะพั€ะพะฝะฝะพัั‚ัŒ ะฟะพะทะฒะพะปัะตั‚ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัะผ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะฒะพะทะผะพะถะฝะพัั‚ะธ YOLOv8 ะฒ ัะฐะผั‹ั… ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธัั… ะธ ัั„ะตั€ะฐั… ะดะตัั‚ะตะปัŒะฝะพัั‚ะธ. + +## ะ›ะธั†ะตะฝะทะธะธ YOLO: ะšะฐะบ ะปะธั†ะตะฝะทะธั€ัƒะตั‚ัั YOLO ะพั‚ Ultralytics? + +Ultralytics ะฟั€ะตะดะปะฐะณะฐะตั‚ ะดะฒะฐ ะฒะฐั€ะธะฐะฝั‚ะฐ ะปะธั†ะตะฝะทะธั€ะพะฒะฐะฝะธั ะดะปั ัƒะดะพะฒะปะตั‚ะฒะพั€ะตะฝะธั ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ั… ัั†ะตะฝะฐั€ะธะตะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั: + +- **ะ›ะธั†ะตะฝะทะธั AGPL-3.0**: ะญั‚ะฐ ะพะดะพะฑั€ะตะฝะฝะฐั [OSI](https://opensource.org/licenses/) ะปะธั†ะตะฝะทะธั ั ะพั‚ะบั€ั‹ั‚ั‹ะผ ะธัั…ะพะดะฝั‹ะผ ะบะพะดะพะผ ะธะดะตะฐะปัŒะฝะพ ะฟะพะดั…ะพะดะธั‚ ะดะปั ัั‚ัƒะดะตะฝั‚ะพะฒ ะธ ัะฝั‚ัƒะทะธะฐัั‚ะพะฒ, ัะฟะพัะพะฑัั‚ะฒัƒั ะพั‚ะบั€ั‹ั‚ะพะผัƒ ัะพั‚ั€ัƒะดะฝะธั‡ะตัั‚ะฒัƒ ะธ ะพะฑะผะตะฝัƒ ะทะฝะฐะฝะธัะผะธ. ะŸะพะดั€ะพะฑะฝัƒัŽ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ัะผะพั‚ั€ะธั‚ะต ะฒ ั„ะฐะนะปะต [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE). +- **ะšะพั€ะฟะพั€ะฐั‚ะธะฒะฝะฐั ะปะธั†ะตะฝะทะธั**: ะ ะฐะทั€ะฐะฑะพั‚ะฐะฝะฝะฐั ะดะปั ะบะพะผะผะตั€ั‡ะตัะบะพะณะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั, ัั‚ะฐ ะปะธั†ะตะฝะทะธั ะฟะพะทะฒะพะปัะตั‚ ะฑะตัะฟั€ะตะฟัั‚ัั‚ะฒะตะฝะฝะพ ะธะฝั‚ะตะณั€ะธั€ะพะฒะฐั‚ัŒ ะฟั€ะพะณั€ะฐะผะผะฝะพะต ะพะฑะตัะฟะตั‡ะตะฝะธะต ะธ AI-ะผะพะดะตะปะธ Ultralytics ะฒ ั‚ะพะฒะฐั€ั‹ ะธ ัƒัะปัƒะณะธ ะบะพะผะผะตั€ั‡ะตัะบะพะณะพ ะฝะฐะทะฝะฐั‡ะตะฝะธั, ะพะฑั…ะพะดั ั‚ั€ะตะฑะพะฒะฐะฝะธั ะพั‚ะบั€ั‹ั‚ะพะณะพ ะธัั…ะพะดะฝะพะณะพ ะบะพะดะฐ AGPL-3.0. ะ•ัะปะธ ะฒะฐัˆ ัั†ะตะฝะฐั€ะธะน ะฒะบะปัŽั‡ะฐะตั‚ ะฒะฝะตะดั€ะตะฝะธะต ะฝะฐัˆะธั… ั€ะตัˆะตะฝะธะน ะฒ ะบะพะผะผะตั€ั‡ะตัะบะพะต ะฟั€ะตะดะปะพะถะตะฝะธะต, ะพะฑั€ะฐั‚ะธั‚ะตััŒ ั‡ะตั€ะตะท [ะ›ะธั†ะตะฝะทะธั€ะพะฒะฐะฝะธะต Ultralytics](https://ultralytics.com/license). + +ะะฐัˆะฐ ัั‚ั€ะฐั‚ะตะณะธั ะปะธั†ะตะฝะทะธั€ะพะฒะฐะฝะธั ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ, ั‡ั‚ะพะฑั‹ ะพะฑะตัะฟะตั‡ะธั‚ัŒ ะฒะพะทะฒั€ะฐั‚ ัƒัะพะฒะตั€ัˆะตะฝัั‚ะฒะพะฒะฐะฝะธะน ะฝะฐัˆะธั… ะฟั€ะพะตะบั‚ะพะฒ ั ะพั‚ะบั€ั‹ั‚ั‹ะผ ะธัั…ะพะดะฝั‹ะผ ะบะพะดะพะผ ะพะฑั‰ะตัั‚ะฒัƒ. ะœั‹ ั‚ะฒะตั€ะดะพ ะฟั€ะธะฒะตั€ะถะตะฝั‹ ะฟั€ะธะฝั†ะธะฟะฐะผ ะพั‚ะบั€ั‹ั‚ะพะณะพ ะธัั…ะพะดะฝะพะณะพ ะบะพะดะฐ โค๏ธ ะธ ะฝะฐัˆะฐ ะผะธััะธั ะทะฐะบะปัŽั‡ะฐะตั‚ัั ะฒ ะณะฐั€ะฐะฝั‚ะธั€ะพะฒะฐะฝะธะธ ั‚ะพะณะพ, ั‡ั‚ะพ ะฝะฐัˆะธ ะฒะบะปะฐะดั‹ ะผะพะณัƒั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝั‹ ะธ ั€ะฐััˆะธั€ะตะฝั‹ ั‚ะฐะบะธะผ ะพะฑั€ะฐะทะพะผ, ะบะพั‚ะพั€ั‹ะน ะฑัƒะดะตั‚ ะฟะพะปะตะทะตะฝ ะดะปั ะฒัะตั…. diff --git a/ultralytics/docs/ru/index.md:Zone.Identifier b/ultralytics/docs/ru/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/models/fast-sam.md b/ultralytics/docs/ru/models/fast-sam.md new file mode 100755 index 0000000..4756718 --- /dev/null +++ b/ultralytics/docs/ru/models/fast-sam.md @@ -0,0 +1,193 @@ +--- +comments: true +description: ะ˜ััะปะตะดัƒะนั‚ะต FastSAM, ะฑะฐะทะธั€ัƒัŽั‰ัƒัŽัั ะฝะฐ CNN ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธัั…. ะฃะปัƒั‡ัˆะตะฝะฝะพะต ะฒะทะฐะธะผะพะดะตะนัั‚ะฒะธะต ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะตะผ, ะฒั‹ัะพะบะฐั ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝะฐั ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ ะธ ะฟั€ะธะผะตะฝะธะผะพัั‚ัŒ ะบ ั€ะฐะทะปะธั‡ะฝั‹ะผ ะทะฐะดะฐั‡ะฐะผ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั. 
+keywords: FastSAM, ะผะฐัˆะธะฝะฝะพะต ะพะฑัƒั‡ะตะฝะธะต, ั€ะตัˆะตะฝะธะต ะฝะฐ ะฑะฐะทะต CNN, ัะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ, ั€ะตัˆะตะฝะธะต ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, Ultralytics, ะทะฐะดะฐั‡ะธ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั, ะพะฑั€ะฐะฑะพั‚ะบะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะฟั€ะธะผะตะฝะตะฝะธะต ะฒ ะฟั€ะพะผั‹ัˆะปะตะฝะฝะพัั‚ะธ, ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะต ะฒะทะฐะธะผะพะดะตะนัั‚ะฒะธะต +--- + +# ะœะพะดะตะปัŒ Fast Segment Anything (FastSAM) + +ะœะพะดะตะปัŒ Fast Segment Anything (FastSAM) - ัั‚ะพ ะฝะพะฒะฐั‚ะพั€ัะบะพะต ั€ะตัˆะตะฝะธะต ะฝะฐ ะฑะฐะทะต CNN, ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝะฝะพะต ะดะปั ั€ะตัˆะตะฝะธั ะทะฐะดะฐั‡ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ. ะญั‚ะฐ ะทะฐะดะฐั‡ะฐ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะปัŽะฑะพะณะพ ะพะฑัŠะตะบั‚ะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะฝะฐ ะพัะฝะพะฒะต ั€ะฐะทะปะธั‡ะฝั‹ั… ะฒะพะทะผะพะถะฝั‹ั… ะทะฐะฟั€ะพัะพะฒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปั. FastSAM ะทะฝะฐั‡ะธั‚ะตะปัŒะฝะพ ัะฝะธะถะฐะตั‚ ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ะต ั‚ั€ะตะฑะพะฒะฐะฝะธั, ะฟั€ะธ ัั‚ะพะผ ัะพั…ั€ะฐะฝัั ะบะพะฝะบัƒั€ะตะฝั‚ะพัะฟะพัะพะฑะฝะพัั‚ัŒ ั€ะฐะฑะพั‚ั‹, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะตะต ะฟั€ะฐะบั‚ะธั‡ะตัะบะธ ะฟะพะดั…ะพะดัั‰ะตะน ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั. + +![ะžะฑะทะพั€ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ ะผะพะดะตะปะธ Fast Segment Anything (FastSAM)](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## ะžะฑะทะพั€ + +FastSAM ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ะดะปั ะฟั€ะตะพะดะพะปะตะฝะธั ะพะณั€ะฐะฝะธั‡ะตะฝะธะน [ะผะพะดะตะปะธ Segment Anything (SAM)](sam.md), ั‚ัะถะตะปะพะน ะผะพะดะตะปะธ Transformer, ั‚ั€ะตะฑัƒัŽั‰ะตะน ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ั… ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ั… ั€ะตััƒั€ัะพะฒ. FastSAM ั€ะฐะทะดะตะปัะตั‚ ะทะฐะดะฐั‡ัƒ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะดะฒะฐ ะฟะพัะปะตะดะพะฒะฐั‚ะตะปัŒะฝั‹ั… ัั‚ะฐะฟะฐ: ัะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตั… ัะบะทะตะผะฟะปัั€ะพะฒ ะธ ะฒั‹ะฑะพั€, ะพัะฝะพะฒะฐะฝะฝั‹ะน ะฝะฐ ะทะฐะฟั€ะพัะฐั… ะฟะพะปัŒะทะพะฒะฐั‚ะตะปั. ะะฐ ะฟะตั€ะฒะพะผ ัั‚ะฐะฟะต ะธัะฟะพะปัŒะทัƒะตั‚ัั [YOLOv8-seg](../tasks/segment.md) ะดะปั ัะพะทะดะฐะฝะธั ัะตะณะผะตะฝั‚ะฐั†ะธะพะฝะฝั‹ั… ะผะฐัะพะบ ะฒัะตั… ัะบะทะตะผะฟะปัั€ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ. ะะฐ ะฒั‚ะพั€ะพะผ ัั‚ะฐะฟะต ะผะพะดะตะปัŒ ะฒั‹ะฒะพะดะธั‚ ะพะฑะปะฐัั‚ัŒ ะธะฝั‚ะตั€ะตัะฐ, ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒัŽั‰ัƒัŽ ะทะฐะฟั€ะพััƒ. + +## ะžัะฝะพะฒะฝั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ + +1. **ะ ะตัˆะตะฝะธะต ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ:** ะ‘ะปะฐะณะพะดะฐั€ั ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ะธ ะฒั‹ั‡ะธัะปะตะฝะธะน ะฝะฐ ะฑะฐะทะต CNN, FastSAM ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ั€ะตัˆะตะฝะธะต ะทะฐะดะฐั‡ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะตะต ั†ะตะฝะฝะพะน ะดะปั ะฟั€ะธะผะตะฝะตะฝะธั ะฒ ะฟั€ะพะผั‹ัˆะปะตะฝะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธัั…, ั‚ั€ะตะฑัƒัŽั‰ะธั… ะฑั‹ัั‚ั€ั‹ั… ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ. + +2. **ะญั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ ะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ:** FastSAM ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะทะฝะฐั‡ะธั‚ะตะปัŒะฝะพะต ัะฝะธะถะตะฝะธะต ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ั… ะธ ั€ะตััƒั€ัะฝั‹ั… ั‚ั€ะตะฑะพะฒะฐะฝะธะน, ะฝะต ัƒั…ัƒะดัˆะฐั ะบะฐั‡ะตัั‚ะฒะพ ั€ะฐะฑะพั‚ั‹. ะžะฝะฐ ะดะพัั‚ะธะณะฐะตั‚ ัะพะฟะพัั‚ะฐะฒะธะผะพะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ั ะผะพะดะตะปัŒัŽ SAM, ะฝะพ ั‚ั€ะตะฑัƒะตั‚ ะทะฝะฐั‡ะธั‚ะตะปัŒะฝะพ ะผะตะฝัŒัˆะต ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ั… ั€ะตััƒั€ัะพะฒ, ั‡ั‚ะพ ะฟะพะทะฒะพะปัะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะตะต ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ. + +3. 
**ะกะตะณะผะตะฝั‚ะฐั†ะธั ะฝะฐ ะพัะฝะพะฒะต ะทะฐะฟั€ะพัะพะฒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปั:** FastSAM ะผะพะถะตั‚ ะฒั‹ะฟะพะปะฝัั‚ัŒ ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ะปัŽะฑะพะณะพ ะพะฑัŠะตะบั‚ะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะพัะฝะพะฒั‹ะฒะฐัััŒ ะฝะฐ ั€ะฐะทะปะธั‡ะฝั‹ั… ะฒะพะทะผะพะถะฝั‹ั… ะทะฐะฟั€ะพัะฐั… ะฟะพะปัŒะทะพะฒะฐั‚ะตะปั, ั‡ั‚ะพ ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะณะธะฑะบะพัั‚ัŒ ะธ ะฟั€ะธัะฟะพัะพะฑะปัะตะผะพัั‚ัŒ ะบ ั€ะฐะทะปะธั‡ะฝั‹ะผ ัั†ะตะฝะฐั€ะธัะผ. + +4. **ะžัะฝะพะฒะฐะฝะฐ ะฝะฐ YOLOv8-seg:** FastSAM ะพัะฝะพะฒะฐะฝะฐ ะฝะฐ ะผะพะดะตะปะธ [YOLOv8-seg](../tasks/segment.md), ะบะพั‚ะพั€ะฐั ัะฒะปัะตั‚ัั ะดะตั‚ะตะบั‚ะพั€ะพะผ ะพะฑัŠะตะบั‚ะพะฒ ั ะฒะตั‚ะฒัŒัŽ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ัะบะทะตะผะฟะปัั€ะพะฒ. ะญั‚ะพ ะฟะพะทะฒะพะปัะตั‚ ะตะน ัั„ั„ะตะบั‚ะธะฒะฝะพ ัะพะทะดะฐะฒะฐั‚ัŒ ัะตะณะผะตะฝั‚ะฐั†ะธะพะฝะฝั‹ะต ะผะฐัะบะธ ะฒัะตั… ัะบะทะตะผะฟะปัั€ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ. + +5. **ะ’ั‹ัะพะบะธะต ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะฝะฐ ะฟะพะบะฐะทะฐั‚ะตะปัั…:** ะŸั€ะธ ะฒั‹ะฟะพะปะฝะตะฝะธะธ ะทะฐะดะฐั‡ะธ ะฟั€ะตะดะปะพะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… MS COCO FastSAM ะดะพัั‚ะธะณะฐะตั‚ ะฒั‹ัะพะบะธั… ะฟะพะบะฐะทะฐั‚ะตะปะตะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะฟั€ะธ ะทะฝะฐั‡ะธั‚ะตะปัŒะฝะพ ะฑะพะปัŒัˆะตะน ัะบะพั€ะพัั‚ะธ ั€ะฐะฑะพั‚ั‹, ั‡ะตะผ [SAM](sam.md) ะฝะฐ ะพะดะฝะพะผ ะณั€ะฐั„ะธั‡ะตัะบะพะผ ะฟั€ะพั†ะตััะพั€ะต NVIDIA RTX 3090, ั‡ั‚ะพ ัะฒะธะดะตั‚ะตะปัŒัั‚ะฒัƒะตั‚ ะพ ะตะต ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ะธ ะธ ัะฟะพัะพะฑะฝะพัั‚ะธ. + +6. **ะŸั€ะฐะบั‚ะธั‡ะตัะบะพะต ะฟั€ะธะผะตะฝะตะฝะธะต:** ะŸั€ะตะดะปะพะถะตะฝะฝั‹ะน ะฟะพะดั…ะพะด ะฟั€ะตะดะพัั‚ะฐะฒะปัะตั‚ ะฝะพะฒะพะต ะฟั€ะฐะบั‚ะธั‡ะตัะบะพะต ั€ะตัˆะตะฝะธะต ะดะปั ะฑะพะปัŒัˆะพะณะพ ะบะพะปะธั‡ะตัั‚ะฒะฐ ะทะฐะดะฐั‡ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั ั ะพั‡ะตะฝัŒ ะฒั‹ัะพะบะพะน ัะบะพั€ะพัั‚ัŒัŽ, ะฒ ะดะตััั‚ะบะธ ะธะปะธ ัะพั‚ะฝะธ ั€ะฐะท ะฟั€ะตะฒั‹ัˆะฐัŽั‰ะตะน ัะบะพั€ะพัั‚ัŒ ััƒั‰ะตัั‚ะฒัƒัŽั‰ะธั… ะผะตั‚ะพะดะพะฒ. + +7. **ะ’ะพะทะผะพะถะฝะพัั‚ัŒ ัะถะฐั‚ะธั ะผะพะดะตะปะธ:** FastSAM ะดะตะผะพะฝัั‚ั€ะธั€ัƒะตั‚ ะฒะพะทะผะพะถะฝะพัั‚ัŒ ััƒั‰ะตัั‚ะฒะตะฝะฝะพ ัะฝะธะทะธั‚ัŒ ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ะต ะทะฐั‚ั€ะฐั‚ั‹, ะฒะฒะตะดั ะธัะบัƒััั‚ะฒะตะฝะฝะพะต ะฟั€ะตะธะผัƒั‰ะตัั‚ะฒะพ ะฒ ัั‚ั€ัƒะบั‚ัƒั€ัƒ ะผะพะดะตะปะธ, ะพั‚ะบั€ั‹ะฒะฐั ะฝะพะฒั‹ะต ะฒะพะทะผะพะถะฝะพัั‚ะธ ะดะปั ัะพะทะดะฐะฝะธั ะบั€ัƒะฟะฝะพะผะฐััˆั‚ะฐะฑะฝั‹ั… ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ ะผะพะดะตะปะตะน ะดะปั ะพะฑั‰ะธั… ะทะฐะดะฐั‡ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั. + +## ะ”ะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ ะธ ั€ะตะถะธะผั‹ ั€ะฐะฑะพั‚ั‹ + +ะ’ ัะปะตะดัƒัŽั‰ะตะน ั‚ะฐะฑะปะธั†ะต ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะดะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ ั ะธั… ะบะพะฝะบั€ะตั‚ะฝั‹ะผะธ ะทะฐั€ะฐะฝะตะต ะพะฑัƒั‡ะตะฝะฝั‹ะผะธ ะฒะตัะฐะผะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ ะธ ัะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ั ั€ะฐะทะปะธั‡ะฝั‹ะผะธ ั€ะตะถะธะผะฐะผะธ ั€ะฐะฑะพั‚ั‹, ั‚ะฐะบะธะผะธ ะบะฐะบ [ะ’ั‹ะฒะพะด](../modes/predict.md), [ะ’ะฐะปะธะดะฐั†ะธั](../modes/val.md), [ะžะฑัƒั‡ะตะฝะธะต](../modes/train.md) ะธ [ะญะบัะฟะพั€ั‚](../modes/export.md), ะพะฑะพะทะฝะฐั‡ะตะฝะฝั‹ะต ะทะฝะฐั‡ะบะฐะผะธ โœ… ะดะปั ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั€ะตะถะธะผะพะฒ ะธ ะทะฝะฐั‡ะบะฐะผะธ โŒ ะดะปั ะฝะตะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั€ะตะถะธะผะพะฒ. 
+ +| ะขะธะฟ ะผะพะดะตะปะธ | ะ—ะฐั€ะฐะฝะตะต ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ | ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ | ะ’ั‹ะฒะพะด | ะ’ะฐะปะธะดะฐั†ะธั | ะžะฑัƒั‡ะตะฝะธะต | ะญะบัะฟะพั€ั‚ | +|------------|------------------------|------------------------------------------------|-------|-----------|----------|---------| +| FastSAM-s | `FastSAM-s.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะœะพะดะตะปะธ FastSAM ะปะตะณะบะพ ะธะฝั‚ะตะณั€ะธั€ะพะฒะฐั‚ัŒ ะฒ ะฒะฐัˆะธ ะฟั€ะธะปะพะถะตะฝะธั ะฝะฐ Python. Ultralytics ะฟั€ะตะดะพัั‚ะฐะฒะปัะตั‚ ัƒะดะพะฑะฝั‹ะน ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะน ะธะฝั‚ะตั€ั„ะตะนั API ะธ ะบะพะผะฐะฝะดั‹ CLI ะดะปั ัƒะฟั€ะพั‰ะตะฝะธั ั€ะฐะทั€ะฐะฑะพั‚ะบะธ. + +### ะ˜ัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะดะปั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน + +ะ”ะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธัะฟะพะปัŒะทัƒะนั‚ะต ะผะตั‚ะพะด `predict`, ะบะฐะบ ะฟะพะบะฐะทะฐะฝะพ ะฝะธะถะต: + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # ะžะฟั€ะตะดะตะปะตะฝะธะต ะธัั…ะพะดะฐ ะฒั‹ะฒะพะดะฐ + source = 'ะฟัƒั‚ัŒ/ะบ/ั„ะพั‚ะพ_ะฐะฒั‚ะพะฑัƒัะฐ.jpg' + + # ะกะพะทะดะฐะฝะธะต ะผะพะดะตะปะธ FastSAM + model = FastSAM('FastSAM-s.pt') # ะธะปะธ FastSAM-x.pt + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต + ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # ะกะพะทะดะฐะฝะธะต ะพะฑัŠะตะบั‚ะฐ Prompt Process + prompt_process = FastSAMPrompt(source, ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹, device='cpu') + + # ะ’ั‹ะฒะพะด ะฒัะตะณะพ + ann = prompt_process.everything_prompt() + + # ะŸั€ัะผะพัƒะณะพะปัŒะฝะฐั ะพะฑะปะฐัั‚ัŒ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ [0,0,0,0] -> [x1,y1,x2,y2] + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # ะขะตะบัั‚ะพะฒั‹ะน ะทะฐะฟั€ะพั + ann = prompt_process.text_prompt(text='ั„ะพั‚ะพะณั€ะฐั„ะธั ัะพะฑะฐะบะธ') + + # ะ—ะฐะฟั€ะพั ั‚ะพั‡ะบะธ + # ะขะพั‡ะบะธ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ [[0,0]] [[x1,y1],[x2,y2]] + # ะผะตั‚ะบะฐ ั‚ะพั‡ะบะธ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ [0] [1,0] 0:ั„ะพะฝ, 1:ะฟะตั€ะตะดะฝะธะน ะฟะปะฐะฝ + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ FastSAM ะธ ัะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตะณะพ ะพะฑัŠะตะบั‚ะฐ ะฝะฐ ะฝะตะผ + yolo segment predict model=FastSAM-s.pt source=ะฟัƒั‚ัŒ/ะบ/ั„ะพั‚ะพ_ะฐะฒั‚ะพะฑัƒัะฐ.jpg imgsz=640 + ``` + +ะ’ ัั‚ะพะผ ั„ั€ะฐะณะผะตะฝั‚ะต ะบะพะดะฐ ะดะตะผะพะฝัั‚ั€ะธั€ัƒะตั‚ัั ะฟั€ะพัั‚ะพั‚ะฐ ะทะฐะณั€ัƒะทะบะธ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ะธ ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ. + +### ะ˜ัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะดะปั ะฒะฐะปะธะดะฐั†ะธะธ + +ะ’ะฐะปะธะดะฐั†ะธัŽ ะผะพะดะตะปะธ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะผะพะถะฝะพ ะฒั‹ะฟะพะปะฝะธั‚ัŒ ัะปะตะดัƒัŽั‰ะธะผ ะพะฑั€ะฐะทะพะผ: + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + ```python + from ultralytics import FastSAM + + # ะกะพะทะดะฐะฝะธะต ะผะพะดะตะปะธ FastSAM + model = FastSAM('FastSAM-s.pt') # ะธะปะธ FastSAM-x.pt + + # ะ’ะฐะปะธะดะฐั†ะธั ะผะพะดะตะปะธ + ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ FastSAM ะธ ะตะต ะฒะฐะปะธะดะฐั†ะธั ะฝะฐ ะฟั€ะธะผะตั€ะต ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… COCO8 ะฟั€ะธ ั€ะฐะทะผะตั€ะต ะธะทะพะฑั€ะฐะถะตะฝะธั 640 + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +ะŸะพะถะฐะปัƒะนัั‚ะฐ, ะพะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ FastSAM ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ั‚ะพะปัŒะบะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะธ ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ะตะดะธะฝัั‚ะฒะตะฝะฝะพะณะพ ะบะปะฐััะฐ ะพะฑัŠะตะบั‚ะพะฒ. ะญั‚ะพ ะพะทะฝะฐั‡ะฐะตั‚, ั‡ั‚ะพ ะผะพะดะตะปัŒ ะฑัƒะดะตั‚ ั€ะฐัะฟะพะทะฝะฐะฒะฐั‚ัŒ ะธ ัะตะณะผะตะฝั‚ะธั€ะพะฒะฐั‚ัŒ ะฒัะต ะพะฑัŠะตะบั‚ั‹ ะบะฐะบ ะพะดะธะฝ ะธ ั‚ะพั‚ ะถะต ะบะปะฐัั. ะŸะพัั‚ะพะผัƒ ะฟั€ะธ ะฟะพะดะณะพั‚ะพะฒะบะต ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะฒะฐะผ ะฝัƒะถะฝะพ ะฟั€ะตะพะฑั€ะฐะทะพะฒะฐั‚ัŒ ะฒัะต ะธะดะตะฝั‚ะธั„ะธะบะฐั‚ะพั€ั‹ ะบะฐั‚ะตะณะพั€ะธะน ะพะฑัŠะตะบั‚ะพะฒ ะฒ 0. + +## ะžั„ะธั†ะธะฐะปัŒะฝะพะต ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต FastSAM + +FastSAM ั‚ะฐะบะถะต ะดะพัั‚ัƒะฟะฝะฐ ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ะธะท ั€ะตะฟะพะทะธั‚ะพั€ะธั [https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM). ะ’ะพั‚ ะบั€ะฐั‚ะบะธะน ะพะฑะทะพั€ ั‚ะธะฟะธั‡ะฝั‹ั… ัˆะฐะณะพะฒ ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั FastSAM: + +### ะฃัั‚ะฐะฝะพะฒะบะฐ + +1. ะšะปะพะฝะธั€ัƒะนั‚ะต ั€ะตะฟะพะทะธั‚ะพั€ะธะน FastSAM: + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. ะกะพะทะดะฐะนั‚ะต ะธ ะฐะบั‚ะธะฒะธั€ัƒะนั‚ะต ะฒะธั€ั‚ัƒะฐะปัŒะฝะพะต ะพะบั€ัƒะถะตะฝะธะต Conda ั Python 3.9: + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. ะŸะตั€ะตะนะดะธั‚ะต ะฒ ะบะฐั‚ะฐะปะพะณ ัะบะปะพะฝะธั€ะพะฒะฐะฝะฝะพะณะพ ั€ะตะฟะพะทะธั‚ะพั€ะธั ะธ ัƒัั‚ะฐะฝะพะฒะธั‚ะต ั‚ั€ะตะฑัƒะตะผั‹ะต ะฟะฐะบะตั‚ั‹: + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. ะฃัั‚ะฐะฝะพะฒะธั‚ะต ะผะพะดะตะปัŒ CLIP: + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### ะŸั€ะธะผะตั€ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +1. ะกะบะฐั‡ะฐะนั‚ะต [ั„ะฐะนะป ะบะพะฝั‚ั€ะพะปัŒะฝะพะน ั‚ะพั‡ะบะธ ะผะพะดะตะปะธ](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing). + +2. ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะผะพะดะตะปัŒ FastSAM ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฒั‹ะฒะพะดะฐ. 
ะŸั€ะธะผะตั€ั‹ ะบะพะผะฐะฝะด: + + - ะกะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตะณะพ ะพะฑัŠะตะบั‚ะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - ะกะตะณะผะตะฝั‚ะฐั†ะธั ะพะฟั€ะตะดะตะปะตะฝะฝั‹ั… ะพะฑัŠะตะบั‚ะพะฒ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ั‚ะตะบัั‚ะพะฒะพะณะพ ะทะฐะฟั€ะพัะฐ: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "ะถะตะปั‚ะฐั ัะพะฑะฐะบะฐ" + ``` + + - ะกะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะฟั€ะตะดะตะปะฐั… ะพะณั€ะฐะฝะธั‡ะธะฒะฐัŽั‰ะตะน ั€ะฐะผะบะธ (ัƒะบะฐะถะธั‚ะต ะบะพะพั€ะดะธะฝะฐั‚ั‹ ั€ะฐะผะบะธ ะฒ ั„ะพั€ะผะฐั‚ะต xywh): + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - ะกะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ, ะฝะฐั…ะพะดัั‰ะธั…ัั ะฑะปะธะทะบะพ ะบ ะพะฟั€ะตะดะตะปะตะฝะฝั‹ะผ ั‚ะพั‡ะบะฐะผ: + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +ะšั€ะพะผะต ั‚ะพะณะพ, ะฒั‹ ะผะพะถะตั‚ะต ะพะฟั€ะพะฑะพะฒะฐั‚ัŒ FastSAM ั ะฟะพะผะพั‰ัŒัŽ [ะดะตะผะพะฝัั‚ั€ะฐั†ะธะพะฝะฝะพะณะพ ะฝะพัƒั‚ะฑัƒะบะฐ Colab](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing) ะธะปะธ ะฝะฐ [ะฒะตะฑ-ะดะตะผะพ HuggingFace](https://huggingface.co/spaces/An-619/FastSAM) ะดะปั ะฒะธะทัƒะฐะปัŒะฝะพะณะพ ะพะฟั‹ั‚ะฐ. + +## ะฆะธั‚ะธั€ะพะฒะฐะฝะธะต ะธ ะฑะปะฐะณะพะดะฐั€ะฝะพัั‚ะธ + +ะœั‹ ั…ะพั‚ะตะปะธ ะฑั‹ ะฒั‹ั€ะฐะทะธั‚ัŒ ะฑะปะฐะณะพะดะฐั€ะฝะพัั‚ัŒ ะฐะฒั‚ะพั€ะฐะผ FastSAM ะทะฐ ะธั… ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ะน ะฒะบะปะฐะด ะฒ ะพะฑะปะฐัั‚ะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ะžั€ะธะณะธะฝะฐะปัŒะฝะฐั ัั‚ะฐั‚ัŒั FastSAM ะดะพัั‚ัƒะฟะฝะฐ ะฝะฐ [arXiv](https://arxiv.org/abs/2306.12156). ะะฒั‚ะพั€ั‹ ัะดะตะปะฐะปะธ ัะฒะพัŽ ั€ะฐะฑะพั‚ัƒ ะพะฑั‰ะตะดะพัั‚ัƒะฟะฝะพะน, ะธ ะบะพะด ะผะพะถะฝะพ ะฟะพะปัƒั‡ะธั‚ัŒ ะฝะฐ [GitHub](https://github.com/CASIA-IVA-Lab/FastSAM). ะœั‹ ั†ะตะฝะธะผ ะธั… ัƒัะธะปะธั ะฟะพ ั€ะฐะทะฒะธั‚ะธัŽ ะพั‚ั€ะฐัะปะธ ะธ ะดะพัั‚ัƒะฟัƒ ะบ ะธั… ั€ะฐะฑะพั‚ะต ะดะปั ัˆะธั€ะพะบะพะณะพ ะบั€ัƒะณะฐ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะตะน. diff --git a/ultralytics/docs/ru/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/ru/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/models/index.md b/ultralytics/docs/ru/models/index.md new file mode 100755 index 0000000..43fd65b --- /dev/null +++ b/ultralytics/docs/ru/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: ะ˜ััะปะตะดัƒะนั‚ะต ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ะน ัะฟะตะบั‚ั€ ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… Ultralytics ะผะพะดะตะปะตะน ัะตะผะตะนัั‚ะฒะฐ YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS ะธ RT-DETR. ะะฐั‡ะฝะธั‚ะต ั€ะฐะฑะพั‚ัƒ ั ะฟั€ะธะผะตั€ะฐะผะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะบะฐะบ ะดะปั CLI, ั‚ะฐะบ ะธ ะดะปั Python. 
+keywords: Ultralytics, ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, ะผะพะดะตะปะธ, ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹, Python, CLI +--- + +# ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะผะพะดะตะปะธ Ultralytics + +ะ”ะพะฑั€ะพ ะฟะพะถะฐะปะพะฒะฐั‚ัŒ ะฒ ะดะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ะผะพะดะตะปัะผ Ultralytics! ะœั‹ ะฟะพะดะดะตั€ะถะธะฒะฐะตะผ ัˆะธั€ะพะบะธะน ัะฟะตะบั‚ั€ ะผะพะดะตะปะตะน, ะบะฐะถะดะฐั ะธะท ะบะพั‚ะพั€ั‹ั… ะฐะดะฐะฟั‚ะธั€ะพะฒะฐะฝะฐ ะดะปั ะบะพะฝะบั€ะตั‚ะฝั‹ั… ะทะฐะดะฐั‡, ั‚ะฐะบะธั… ะบะฐะบ [ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ](../tasks/detect.md), [ัะตะณะผะตะฝั‚ะฐั†ะธั ะฝะฐ ัƒั€ะพะฒะฝะต ะธะฝัั‚ะฐะฝั†ะธะน](../tasks/segment.md), [ะบะปะฐััะธั„ะธะบะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน](../tasks/classify.md), [ะพั†ะตะฝะบะฐ ะฟะพะทั‹](../tasks/pose.md) ะธ [ัะปะตะถะตะฝะธะต ะทะฐ ะฝะตัะบะพะปัŒะบะธะผะธ ะพะฑัŠะตะบั‚ะฐะผะธ](../modes/track.md). ะ•ัะปะธ ะฒั‹ ะทะฐะธะฝั‚ะตั€ะตัะพะฒะฐะฝั‹ ะฒ ะดะพะฑะฐะฒะปะตะฝะธะธ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ ัะฒะพะตะน ะผะพะดะตะปะธ ะฒ Ultralytics, ะพะทะฝะฐะบะพะผัŒั‚ะตััŒ ั ะฝะฐัˆะธะผ [ะ ัƒะบะพะฒะพะดัั‚ะฒะพะผ ะดะปั ัƒั‡ะฐัั‚ะฝะธะบะพะฒ](../../help/contributing.md). + +!!! Note "ะŸั€ะธะผะตั‡ะฐะฝะธะต" + + ๐Ÿšง ะะฐัˆะฐ ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั ะฝะฐ ั€ะฐะทะฝั‹ั… ัะทั‹ะบะฐั… ะฝะฐั…ะพะดะธั‚ัั ะฒ ัั‚ะฐะดะธะธ ั€ะฐะทั€ะฐะฑะพั‚ะบะธ, ะธ ะผั‹ ัƒัะตั€ะดะฝะพ ั€ะฐะฑะพั‚ะฐะตะผ ะฝะฐะด ะตะต ัƒะปัƒั‡ัˆะตะฝะธะตะผ. ะกะฟะฐัะธะฑะพ ะทะฐ ะฒะฐัˆะต ั‚ะตั€ะฟะตะฝะธะต! ๐Ÿ™ + +## ะžัะพะฑะตะฝะฝั‹ะต ะผะพะดะตะปะธ + +ะ’ะพั‚ ะฝะตะบะพั‚ะพั€ั‹ะต ะบะปัŽั‡ะตะฒั‹ะต ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะผะพะดะตะปะธ: + +1. **[YOLOv3](yolov3.md)**: ะขั€ะตั‚ัŒะต ะฟะพะบะพะปะตะฝะธะต ัะตะผะตะนัั‚ะฒะฐ ะผะพะดะตะปะตะน YOLO, ะฐะฒั‚ะพั€ัั‚ะฒะฐ ะ”ะถะพะทะตั„ะฐ ะ ะตะดะผะพะฝะฐ, ะธะทะฒะตัั‚ะฝะพะต ัะฒะพะตะน ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒัŽ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. +2. **[YOLOv4](yolov4.md)**: ะะฐั‚ะธะฒะฝะพะต ะดะปั darknet ะพะฑะฝะพะฒะปะตะฝะธะต YOLOv3, ะฒั‹ะฟัƒั‰ะตะฝะฝะพะต ะะปะตะบัะตะตะผ ะ‘ะพั‡ะบะพะฒัะบะธะผ ะฒ 2020 ะณะพะดัƒ. +3. **[YOLOv5](yolov5.md)**: ะฃะปัƒั‡ัˆะตะฝะฝะฐั ะฒะตั€ัะธั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ YOLO ะพั‚ Ultralytics, ะฟั€ะตะดะปะฐะณะฐัŽั‰ะฐั ะปัƒั‡ัˆะธะต ะบะพะผะฟั€ะพะผะธััั‹ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะธ ัะบะพั€ะพัั‚ะธ ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั ะฟั€ะตะดั‹ะดัƒั‰ะธะผะธ ะฒะตั€ัะธัะผะธ. +4. **[YOLOv6](yolov6.md)**: ะ’ั‹ะฟัƒั‰ะตะฝะฝะฐั ะฒ 2022 ะณะพะดัƒ ะบะพะผะฟะฐะฝะธะตะน [Meituan](https://about.meituan.com/) ะธ ะธัะฟะพะปัŒะทัƒะตะผะฐั ะฒะพ ะผะฝะพะณะธั… ั€ะพะฑะพั‚ะฐั… ะฐะฒั‚ะพะฝะพะผะฝะพะน ะดะพัั‚ะฐะฒะบะธ ะบะพะผะฟะฐะฝะธะธ. +5. **[YOLOv7](yolov7.md)**: ะžะฑะฝะพะฒะปะตะฝะฝั‹ะต ะผะพะดะตะปะธ YOLO, ะฒั‹ะฟัƒั‰ะตะฝะฝั‹ะต ะฒ 2022 ะณะพะดัƒ ะฐะฒั‚ะพั€ะฐะผะธ YOLOv4. +6. **[YOLOv8](yolov8.md) ะะžะ’ะ˜ะะšะ ๐Ÿš€**: ะŸะพัะปะตะดะฝัั ะฒะตั€ัะธั ัะตะผะตะนัั‚ะฒะฐ YOLO, ะพะฑะปะฐะดะฐัŽั‰ะฐั ั€ะฐััˆะธั€ะตะฝะฝั‹ะผะธ ะฒะพะทะผะพะถะฝะพัั‚ัะผะธ, ั‚ะฐะบะธะผะธ ะบะฐะบ ัะตะณะผะตะฝั‚ะฐั†ะธั ะฝะฐ ัƒั€ะพะฒะฝะต ะธะฝัั‚ะฐะฝั†ะธะน, ะพั†ะตะฝะบะฐ ะฟะพะทั‹/ะบะปัŽั‡ะตะฒั‹ั… ั‚ะพั‡ะตะบ ะธ ะบะปะฐััะธั„ะธะบะฐั†ะธั. +7. **[Segment Anything Model (SAM)](sam.md)**: ะœะพะดะตะปัŒ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะฒัะตะณะพ ะธ ะฒัั (SAM) ะพั‚ Meta. +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**: MobileSAM ะดะปั ะผะพะฑะธะปัŒะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธะน ะพั‚ ัƒะฝะธะฒะตั€ัะธั‚ะตั‚ะฐ Kyung Hee. +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**: FastSAM ะพั‚ ะ“ั€ัƒะฟะฟั‹ ะฐะฝะฐะปะธะทะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธ ะฒะธะดะตะพ, ะ˜ะฝัั‚ะธั‚ัƒั‚ะฐ ะฐะฒั‚ะพะผะฐั‚ะธะบะธ, ะšะธั‚ะฐะนัะบะพะน ะฐะบะฐะดะตะผะธะธ ะฝะฐัƒะบ. +10. 
+10. **[YOLO-NAS](yolo-nas.md)**: YOLO Neural Architecture Search (NAS) models.
+11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**: Baidu PaddlePaddle's Realtime Detection Transformer (RT-DETR) models.
+
+**Watch:** Run Ultralytics YOLO models in just a few lines of code.
+
+ +## ะะฐั‡ะฐะปะพ ั€ะฐะฑะพั‚ั‹: ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะญั‚ะพั‚ ะฟั€ะธะผะตั€ ะฟั€ะตะดะพัั‚ะฐะฒะปัะตั‚ ะฟั€ะพัั‚ั‹ะต ะฟั€ะธะผะตั€ั‹ ะพะฑัƒั‡ะตะฝะธั ะธ ะฒั‹ะฒะพะดะฐ ะดะปั YOLO. ะŸะพะปะฝะฐั ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั ะฟะพ ัั‚ะธะผ ะธ ะดั€ัƒะณะธะผ [ั€ะตะถะธะผะฐะผ](../modes/index.md) ะฟั€ะตะดัั‚ะฐะฒะปะตะฝะฐ ะฝะฐ ัั‚ั€ะฐะฝะธั†ะฐั… ะดะพะบัƒะผะตะฝั‚ะฐั†ะธะธ [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) ะธ [Export](../modes/export.md). + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะฝะธะถะต ะฟั€ะธะฒะตะดะตะฝ ะฟั€ะธะผะตั€ ะดะปั ะผะพะดะตะปะตะน [Detect](../tasks/detect.md) YOLOv8 ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. ะ”ะปั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ั… ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ะทะฐะดะฐั‡ ัะผะพั‚ั€ะธั‚ะต ะดะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ [Segment](../tasks/segment.md), [Classify](../tasks/classify.md) ะธ [Pose](../tasks/pose.md). + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ะŸั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ PyTorch `*.pt`, ะฐ ั‚ะฐะบะถะต ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะพะฝะฝั‹ะต ั„ะฐะนะปั‹ `*.yaml` ะผะพะณัƒั‚ ะฑั‹ั‚ัŒ ะฟะตั€ะตะดะฐะฝั‹ ะฒ ะบะปะฐััั‹ `YOLO()`, `SAM()`, `NAS()` ะธ `RTDETR()`, ั‡ั‚ะพะฑั‹ ัะพะทะดะฐั‚ัŒ ัะบะทะตะผะฟะปัั€ ะผะพะดะตะปะธ ะฝะฐ Python: + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO + model = YOLO('yolov8n.pt') + + # ะžั‚ะพะฑั€ะฐะทะธั‚ัŒ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ ะผะพะดะตะปะธ (ะฝะตะพะฑัะทะฐั‚ะตะปัŒะฝะพ) + model.info() + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ ะฝะฐ ะฟั€ะธะผะตั€ะฝะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO8 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ะ—ะฐะฟัƒัั‚ะธั‚ัŒ ะฒั‹ะฒะพะด ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ YOLOv8n ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ 'bus.jpg' + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ะšะพะผะฐะฝะดั‹ CLI ะดะพัั‚ัƒะฟะฝั‹ ะดะปั ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพะณะพ ะทะฐะฟัƒัะบะฐ ะผะพะดะตะปะตะน: + + ```bash + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO ะธ ะพะฑัƒั‡ะธั‚ัŒ ะตั‘ ะฝะฐ ะฟั€ะธะผะตั€ะฝะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO8 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO ะธ ะทะฐะฟัƒัั‚ะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ 'bus.jpg' + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ะ’ะบะปะฐะด ะฒ ะฝะพะฒั‹ะต ะผะพะดะตะปะธ + +ะ—ะฐะธะฝั‚ะตั€ะตัะพะฒะฐะฝั‹ ะฒ ั‚ะพะผ, ั‡ั‚ะพะฑั‹ ะฒะฝะตัั‚ะธ ัะฒะพัŽ ะผะพะดะตะปัŒ ะฒ Ultralytics? ะžั‚ะปะธั‡ะฝะพ! ะœั‹ ะฒัะตะณะดะฐ ะพั‚ะบั€ั‹ั‚ั‹ ะดะปั ั€ะฐััˆะธั€ะตะฝะธั ะฝะฐัˆะตะณะพ ะฟะพั€ั‚ั„ะพะปะธะพ ะผะพะดะตะปะตะน. + +1. **ะกะดะตะปะฐะนั‚ะต Fork ะ ะตะฟะพะทะธั‚ะพั€ะธั**: ะะฐั‡ะฝะธั‚ะต ั ัะพะทะดะฐะฝะธั ั„ะพั€ะบะฐ [ั€ะตะฟะพะทะธั‚ะพั€ะธั Ultralytics ะฝะฐ GitHub](https://github.com/ultralytics/ultralytics). + +2. **ะกะบะปะพะฝะธั€ัƒะนั‚ะต ัะฒะพะน Fork**: ะกะบะปะพะฝะธั€ัƒะนั‚ะต ะฒะฐัˆ ั„ะพั€ะบ ะฝะฐ ะปะพะบะฐะปัŒะฝัƒัŽ ะผะฐัˆะธะฝัƒ ะธ ัะพะทะดะฐะนั‚ะต ะฝะพะฒัƒัŽ ะฒะตั‚ะบัƒ ะดะปั ั€ะฐะฑะพั‚ั‹. + +3. **ะ ะตะฐะปะธะทัƒะนั‚ะต ัะฒะพัŽ ะœะพะดะตะปัŒ**: ะ”ะพะฑะฐะฒัŒั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ, ัะปะตะดัƒั ัั‚ะฐะฝะดะฐั€ั‚ะฐะผ ะฟั€ะพะณั€ะฐะผะผะธั€ะพะฒะฐะฝะธั ะธ ั€ัƒะบะพะฒะพะดัั‰ะธะผ ะฟั€ะธะฝั†ะธะฟะฐะผ, ัƒะบะฐะทะฐะฝะฝั‹ะผ ะฒ ะฝะฐัˆะตะผ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะดะปั ัƒั‡ะฐัั‚ะฝะธะบะพะฒ](../../help/contributing.md). + +4. 
diff --git a/ultralytics/docs/ru/models/mobile-sam.md b/ultralytics/docs/ru/models/mobile-sam.md
new file mode 100755
index 0000000..906ec88
--- /dev/null
+++ b/ultralytics/docs/ru/models/mobile-sam.md
@@ -0,0 +1,116 @@
+---
+comments: true
+description: Learn more about MobileSAM, its implementation, comparison with the original SAM, and how to download and test it in the Ultralytics framework. Improve your mobile applications today.
+keywords: MobileSAM, Ultralytics, SAM, mobile applications, Arxiv, GPU, API, image encoder, mask decoder, model download, testing method
+---
+
+![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true)
+
+# Mobile Segment Anything (MobileSAM)
+
+The MobileSAM paper is now available on [arXiv](https://arxiv.org/pdf/2306.14289.pdf).
+
+A demonstration of MobileSAM running on a CPU can be found at this [link](https://huggingface.co/spaces/dhkim2810/MobileSAM). On a Mac i5 CPU, inference takes about 3 seconds. In the Hugging Face demo, the interface and lower-performance CPUs can slow things down, but it still works effectively.
+
+MobileSAM is implemented in various projects, including [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), [AnyLabeling](https://github.com/vietanhdev/anylabeling) and [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D).
+
+MobileSAM is trained on a single GPU with a 100k-image dataset (1% of the original images) in less than a day. Code for this training will be made available in the future.
+ +## ะ”ะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ ะธ ั€ะตะถะธะผั‹ ั€ะฐะฑะพั‚ั‹ + +ะ’ ั‚ะฐะฑะปะธั†ะต ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะดะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ ั ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒัŽั‰ะธะผะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะผะธ ะฒะตัะฐะผะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะผะธ ะทะฐะดะฐั‡ะฐะผะธ ะธ ะธั… ัะพะฒะผะตัั‚ะธะผะพัั‚ัŒัŽ ั ั€ะฐะทะปะธั‡ะฝั‹ะผะธ ั€ะตะถะธะผะฐะผะธ ั€ะฐะฑะพั‚ั‹, ั‚ะฐะบะธะผะธ ะบะฐะบ [ะฒั‹ะฒะพะด](../modes/predict.md), [ะฒะฐะปะธะดะฐั†ะธั](../modes/val.md), [ั‚ั€ะตะฝะธั€ะพะฒะบะฐ](../modes/train.md) ะธ [ัะบัะฟะพั€ั‚](../modes/export.md), ัƒะบะฐะทะฐะฝะฝั‹ะผะธ ั ะฟะพะผะพั‰ัŒัŽ ัะผะพะดะทะธ โœ… ะดะปั ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั€ะตะถะธะผะพะฒ ะธ ัะผะพะดะทะธ โŒ ะดะปั ะฝะตะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั…. + +| ะขะธะฟ ะผะพะดะตะปะธ | ะŸั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ | ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ | ะ’ั‹ะฒะพะด | ะ’ะฐะปะธะดะฐั†ะธั | ะขั€ะตะฝะธั€ะพะฒะบะฐ | ะญะบัะฟะพั€ั‚ | +|------------|-------------------------------|------------------------------------------------|-------|-----------|------------|---------| +| MobileSAM | `mobile_sam.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ะŸะตั€ะตั…ะพะด ะพั‚ SAM ะบ MobileSAM + +ะŸะพัะบะพะปัŒะบัƒ MobileSAM ัะพั…ั€ะฐะฝัะตั‚ ั‚ัƒ ะถะต ัะฐะผัƒัŽ ะฟะพัะปะตะดะพะฒะฐั‚ะตะปัŒะฝะพัั‚ัŒ ะพะฟะตั€ะฐั†ะธะน, ั‡ั‚ะพ ะธ ะพั€ะธะณะธะฝะฐะปัŒะฝั‹ะน SAM, ะฒ ะฝะตะณะพ ะธะฝั‚ะตะณั€ะธั€ะพะฒะฐะฝั‹ ะฒัะต ะธะฝั‚ะตั€ั„ะตะนัั‹ ะดะปั ะฟั€ะตะดะพะฑั€ะฐะฑะพั‚ะบะธ, ะฟะพัั‚ะพะฑั€ะฐะฑะพั‚ะบะธ ะธ ะฟั€ะพั‡ะธะต ะธะฝั‚ะตั€ั„ะตะนัั‹. ะ’ ั€ะตะทัƒะปัŒั‚ะฐั‚ะต, ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะธ, ัƒะถะต ะธัะฟะพะปัŒะทัƒัŽั‰ะธะต ะพั€ะธะณะธะฝะฐะปัŒะฝั‹ะน SAM, ะผะพะณัƒั‚ ะปะตะณะบะพ ะฟะตั€ะตะนั‚ะธ ะฝะฐ MobileSAM. + +MobileSAM ั€ะฐะฑะพั‚ะฐะตั‚ ัั€ะฐะฒะฝะธะผะพ ั ะพั€ะธะณะธะฝะฐะปัŒะฝั‹ะผ SAM ะธ ะธะผะตะตั‚ ั‚ัƒ ะถะต ัะฐะผัƒัŽ ะฟะพัะปะตะดะพะฒะฐั‚ะตะปัŒะฝะพัั‚ัŒ ะพะฟะตั€ะฐั†ะธะน, ะทะฐ ะธัะบะปัŽั‡ะตะฝะธะตะผ ะธะทะผะตะฝะตะฝะธั ะบะพะดะธั€ะพะฒั‰ะธะบะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะน. ะ’ ั‡ะฐัั‚ะฝะพัั‚ะธ, ะผั‹ ะทะฐะผะตะฝัะตะผ ะพั€ะธะณะธะฝะฐะปัŒะฝั‹ะน "ั‚ัะถั‘ะปั‹ะน" ะบะพะดะธั€ะพะฒั‰ะธะบ ะธะทะพะฑั€ะฐะถะตะฝะธะน ViT-H (632M) ะฝะฐ ะฑะพะปะตะต ะบะพะผะฟะฐะบั‚ะฝั‹ะน Tiny-ViT (5M). ะะฐ ะพะดะฝะพะผ ะณั€ะฐั„ะธั‡ะตัะบะพะผ ะฟั€ะพั†ะตััะพั€ะต MobileSAM ั€ะฐะฑะพั‚ะฐะตั‚ ะฟั€ะธะผะตั€ะฝะพ ะทะฐ 12 ะผั ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต: 8 ะผั ะฝะฐ ะบะพะดะธั€ะพะฒั‰ะธะบ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธ 4 ะผั ะฝะฐ ะดะตะบะพะดะตั€ ะผะฐัะพะบ. 
+
+This table compares the ViT-based image encoders:
+
+| Image Encoder | Original SAM | MobileSAM |
+|---------------|--------------|-----------|
+| Parameters    | 611M         | 5M        |
+| Speed         | 452 ms       | 8 ms      |
+
+The original SAM and MobileSAM use the same prompt-guided mask decoder:
+
+| Mask Decoder | Original SAM | MobileSAM |
+|--------------|--------------|-----------|
+| Parameters   | 3.876M       | 3.876M    |
+| Speed        | 4 ms         | 4 ms      |
+
+Here is a comparison of the whole pipeline:
+
+| Whole Pipeline (Enc+Dec) | Original SAM | MobileSAM |
+|--------------------------|--------------|-----------|
+| Parameters               | 615M         | 9.66M     |
+| Speed                    | 456 ms       | 12 ms     |
+
+The performance of MobileSAM and the original SAM is demonstrated using both point and box prompts.
+
+![Image with Point as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+![Image with Box as Prompt](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true)
+
+MobileSAM is about 5 times smaller and 7 times faster than the current FastSAM. More details are available on the [MobileSAM project page](https://github.com/ChaoningZhang/MobileSAM).
+
+## Testing MobileSAM in Ultralytics
+
+Just like the original SAM, we offer a straightforward testing method in Ultralytics, including modes for both point and box prompts.
+
+### Model Download
+
+You can download the model [here](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt).
+
+### Point Prompt
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a point prompt
+        model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1])
+        ```
+
+### Box Prompt
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import SAM
+
+        # Load the model
+        model = SAM('mobile_sam.pt')
+
+        # Predict a segment based on a box prompt
+        model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709])
+        ```
+
+We have implemented `MobileSAM` and `SAM` using the same API. For more usage information, please see the [SAM page](sam.md).
+
+## Citations and Acknowledgements
+
+If you find MobileSAM useful in your research or development work, please consider citing our paper:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @article{mobile_sam,
+          title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications},
+          author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon},
+          journal={arXiv preprint arXiv:2306.14289},
+          year={2023}
+        }
+        ```
diff --git a/ultralytics/docs/ru/models/rtdetr.md b/ultralytics/docs/ru/models/rtdetr.md
new file mode 100755
index 0000000..7b8b374
--- /dev/null
+++ b/ultralytics/docs/ru/models/rtdetr.md
@@ -0,0 +1,93 @@
+---
+comments: true
+description: Discover the features and benefits of Baidu's RT-DETR, an efficient and adaptable real-time object detector based on Vision Transformers, including pre-trained models.
+keywords: RT-DETR, Baidu, Vision Transformers, object detection, real-time performance, CUDA, TensorRT, IoU-aware query selection, Ultralytics, Python API, PaddlePaddle
+---
+
+# Baidu's RT-DETR: A Vision Transformer-Based Real-Time Object Detector
+
+## Overview
+
+Real-Time Detection Transformer (RT-DETR), developed by Baidu, is a cutting-edge end-to-end object detector that delivers high accuracy with real-time performance. It leverages Vision Transformers (ViT) to efficiently process multiscale features by decoupling intra-scale interaction and cross-scale fusion. RT-DETR is highly adaptable, supporting flexible adjustment of inference speed through different decoder layers without retraining.
+The model excels on accelerated backends such as CUDA with TensorRT, outperforming many other real-time object detectors.
+
+![Model example image](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png)
+**Overview of Baidu's RT-DETR.** The RT-DETR model architecture diagram shows the last three stages of the backbone {S3, S4, S5} as input to the encoder. The efficient hybrid encoder transforms multiscale features into a sequence of image features through intra-scale feature interaction (AIFI) and a cross-scale feature fusion module (CCFM). IoU-aware query selection is used to initialize the decoder's object queries. Finally, the decoder with auxiliary prediction heads iteratively refines the object queries to generate boxes and confidence scores ([source](https://arxiv.org/pdf/2304.08069.pdf)).
+
+### Key Features
+
+- **Efficient Hybrid Encoder:** Baidu's RT-DETR uses an efficient hybrid encoder that processes multiscale features by decoupling intra-scale interaction and cross-scale fusion. This unique Vision Transformer-based design reduces computational cost and enables real-time object detection.
+- **IoU-Aware Query Selection:** Baidu's RT-DETR improves object query initialization by employing IoU-aware query selection. This allows the model to focus on the most relevant objects in the scene, improving detection accuracy.
+- **Flexible Inference Speed:** Baidu's RT-DETR supports flexible adjustment of inference speed by using different decoder layers without retraining. This adaptability simplifies its application in a variety of real-time object detection scenarios.
+
+## Pre-trained Models
+
+The Ultralytics Python API provides pre-trained PaddlePaddle RT-DETR models at different scales:
+
+- RT-DETR-L: 53.0% AP on COCO val2017, 114 FPS on a T4 GPU
+- RT-DETR-X: 54.8% AP on COCO val2017, 74 FPS on a T4 GPU
+
+## Usage Examples
+
+This example provides simple RT-DETR training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) documentation pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import RTDETR
+
+        # Load a COCO-pretrained RT-DETR-l model
+        model = RTDETR('rtdetr-l.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the RT-DETR-l model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        ```bash
+        # Load a COCO-pretrained RT-DETR-l model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained RT-DETR-l model and run inference on the 'bus.jpg' image
+        yolo predict model=rtdetr-l.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+This table presents the model types, their specific pre-trained weights, the tasks they support, and the various modes ([Train](../modes/train.md), [Val](../modes/val.md), [Predict](../modes/predict.md), [Export](../modes/export.md)) each model supports, indicated by the โœ… symbol.
+
+| Model Type          | Pre-trained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|---------------------|---------------------|----------------------------------------|-----------|------------|----------|--------|
+| RT-DETR Large       | `rtdetr-l.pt`       | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โœ…        | โœ…      |
+| RT-DETR Extra-Large | `rtdetr-x.pt`       | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โœ…        | โœ…      |
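+Since the table above lists Export as supported, a trained model can also be converted for deployment. The following is a minimal sketch, assuming the standard Ultralytics `export()` API and the ONNX format; other formats described in the Export mode docs follow the same pattern:
+
+```python
+from ultralytics import RTDETR
+
+# Load a COCO-pretrained RT-DETR-l model
+model = RTDETR('rtdetr-l.pt')
+
+# Export to ONNX for deployment (format name per the Export mode documentation)
+model.export(format='onnx')
+```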
+
+## Citations and Acknowledgements
+
+If you use Baidu's RT-DETR in your research or development work, please cite the [original paper](https://arxiv.org/abs/2304.08069):
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{lv2023detrs,
+              title={DETRs Beat YOLOs on Real-time Object Detection},
+              author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu},
+              year={2023},
+              eprint={2304.08069},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to thank Baidu and the [PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection) team for creating and maintaining this valuable resource for the computer vision community. We greatly appreciate their contribution to the field with the development of the Vision Transformer-based real-time object detector, RT-DETR.
+
+*Keywords: RT-DETR, Transformer, ViT, Vision Transformers, Baidu RT-DETR, PaddlePaddle, Paddle Paddle RT-DETR, real-time object detection, Vision Transformers-based object detection, pre-trained PaddlePaddle RT-DETR models, Baidu's RT-DETR usage, Ultralytics Python API*
diff --git a/ultralytics/docs/ru/models/sam.md b/ultralytics/docs/ru/models/sam.md
new file mode 100755
index 0000000..11479c3
--- /dev/null
+++ b/ultralytics/docs/ru/models/sam.md
@@ -0,0 +1,226 @@
+---
+comments: true
+description: Explore the cutting-edge Segment Anything Model (SAM) from Ultralytics that enables real-time image segmentation. Learn about its promptable segmentation, zero-shot capabilities, and how to use it.
+keywords: Ultralytics, image segmentation, Segment Anything Model, SAM, SA-1B dataset, real-time performance, zero-shot transfer, object detection, image analysis, machine learning
+---
+
+# Segment Anything Model (SAM)
+
+Welcome to the frontier of image segmentation with the Segment Anything Model (SAM). This revolutionary model has set new standards in the field by introducing promptable image segmentation with real-time performance.
+
+## Introduction to the Segment Anything Model (SAM)
+
+The Segment Anything Model (SAM) is a cutting-edge image segmentation model that allows promptable segmentation, providing unique flexibility in image analysis tasks.
SAM ัะฒะปัะตั‚ัั ะบะปัŽั‡ะตะฒั‹ะผ ัะปะตะผะตะฝั‚ะพะผ ะธะฝะธั†ะธะฐั‚ะธะฒั‹ "Segment Anything", ะบะพั‚ะพั€ะฐั ะฒะฒะพะดะธั‚ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ, ะทะฐะดะฐั‡ัƒ ะธ ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน. + +ะ‘ะปะฐะณะพะดะฐั€ั ัะฒะพะตะผัƒ ะฟะตั€ะตะดะพะฒะพะผัƒ ะดะธะทะฐะนะฝัƒ, SAM ะผะพะถะตั‚ ะฐะดะฐะฟั‚ะธั€ะพะฒะฐั‚ัŒัั ะบ ะฝะพะฒั‹ะผ ั€ะฐัะฟั€ะตะดะตะปะตะฝะธัะผ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธ ะทะฐะดะฐั‡ะฐะผ ะฑะตะท ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝั‹ั… ะทะฝะฐะฝะธะน, ั‡ั‚ะพ ะฝะฐะทั‹ะฒะฐะตั‚ัั ะฒะพะทะผะพะถะฝะพัั‚ัŒัŽ ะฝัƒะปะตะฒะพะน ะฝะฐัั‚ั€ะพะนะบะธ. ะžะฑัƒั‡ะตะฝะฝะฐั ะฝะฐ ะพะฑัˆะธั€ะฝะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [SA-1B](https://ai.facebook.com/datasets/segment-anything/), ะบะพั‚ะพั€ั‹ะน ัะพะดะตั€ะถะธั‚ ะฑะพะปะตะต 1 ะผะธะปะปะธะฐั€ะดะฐ ะผะฐัะพะบ, ั€ะฐัะฟั€ะตะดะตะปะตะฝะฝั‹ั… ะฝะฐ 11 ะผะธะปะปะธะพะฝะพะฒ ั‚ั‰ะฐั‚ะตะปัŒะฝะพ ะพั‚ะพะฑั€ะฐะฝะฝั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะผะพะดะตะปัŒ SAM ะฟั€ะพัะฒะธะปะฐ ะฒะฟะตั‡ะฐั‚ะปััŽั‰ัƒัŽ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฒ ะทะฐะดะฐั‡ะฐั… ะฝัƒะปะตะฒะพะน ะฝะฐัั‚ั€ะพะนะบะธ, ะฟั€ะตะฒะพัั…ะพะดั ะฟั€ะตะดั‹ะดัƒั‰ะธะต ะฟะพะปะฝะพัั‚ัŒัŽ ะฝะฐะดะทะธั€ะฐะตะผั‹ะต ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะฒะพ ะผะฝะพะณะธั… ัะปัƒั‡ะฐัั…. + +![ะŸั€ะธะผะตั€ ะธะทะพะฑั€ะฐะถะตะฝะธั ะธะท ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั…](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +ะ˜ะทะพะฑั€ะฐะถะตะฝะธั ั ะฝะฐะปะพะถะตะฝะฝั‹ะผะธ ะผะฐัะบะฐะผะธ ะธะท ะฝะฐัˆะตะณะพ ะฝะพะฒะพะณะพ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… SA-1B. SA-1B ัะพะดะตั€ะถะธั‚ 11 ะผะปะฝ. ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ั… ะปะธั†ะตะฝะทะธั€ะพะฒะฐะฝะฝั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฒั‹ัะพะบะพะณะพ ั€ะฐะทั€ะตัˆะตะฝะธั, ัะณะตะฝะตั€ะธั€ะพะฒะฐะฝะฝั‹ั… ะฟะพะปะฝะพัั‚ัŒัŽ ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ SAM, ะธ 1,1 ะผะปั€ะด. ะฒั‹ัะพะบะพะบะฐั‡ะตัั‚ะฒะตะฝะฝั‹ั… ะผะฐัะพะบ ัะตะณะผะตะฝั‚ะฐั†ะธะธ. ะญั‚ะธ ะผะฐัะบะธ ะฑั‹ะปะธ ะฐะฝะฝะพั‚ะธั€ะพะฒะฐะฝั‹ ะฟะพะปะฝะพัั‚ัŒัŽ ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ SAM, ะธ, ะบะฐะบ ะฟะพะบะฐะทะฐะปะธ ั‡ะตะปะพะฒะตั‡ะตัะบะธะต ะพั†ะตะฝะบะธ ะธ ะผะฝะพะถะตัั‚ะฒะพ ัะบัะฟะตั€ะธะผะตะฝั‚ะพะฒ, ะพะฝะธ ัะฒะปััŽั‚ัั ะฒั‹ัะพะบะพะบะฐั‡ะตัั‚ะฒะตะฝะฝั‹ะผะธ ะธ ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ะผะธ. ะ˜ะทะพะฑั€ะฐะถะตะฝะธั ัะณั€ัƒะฟะฟะธั€ะพะฒะฐะฝั‹ ะฟะพ ะบะพะปะธั‡ะตัั‚ะฒัƒ ะผะฐัะพะบ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต ะดะปั ะฝะฐะณะปัะดะฝะพัั‚ะธ (ะฒ ัั€ะตะดะฝะตะผ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต ะฟั€ะธั…ะพะดะธั‚ัั ะพะบะพะปะพ 100 ะผะฐัะพะบ). + +## ะžัะฝะพะฒะฝั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ ะผะพะดะตะปะธ Segment Anything (SAM) + +- **ะ—ะฐะดะฐั‡ะฐ ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะธั ะฟะพะดัะบะฐะทะพะบ ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ:** SAM ะฑั‹ะปะฐ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ั ัƒั‡ะตั‚ะพะผ ะทะฐะดะฐั‡ะธ ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะธั ะฟะพะดัะบะฐะทะพะบ ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ, ั‡ั‚ะพ ะฟะพะทะฒะพะปัะตั‚ ะตะน ะณะตะฝะตั€ะธั€ะพะฒะฐั‚ัŒ ะบะพั€ั€ะตะบั‚ะฝั‹ะต ะผะฐัะบะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะฝะฐ ะพัะฝะพะฒะต ะปัŽะฑั‹ั… ะฟะพะดัะบะฐะทะพะบ, ั‚ะฐะบะธั… ะบะฐะบ ะฟั€ะพัั‚ั€ะฐะฝัั‚ะฒะตะฝะฝั‹ะต ะธะปะธ ั‚ะตะบัั‚ะพะฒั‹ะต ะฟะพะดัะบะฐะทะบะธ, ะธะดะตะฝั‚ะธั„ะธั†ะธั€ัƒัŽั‰ะธะต ะพะฑัŠะตะบั‚. +- **ะ ะฐััˆะธั€ะตะฝะฝะฐั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ:** ะœะพะดะตะปัŒ Segment Anything ะธัะฟะพะปัŒะทัƒะตั‚ ะผะพั‰ะฝั‹ะน ะบะพะดะธั€ะพะฒั‰ะธะบ ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะบะพะดะธั€ะพะฒั‰ะธะบ ะฟะพะดัะบะฐะทะพะบ ะธ ะปะตะณะบะธะน ะดะตะบะพะดะตั€ ะผะฐัะพะบ. ะญั‚ะฐ ัƒะฝะธะบะฐะปัŒะฝะฐั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะณะธะฑะบะพะต ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะฟะพะดัะบะฐะทะพะบ, ะฒั‹ั‡ะธัะปะตะฝะธะต ะผะฐัะพะบ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะธ ัƒั‡ะตั‚ ะฝะตะพะฟั€ะตะดะตะปะตะฝะฝะพัั‚ะธ ะฒ ะทะฐะดะฐั‡ะฐั… ัะตะณะผะตะฝั‚ะฐั†ะธะธ. 
+- **ะะฐะฑะพั€ ะดะฐะฝะฝั‹ั… SA-1B:** ะะฐะฑะพั€ ะดะฐะฝะฝั‹ั… SA-1B, ะฟั€ะตะดะปะพะถะตะฝะฝั‹ะน ะฟั€ะพะตะบั‚ะพะผ Segment Anything, ัะพะดะตั€ะถะธั‚ ะฑะพะปะตะต 1 ะผะธะปะปะธะฐั€ะดะฐ ะผะฐัะพะบ ะฝะฐ 11 ะผะธะปะปะธะพะฝะฐั… ะธะทะพะฑั€ะฐะถะตะฝะธะน. ะšะฐะบ ัะฐะผั‹ะน ะฑะพะปัŒัˆะพะน ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะฝะฐ ัะตะณะพะดะฝััˆะฝะธะน ะดะตะฝัŒ, ะพะฝ ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะผะพะดะตะปะธ SAM ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ะน ะธ ะผะฐััˆั‚ะฐะฑะฝั‹ะน ะธัั‚ะพั‡ะฝะธะบ ะดะฐะฝะฝั‹ั… ะดะปั ะพะฑัƒั‡ะตะฝะธั. +- **ะŸั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฟั€ะธ ะฝัƒะปะตะฒะพะน ะฝะฐัั‚ั€ะพะนะบะต:** ะœะพะดะตะปัŒ SAM ะฟั€ะพัะฒะปัะตั‚ ะฒั‹ะดะฐัŽั‰ัƒัŽัั ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฟั€ะธ ะฒั‹ะฟะพะปะฝะตะฝะธะธ ะทะฐะดะฐั‡ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะฒ ั€ะตะถะธะผะต ะฝัƒะปะตะฒะพะน ะฝะฐัั‚ั€ะพะนะบะธ, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะตะต ะณะพั‚ะพะฒั‹ะผ ะบ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธัŽ ะธะฝัั‚ั€ัƒะผะตะฝั‚ะพะผ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธะน ั ะผะธะฝะธะผะฐะปัŒะฝะพะน ะฝะตะพะฑั…ะพะดะธะผะพัั‚ัŒัŽ ะฝะฐัั‚ั€ะฐะธะฒะฐั‚ัŒ ะฟะพะดัะบะฐะทะบะธ. + +ะ”ะปั ะฑะพะปะตะต ะฟะพะดั€ะพะฑะฝะพะณะพ ั€ะฐััะผะพั‚ั€ะตะฝะธั ะผะพะดะตะปะธ Segment Anything ะธ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… SA-1B, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะฟะพัะตั‚ะธั‚ะต [ะฒะตะฑ-ัะฐะนั‚ Segment Anything](https://segment-anything.com) ะธ ะพะทะฝะฐะบะพะผัŒั‚ะตััŒ ั ะธััะปะตะดะพะฒะฐั‚ะตะปัŒัะบะพะน ัั‚ะฐั‚ัŒะตะน [Segment Anything](https://arxiv.org/abs/2304.02643). + +## ะ”ะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ ะธ ั€ะตะถะธะผั‹ ั€ะฐะฑะพั‚ั‹ + +ะ’ ั‚ะฐะฑะปะธั†ะต ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะดะพัั‚ัƒะฟะฝั‹ะต ะผะพะดะตะปะธ ั ะธั… ัะฟะตั†ะธั„ะธั‡ะตัะบะธะผะธ ะทะฐั€ะฐะฝะตะต ะพะฑัƒั‡ะตะฝะฝั‹ะผะธ ะฒะตัะฐะผะธ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะผะธ ะทะฐะดะฐั‡ะฐะผะธ ะธ ะธั… ัะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ั ั€ะฐะทะปะธั‡ะฝั‹ะผะธ ั€ะตะถะธะผะฐะผะธ ั€ะฐะฑะพั‚ั‹, ั‚ะฐะบะธะผะธ ะบะฐะบ [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) ะธ [Export](../modes/export.md), ะพะฑะพะทะฝะฐั‡ะตะฝะฝะฐั ัะธะผะฒะพะปะฐะผะธ โœ… ะดะปั ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั€ะตะถะธะผะพะฒ ะธ ัะธะผะฒะพะปะฐะผะธ โŒ ะดะปั ะฝะตะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั€ะตะถะธะผะพะฒ. + +| ะขะธะฟ ะผะพะดะตะปะธ | ะ—ะฐั€ะฐะฝะตะต ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ | ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ | Inference | Validation | Training | Export | +|------------------------------------|------------------------|---------------------------------------------|-----------|------------|----------|--------| +| ะ‘ะฐะทะพะฒะฐั ะฒะตั€ัะธั SAM (SAM base) | `sam_b.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| ะ ะฐััˆะธั€ะตะฝะฝะฐั ะฒะตั€ัะธั SAM (SAM large) | `sam_l.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ะšะฐะบ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ SAM: ะณะธะฑะบะพัั‚ัŒ ะธ ะผะพั‰ะฝะพัั‚ัŒ ะฒ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน + +ะœะพะดะตะปัŒ Segment Anything ะผะพะถะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒัั ะดะปั ะผะฝะพะถะตัั‚ะฒะฐ ะทะฐะดะฐั‡, ะฒั‹ั…ะพะดัั‰ะธั… ะทะฐ ั€ะฐะผะบะธ ะตะต ั‚ั€ะตะฝะธั€ะพะฒะพั‡ะฝั‹ั… ะดะฐะฝะฝั‹ั…. ะญั‚ะพ ะฒะบะปัŽั‡ะฐะตั‚ ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะณั€ะฐะฝะธั†, ะณะตะฝะตั€ะฐั†ะธัŽ ะฟั€ะตะดะปะพะถะตะฝะธะน ะพะฑัŠะตะบั‚ะพะฒ, ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ัะบะทะตะผะฟะปัั€ะพะฒ ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพะต ะฟั€ะตะพะฑั€ะฐะทะพะฒะฐะฝะธะต ั‚ะตะบัั‚ะฐ ะฒ ะผะฐัะบัƒ. 
ะก ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฟะพะดัะบะฐะทะพะบ, SAM ะผะพะถะตั‚ ะฑั‹ัั‚ั€ะพ ะฐะดะฐะฟั‚ะธั€ะพะฒะฐั‚ัŒัั ะบ ะฝะพะฒั‹ะผ ะทะฐะดะฐั‡ะฐะผ ะธ ั€ะฐัะฟั€ะตะดะตะปะตะฝะธัะผ ะดะฐะฝะฝั‹ั… ะฒ ั€ะตะถะธะผะต ะฝัƒะปะตะฒะพะน ะฝะฐัั‚ั€ะพะนะบะธ, ะดะตะปะฐั ะตะณะพ ะณะธะฑะบะธะผ ะธ ะผะพั‰ะฝั‹ะผ ะธะฝัั‚ั€ัƒะผะตะฝั‚ะพะผ ะดะปั ะฒัะตั… ะฒะฐัˆะธั… ะฟะพั‚ั€ะตะฑะฝะพัั‚ะตะน ะฒ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน. + +### ะŸั€ะธะผะตั€ ะฟั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธั ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ SAM + +!!! Example "ะกะตะณะผะตะฝั‚ะฐั†ะธั ั ัƒะบะฐะทะฐะฝะธะตะผ ะฟะพะดัะบะฐะทะบะธ" + + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธั ั ัƒะบะฐะทะฐะฝะฝั‹ะผะธ ะฟะพะดัะบะฐะทะบะฐะผะธ. + + === "Python" + + ```python + from ultralytics import SAM + + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ + model = SAM('sam_b.pt') + + # ะ’ั‹ะฒะพะด ะธะฝั„ะพั€ะผะฐั†ะธะธ ะพ ะผะพะดะตะปะธ (ะฟะพ ะถะตะปะฐะฝะธัŽ) + model.info() + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ั ัƒะบะฐะทะฐะฝะธะตะผ ะณั€ะฐะฝะธั†ั‹ ะพะฑัŠะตะบั‚ะฐ (bboxes prompt) + model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ั ัƒะบะฐะทะฐะฝะธะตะผ ั‚ะพั‡ะบะธ (points prompt) + model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "ะกะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั" + + ะกะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั. + + === "Python" + + ```python + from ultralytics import SAM + + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ + model = SAM('sam_b.pt') + + # ะ’ั‹ะฒะพะด ะธะฝั„ะพั€ะผะฐั†ะธะธ ะพ ะผะพะดะตะปะธ (ะฟะพ ะถะตะปะฐะฝะธัŽ) + model.info() + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ + model('ะฟัƒั‚ัŒ/ะบ/ะธะทะพะฑั€ะฐะถะตะฝะธัŽ.jpg') + ``` + + === "CLI" + + ```bash + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ SAM + yolo predict model=sam_b.pt source=ะฟัƒั‚ัŒ/ะบ/ะธะทะพะฑั€ะฐะถะตะฝะธัŽ.jpg + ``` + +- ะ›ะพะณะธะบะฐ ะทะดะตััŒ ัะพัั‚ะพะธั‚ ะฒ ั‚ะพะผ, ั‡ั‚ะพะฑั‹ ะฒั‹ะฟะพะปะฝะธั‚ัŒ ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ะฒัะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั, ะตัะปะธ ะฒั‹ ะฝะต ะฟะตั€ะตะดะฐะตั‚ะต ะฝะธะบะฐะบะธั… ะฟะพะดัะบะฐะทะพะบ (bboxes/points/masks). + +!!! Example "ะŸั€ะธะผะตั€ SAMPredictor" + + ะ’ ัั‚ะพะผ ะฟั€ะธะผะตั€ะต ะฒั‹ ะผะพะถะตั‚ะต ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ ะธะทะพะฑั€ะฐะถะตะฝะธะต ะพะดะธะฝ ั€ะฐะท ะธ ะฒั‹ะฟะพะปะฝะธั‚ัŒ ะผะฝะพะถะตัั‚ะฒะพ ะฟั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะน ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฟะพะดัะบะฐะทะพะบ, ะฝะต ะทะฐะฟัƒัะบะฐั ะบะพะดะธั€ะพะฒั‰ะธะบ ะธะทะพะฑั€ะฐะถะตะฝะธั ะฝะตัะบะพะปัŒะบะพ ั€ะฐะท. + + === "ะŸั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะต ั ะฟะพะดัะบะฐะทะบะฐะผะธ" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # ะกะพะทะดะฐะฝะธะต SAMPredictor + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะธะทะพะฑั€ะฐะถะตะฝะธั + predictor.set_image("ultralytics/assets/zidane.jpg") # ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ ั ะฟะพะผะพั‰ัŒัŽ ั„ะฐะนะปะฐ ะธะทะพะฑั€ะฐะถะตะฝะธั + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ ั ะฟะพะผะพั‰ัŒัŽ np.ndarray + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # ะกะฑั€ะพั ะธะทะพะฑั€ะฐะถะตะฝะธั + predictor.reset_image() + ``` + + ะกะตะณะผะตะฝั‚ะฐั†ะธั ะฒัะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั ั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ะผะธ ะฐั€ะณัƒะผะตะฝั‚ะฐะผะธ. 
+
+    === "Segment everything"
+
+        ```python
+        from ultralytics.models.sam import Predictor as SAMPredictor
+
+        # Create SAMPredictor
+        overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt")
+        predictor = SAMPredictor(overrides=overrides)
+
+        # Segment with additional arguments
+        results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64)
+        ```
+
+- More additional arguments for `Segment everything` are available in the [`Predictor/generate` Reference](../../../reference/models/sam/predict.md).
+
+## SAM Comparison vs. YOLOv8
+
+Here we compare the smallest SAM model, SAM-b, with Ultralytics' smallest segmentation model, [YOLOv8n-seg](../tasks/segment.md):
+
+| Model                                          | Size                       | Parameters               | Speed (CPU)                |
+|------------------------------------------------|----------------------------|--------------------------|----------------------------|
+| SAM-b                                          | 358 MB                     | 94.7 M                   | 51096 ms/im                |
+| [MobileSAM](mobile-sam.md)                     | 40.7 MB                    | 10.1 M                   | 46122 ms/im                |
+| [FastSAM-s](fast-sam.md) with YOLOv8 backbone  | 23.7 MB                    | 11.8 M                   | 115 ms/im                  |
+| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (53.4x smaller) | **3.4 M** (27.9x fewer)  | **59 ms/im** (866x faster) |
+
+This comparison shows the order-of-magnitude differences in model size and speed. While SAM offers unique capabilities for automatic segmentation, it is not a direct competitor to the YOLOv8 segmentation models, which are smaller, faster, and more efficient.
+
+Tests were run on a 2023 Apple M2 MacBook with 16 GB of RAM. To reproduce this test:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics import FastSAM, SAM, YOLO
+
+        # Profile SAM-b
+        model = SAM('sam_b.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile MobileSAM
+        model = SAM('mobile_sam.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile FastSAM-s
+        model = FastSAM('FastSAM-s.pt')
+        model.info()
+        model('ultralytics/assets')
+
+        # Profile YOLOv8n-seg
+        model = YOLO('yolov8n-seg.pt')
+        model.info()
+        model('ultralytics/assets')
+        ```
+
+## Auto-Annotation: A Quick Path to Segmentation Datasets
+
+Auto-annotation is a key feature of SAM, allowing users to generate [segmentation datasets](https://docs.ultralytics.com/datasets/segment) using a pre-trained detection model. This feature enables rapid and accurate annotation of large numbers of images, bypassing the need for time-consuming manual labeling.
+
+### Generate a Segmentation Dataset Using a Detection Model
+
+To auto-annotate your dataset with the Ultralytics framework, use the `auto_annotate` function as shown below:
+
+!!! Example "Example"
+
+    === "Python"
+        ```python
+        from ultralytics.data.annotator import auto_annotate
+
+        auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt')
+        ```
+
+| Argument   | Type                | Description                                                                                            | Default      |
+|------------|---------------------|--------------------------------------------------------------------------------------------------------|--------------|
+| data       | str                 | Path to a folder containing the images to be annotated.                                                 |              |
+| det_model  | str, optional       | Pre-trained YOLO detection model. Defaults to 'yolov8x.pt'.                                             | 'yolov8x.pt' |
+| sam_model  | str, optional       | Pre-trained SAM segmentation model. Defaults to 'sam_b.pt'.                                             | 'sam_b.pt'   |
+| device     | str, optional       | Device to run the models on. Defaults to an empty string (CPU, or GPU if available).                    |              |
+| output_dir | str, None, optional | Directory to save the annotation results. Defaults to a 'labels' folder in the same directory as 'data'. | None         |
+
+The `auto_annotate` function takes the path to your images, with optional arguments for specifying the pre-trained detection and SAM segmentation models, the device to run the models on, and the output directory for saving the annotated results.
+
+Auto-annotation with pre-trained models can dramatically cut down the time and effort required to create high-quality segmentation datasets. This feature is especially beneficial for researchers and developers working with large image collections, as it allows them to focus on model development and evaluation rather than manual annotation.
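+As an illustrative follow-up (not part of the official workflow), the generated labels can feed a training run once you write a dataset YAML pointing at your images and the produced 'labels' folder; `my_autolabeled_data.yaml` below is a hypothetical file you would create yourself:
+
+```python
+from ultralytics import YOLO
+
+# Train a segmentation model on the auto-annotated dataset
+model = YOLO('yolov8n-seg.pt')
+model.train(data='my_autolabeled_data.yaml', epochs=100, imgsz=640)  # dataset YAML is assumed, not generated for you
+```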
+
+## Citations and Acknowledgements
+
+If you find SAM useful in your research or development work, please consider citing our paper:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{kirillov2023segment,
+              title={Segment Anything},
+              author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick},
+              year={2023},
+              eprint={2304.02643},
+              archivePrefix={arXiv},
+              primaryClass={cs.CV}
+        }
+        ```
+
+We would like to express our gratitude to Meta AI for creating and maintaining this valuable resource for the computer vision community.
+
+*keywords: Segment Anything, Segment Anything Model, SAM, Meta SAM, image segmentation, promptable segmentation, zero-shot performance, SA-1B dataset, advanced architecture, auto-annotation, Ultralytics, pre-trained models, SAM base, SAM large, instance segmentation, computer vision, artificial intelligence, machine learning, data annotation, segmentation masks, detection model, YOLOv8 detection model, bibtex, Meta AI.*
diff --git a/ultralytics/docs/ru/models/yolo-nas.md b/ultralytics/docs/ru/models/yolo-nas.md
new file mode 100755
index 0000000..cb44c48
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolo-nas.md
@@ -0,0 +1,121 @@
+---
+comments: true
+description: Explore the detailed documentation of YOLO-NAS, a superior object detection model. Learn about its features, pre-trained models, usage with the Ultralytics Python API, and more.
+keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, pre-trained models, quantization, optimization, COCO, Objects365, Roboflow 100
+---
+
+# YOLO-NAS
+
+## Overview
+
+Developed by Deci AI, YOLO-NAS is a groundbreaking foundation model for object detection.
ะžะฝะฐ ัะฒะปัะตั‚ัั ะฟั€ะพะดัƒะบั‚ะพะผ ะฟั€ะพะดะฒะธะฝัƒั‚ะพะน ั‚ะตั…ะฝะพะปะพะณะธะธ ะฟะพะธัะบะฐ ะฝะตะนั€ะพะฐั€ั…ะธั‚ะตะบั‚ัƒั€ ะธ ัะฟะตั†ะธะฐะปัŒะฝะพ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ะดะปั ะฟั€ะตะพะดะพะปะตะฝะธั ะพะณั€ะฐะฝะธั‡ะตะฝะธะน ะฟั€ะตะดั‹ะดัƒั‰ะธั… ะผะพะดะตะปะตะน YOLO. ะ‘ะปะฐะณะพะดะฐั€ั ััƒั‰ะตัั‚ะฒะตะฝะฝะพะผัƒ ัƒะปัƒั‡ัˆะตะฝะธัŽ ะฟะพะดะดะตั€ะถะบะธ ะบะฒะฐะฝั‚ะพะฒะฐะฝะธั ะธ ะบะพะผะฟั€ะพะผะธััะฐ ะผะตะถะดัƒ ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ะทะฐะดะตั€ะถะบะพะน, YOLO-NAS ะฟั€ะตะดัั‚ะฐะฒะปัะตั‚ ัะพะฑะพะน ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ะน ะฟั€ะพั€ั‹ะฒ ะฒ ะพะฑะปะฐัั‚ะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. + +![ะ˜ะปะปัŽัั‚ั€ะฐั†ะธั ะผะพะดะตะปะธ](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**ะžะฑะทะพั€ YOLO-NAS.** YOLO-NAS ะธัะฟะพะปัŒะทัƒะตั‚ ะฑะปะพะบะธ, ะฟะพะดะดะตั€ะถะธะฒะฐัŽั‰ะธะต ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต, ะธ ัะตะปะตะบั‚ะธะฒะฝะพะต ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต ะดะปั ะดะพัั‚ะธะถะตะฝะธั ะพะฟั‚ะธะผะฐะปัŒะฝะพะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. ะœะพะดะตะปัŒ, ะบะพะณะดะฐ ะฟะตั€ะตะฒะพะดะธั‚ัั ะฒ ะบะฒะฐะฝั‚ะพะฒะฐะฝะฝัƒัŽ ะฒะตั€ัะธัŽ INT8, ะธะผะตะตั‚ ะผะธะฝะธะผะฐะปัŒะฝะพะต ะฟะฐะดะตะฝะธะต ั‚ะพั‡ะฝะพัั‚ะธ, ั‡ั‚ะพ ัะฒะปัะตั‚ัั ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ะผ ัƒะปัƒั‡ัˆะตะฝะธะตะผ ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั ะดั€ัƒะณะธะผะธ ะผะพะดะตะปัะผะธ. ะญั‚ะธ ะดะพัั‚ะธะถะตะฝะธั ะฟั€ะธะฒะพะดัั‚ ะบ ะฟั€ะตะฒะพัั…ะพะดะฝะพะน ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะต ั ะฑะตัะฟั€ะตั†ะตะดะตะฝั‚ะฝั‹ะผะธ ะฒะพะทะผะพะถะฝะพัั‚ัะผะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะธ ะฒั‹ะดะฐัŽั‰ะตะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒัŽ. + +### ะžัะฝะพะฒะฝั‹ะต ั„ัƒะฝะบั†ะธะธ + +- **ะ‘ะฐะทะพะฒั‹ะน ะฑะปะพะบ ะฟะพะดะดะตั€ะถะธะฒะฐัŽั‰ะธะน ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต:** YOLO-NAS ะฟั€ะตะดะปะฐะณะฐะตั‚ ะฝะพะฒั‹ะน ะฑะฐะทะพะฒั‹ะน ะฑะปะพะบ, ะบะพั‚ะพั€ั‹ะน ั…ะพั€ะพัˆะพ ั€ะฐะฑะพั‚ะฐะตั‚ ั ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะตะผ, ั‡ั‚ะพ ะฟะพะทะฒะพะปัะตั‚ ะฟั€ะตะพะดะพะปะตั‚ัŒ ะพะดะฝะพ ะธะท ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ั… ะพะณั€ะฐะฝะธั‡ะตะฝะธะน ะฟั€ะตะดั‹ะดัƒั‰ะธั… ะผะพะดะตะปะตะน YOLO. +- **ะกะพะฒะตั€ัˆะตะฝัั‚ะฒะพะฒะฐะฝะธะต ั‚ั€ะตะฝะธั€ะพะฒะบะธ ะธ ะบะฒะฐะฝั‚ะพะฒะฐะฝะธั:** YOLO-NAS ะธัะฟะพะปัŒะทัƒะตั‚ ะฟั€ะพะดะฒะธะฝัƒั‚ั‹ะต ัั…ะตะผั‹ ั‚ั€ะตะฝะธั€ะพะฒะบะธ ะธ ะฟะพัั‚-ั‚ั€ะตะฝะธั€ะพะฒะพั‡ะฝะพะต ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต ะดะปั ัƒะปัƒั‡ัˆะตะฝะธั ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. +- **ะžะฟั‚ะธะผะธะทะฐั†ะธั AutoNAC ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะฐั ะพะฑัƒั‡ะตะฝะธะต:** YOLO-NAS ะธัะฟะพะปัŒะทัƒะตั‚ ะพะฟั‚ะธะผะธะทะฐั†ะธัŽ AutoNAC ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพะต ะพะฑัƒั‡ะตะฝะธะต ะฝะฐ ะธะทะฒะตัั‚ะฝั‹ั… ะฝะฐะฑะพั€ะฐั… ะดะฐะฝะฝั‹ั…, ั‚ะฐะบะธั… ะบะฐะบ COCO, Objects365 ะธ Roboflow 100. ะญั‚ะพ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพะต ะพะฑัƒั‡ะตะฝะธะต ะดะตะปะฐะตั‚ ะผะพะดะตะปัŒ ะธะดะตะฐะปัŒะฝะพ ะฟะพะดั…ะพะดัั‰ะตะน ะดะปั ั€ะตัˆะตะฝะธะน ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะฟั€ะพะธะทะฒะพะดัั‚ะฒะตะฝะฝั‹ั… ัั€ะตะดะฐั…. + +## ะŸั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ + +ะžั‰ัƒั‚ะธั‚ะต ะผะพั‰ัŒ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะพะฒะพะณะพ ะฟะพะบะพะปะตะฝะธั ั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะผะธ ะผะพะดะตะปัะผะธ YOLO-NAS, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผะธ ะบะพะผะฟะฐะฝะธะตะน Ultralytics. ะญั‚ะธ ะผะพะดะตะปะธ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝั‹ ะดะปั ะพะฑะตัะฟะตั‡ะตะฝะธั ะฒั‹ัะพะบะพะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะบะฐะบ ะฒ ะฟะปะฐะฝะต ัะบะพั€ะพัั‚ะธ, ั‚ะฐะบ ะธ ั‚ะพั‡ะฝะพัั‚ะธ. 
+Choose from a variety of options tailored to your specific needs:
+
+| Model            | mAP   | Latency (ms) |
+|------------------|-------|--------------|
+| YOLO-NAS S       | 47.5  | 3.21         |
+| YOLO-NAS M       | 51.55 | 5.85         |
+| YOLO-NAS L       | 52.22 | 7.87         |
+| YOLO-NAS S INT-8 | 47.03 | 2.36         |
+| YOLO-NAS M INT-8 | 51.0  | 3.78         |
+| YOLO-NAS L INT-8 | 52.1  | 4.78         |
+
+Each model variant is designed to balance mean Average Precision (mAP) and latency, helping you optimize your object detection tasks for both performance and speed.
+
+## Usage Examples
+
+Ultralytics has made YOLO-NAS models easy to integrate into your Python applications via our `ultralytics` package. The package provides a user-friendly Python API to streamline the process.
+
+The following examples show how to use YOLO-NAS models with the `ultralytics` package for inference and validation:
+
+### Inference and Validation Examples
+
+In this example we validate YOLO-NAS-s on the COCO8 dataset.
+
+!!! Example "Example"
+
+    This example provides simple inference and validation code for YOLO-NAS. For handling inference results, see the [Predict](../modes/predict.md) mode. For using YOLO-NAS with additional modes, see [Val](../modes/val.md) and [Export](../modes/export.md). The `ultralytics` package does not support training for YOLO-NAS.
+
+    === "Python"
+
+        Pre-trained PyTorch `*.pt` model files can be passed to the `NAS()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import NAS
+
+        # Load a COCO-pretrained YOLO-NAS-s model
+        model = NAS('yolo_nas_s.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Validate the model on the COCO8 example dataset
+        results = model.val(data='coco8.yaml')
+
+        # Run inference with the YOLO-NAS-s model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLO-NAS-s model and validate its performance on the COCO8 example dataset
+        yolo val model=yolo_nas_s.pt data=coco8.yaml
+
+        # Load a COCO-pretrained YOLO-NAS-s model and run inference on the 'bus.jpg' image
+        yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg
+        ```
+
+## Supported Tasks and Modes
+
+We offer three variants of the YOLO-NAS models: Small (s), Medium (m) and Large (l). Each variant is designed for different computational and performance needs:
+
+- **YOLO-NAS-s**: Optimized for environments with limited computational resources where efficiency is key.
+- **YOLO-NAS-m**: Offers a balanced approach, suitable for general-purpose object detection with higher accuracy.
+- **YOLO-NAS-l**: Tailored for scenarios requiring the highest accuracy, where computational resources are less of a constraint.
+
+Below is a detailed overview of each model, including links to their pre-trained weights, the tasks they support, and their compatibility with different operating modes.
+
+| Model Type | Pre-trained Weights                                                                           | Tasks Supported                        | Inference | Validation | Training | Export |
+|------------|-----------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โŒ        | โœ…      |
+| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โŒ        | โœ…      |
+| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [Object Detection](../tasks/detect.md) | โœ…         | โœ…          | โŒ        | โœ…      |
+
+## Citations and Acknowledgements
+
+If you use YOLO-NAS in your research or development work, please cite SuperGradients:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{supergradients,
+              doi = {10.5281/ZENODO.7789328},
+              url = {https://zenodo.org/record/7789328},
+              author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}},
+              title = {Super-Gradients},
+              publisher = {GitHub},
+              journal = {GitHub repository},
+              year = {2021},
+        }
+        ```
+
+We express our gratitude to Deci AI's [SuperGradients](https://github.com/Deci-AI/super-gradients/) team for their efforts in creating and maintaining this valuable resource for the computer vision community. We believe YOLO-NAS, with its innovative architecture and superior object detection capabilities, will become a critical tool for developers and researchers alike.
+
+*keywords: YOLO-NAS, Deci AI, object detection, deep learning, neural architecture search, Ultralytics Python API, YOLO model, SuperGradients, pre-trained models, quantization-friendly basic block, advanced training schemes, post-training quantization, AutoNAC optimization, COCO, Objects365, Roboflow 100*
diff --git a/ultralytics/docs/ru/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/ru/models/yolo-nas.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolo-nas.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ru/models/yolov3.md b/ultralytics/docs/ru/models/yolov3.md
new file mode 100755
index 0000000..53d2fc0
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov3.md
@@ -0,0 +1,98 @@
+---
+comments: true
+description: Get an overview of YOLOv3, YOLOv3-Ultralytics and YOLOv3u. Learn about their key features, usage, and supported tasks for object detection.
+keywords: YOLOv3, YOLOv3-Ultralytics, YOLOv3u, Object Detection, Inference, Training, Ultralytics
+---
+
+# YOLOv3, YOLOv3-Ultralytics, and YOLOv3u
+
+## Overview
+
+This document presents an overview of three closely related object detection models: [YOLOv3](https://pjreddie.com/darknet/yolo/), [YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3) and [YOLOv3u](https://github.com/ultralytics/ultralytics).
+
+1. **YOLOv3:** This is the third version of the You Only Look Once (YOLO) object detection algorithm. Originally developed by Joseph Redmon, YOLOv3 improved on its predecessors by introducing features such as multiscale predictions and three different sizes of detection kernels.
+
+2. **YOLOv3-Ultralytics:** This is the Ultralytics implementation of the YOLOv3 model. It reproduces the original YOLOv3 architecture and offers additional functionality, such as support for more pretrained models and easier customization options.
+
+3. **YOLOv3u:** This is an updated version of YOLOv3-Ultralytics that incorporates the anchor-free, objectness-free split detection head used in the YOLOv8 models. YOLOv3u keeps the same backbone and neck architecture as YOLOv3 but with the updated detection head from YOLOv8.
+
+![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png)
+
+## Key Features
+
+- **YOLOv3:** Introduced the use of three detection scales, leveraging three different sizes of detection kernels: 13x13, 26x26 and 52x52. This significantly improved detection accuracy for objects of different sizes. YOLOv3 also added features such as multi-label predictions for each bounding box and a better feature-extractor network.
+
+- **YOLOv3-Ultralytics:** The Ultralytics implementation of YOLOv3 delivers the same performance as the original model but adds support for more pretrained models, additional training methods, and easier customization options. This makes it more flexible and convenient for practical applications.
+
+- **YOLOv3u:** This updated model adopts the anchor-free, objectness-free split detection head from YOLOv8. By eliminating the need for predefined anchor boxes and objectness scores, this detection-head design can improve the model's ability to detect objects of varying sizes and shapes, making YOLOv3u more robust and accurate for object detection tasks.
+
+## Supported Tasks and Modes
+
+The YOLOv3 series, including YOLOv3, YOLOv3-Ultralytics and YOLOv3u, is designed specifically for object detection tasks. These models are renowned for their effectiveness in various real-world scenarios, balancing accuracy and speed. Each variant offers unique features and optimizations, making them suitable for a wide range of applications.
+
+All three models support a comprehensive set of modes, ensuring versatility at different stages of model deployment and development. These modes include [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md), allowing users to make full use of these models for effective object detection.
+
+| Model Type         | Tasks Supported                        | Inference | Validation | Training | Export |
+|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv3             | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3-Ultralytics | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv3u            | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides an at-a-glance view of the capabilities of each YOLOv3 variant, highlighting their versatility and suitability for various tasks and operational modes in object detection workflows.
+
+## Usage Examples
+
+This example provides simple YOLOv3 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv3n model
+        model = YOLO('yolov3n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv3n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv3n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv3n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov3n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use YOLOv3 in your research, please cite the original YOLO papers and the Ultralytics YOLOv3 repository:
Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +ะ‘ะปะฐะณะพะดะฐั€ะธะผ ะ”ะถะพะทะตั„ะฐ ะ ะตะดะผะพะฝะฐ ะธ ะะปะธ ะคะฐั€ั…ะฐะดะธ ะทะฐ ั€ะฐะทั€ะฐะฑะพั‚ะบัƒ ะพั€ะธะณะธะฝะฐะปัŒะฝะพะน ะผะพะดะตะปะธ YOLOv3. diff --git a/ultralytics/docs/ru/models/yolov3.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/models/yolov4.md b/ultralytics/docs/ru/models/yolov4.md new file mode 100755 index 0000000..b44b6d0 --- /dev/null +++ b/ultralytics/docs/ru/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: ะ˜ะทัƒั‡ะธั‚ะต ะฝะฐัˆ ะฟะพะดั€ะพะฑะฝั‹ะน ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ YOLOv4 - ะฟะตั€ะตะดะพะฒะพะผัƒ ะพะฑะฝะฐั€ัƒะถะธั‚ะตะปัŽ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ. ะŸะพะฝะธะผะฐะฝะธะต ะตะณะพ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฝั‹ั… ะพัะพะฑะตะฝะฝะพัั‚ะตะน, ะธะฝะฝะพะฒะฐั†ะธะพะฝะฝั‹ั… ั„ัƒะฝะบั†ะธะน ะธ ะฟั€ะธะผะตั€ะพะฒ ะฟั€ะธะผะตะฝะตะฝะธั. +keywords: ultralytics, YOLOv4, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะฝะตะนั€ะพะฝะฝั‹ะต ัะตั‚ะธ, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะดะตั‚ะตะบั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ, ะผะฐัˆะธะฝะฝะพะต ะพะฑัƒั‡ะตะฝะธะต +--- + +# YOLOv4: ะ‘ั‹ัั‚ั€ะพะต ะธ ั‚ะพั‡ะฝะพะต ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ + +ะ”ะพะฑั€ะพ ะฟะพะถะฐะปะพะฒะฐั‚ัŒ ะฝะฐ ัั‚ั€ะฐะฝะธั†ัƒ ะดะพะบัƒะผะตะฝั‚ะฐั†ะธะธ Ultralytics ะฟะพ YOLOv4 - ะฟะตั€ะตะดะพะฒะพะผัƒ ะพะฑะฝะฐั€ัƒะถะธั‚ะตะปัŽ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ, ัะพะทะดะฐะฝะฝะพะผัƒ ะฒ 2020 ะณะพะดัƒ ะะปะตะบัะตะตะผ ะ‘ะพั‡ะบะพะฒัะบะธะผ ะฝะฐ [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). YOLOv4 ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝ ะดะปั ะพะฑะตัะฟะตั‡ะตะฝะธั ะพะฟั‚ะธะผะฐะปัŒะฝะพะณะพ ะฑะฐะปะฐะฝัะฐ ะผะตะถะดัƒ ัะบะพั€ะพัั‚ัŒัŽ ะธ ั‚ะพั‡ะฝะพัั‚ัŒัŽ, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะตะณะพ ะพั‚ะปะธั‡ะฝั‹ะผ ะฒั‹ะฑะพั€ะพะผ ะดะปั ะผะฝะพะณะธั… ะฟั€ะธะปะพะถะตะฝะธะน. + +![ะ”ะธะฐะณั€ะฐะผะผะฐ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ YOLOv4](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**ะ”ะธะฐะณั€ะฐะผะผะฐ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ YOLOv4**. ะŸะพะบะฐะทะฐะฝะฐ ัะปะพะถะฝะฐั ัะตั‚ะตะฒะฐั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ YOLOv4, ะฒะบะปัŽั‡ะฐั ะบะพะผะฟะพะฝะตะฝั‚ั‹ ะพัะฝะพะฒะฝะพะน ั‡ะฐัั‚ะธ, ัˆะตะธ ะธ ะณะพะปะพะฒั‹, ะฐ ั‚ะฐะบะถะต ะธั… ะฒะทะฐะธะผะพัะฒัะทะฐะฝะฝั‹ะต ัะปะพะธ ะดะปั ะพะฟั‚ะธะผะฐะปัŒะฝะพะณะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ. + +## ะ’ะฒะตะดะตะฝะธะต + +YOLOv4 ะพะทะฝะฐั‡ะฐะตั‚ You Only Look Once, ะฒะตั€ัะธั 4. ะญั‚ะฐ ะผะพะดะตะปัŒ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะฑั‹ะปะฐ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝะฐ ะดะปั ะฟั€ะตะพะดะพะปะตะฝะธั ะพะณั€ะฐะฝะธั‡ะตะฝะธะน ะฟั€ะตะดั‹ะดัƒั‰ะธั… ะฒะตั€ัะธะน YOLO, ั‚ะฐะบะธั… ะบะฐะบ [YOLOv3](yolov3.md) ะธ ะดั€ัƒะณะธั… ะผะพะดะตะปะตะน ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. 
+Unlike other convolutional neural network (CNN) based object detectors, YOLOv4 is applicable not only to recommendation systems but also to standalone process management and reduced human input. Its operation on conventional graphics processing units (GPUs) allows for mass usage at an affordable price, and it is designed to work in real time on a conventional GPU while requiring only one such GPU for training.
+
+## Architecture
+
+YOLOv4 makes use of several innovative features that work together to optimize its performance. These include Weighted-Residual-Connections (WRC), Cross-Stage-Partial-connections (CSP), Cross mini-Batch Normalization (CmBN), Self-adversarial-training (SAT), Mish activation, Mosaic data augmentation, DropBlock regularization, and CIoU loss. These features are combined to achieve state-of-the-art results.
+
+A typical object detector is composed of several parts, including the input, the backbone, the neck and the head. The YOLOv4 backbone is pre-trained on ImageNet and is used to predict the classes and bounding boxes of objects. The backbone can come from several models, including VGG, ResNet, ResNeXt or DenseNet. The neck part of the detector is used to collect feature maps from different stages and usually includes several bottom-up paths and several top-down paths. The head part is used for the final object detections and classifications.
+
+## Bag of Freebies
+
+YOLOv4 also makes use of methods known as "bag of freebies," techniques that improve the accuracy of the model during training without increasing the cost of inference. Data augmentation is a common bag-of-freebies technique used in object detection; it increases the variability of the input images to improve the robustness of the model. Some examples of data augmentation include photometric distortions and geometric distortions. These techniques help the model generalize better to different types of images.
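+
+As a concrete illustration of the two distortion families, here is a minimal sketch using `torchvision` transforms. This is an illustrative pipeline, not YOLOv4's own Darknet implementation (which also includes Mosaic augmentation), and the image path is a placeholder:
+
+```python
+import torchvision.transforms as T
+from PIL import Image
+
+# Photometric distortion: randomly jitter brightness, contrast, saturation and hue
+photometric = T.ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1)
+
+# Geometric distortion: random rotation, translation and scaling
+geometric = T.RandomAffine(degrees=10, translate=(0.1, 0.1), scale=(0.8, 1.2))
+
+augment = T.Compose([photometric, geometric])
+
+image = Image.open('path/to/bus.jpg')  # any training image
+augmented = augment(image)             # a new, distorted view of the same scene
+```
+
+Note that for detection training the geometric transforms must be applied to the bounding boxes as well, which is why such augmentation normally lives inside the training framework rather than in a standalone preprocessing step.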
+ +## ะžัะพะฑะตะฝะฝะพัั‚ะธ ะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ + +YOLOv4 ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝ ะดะปั ะพะฟั‚ะธะผะฐะปัŒะฝะพะน ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. ะั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ YOLOv4 ะฒะบะปัŽั‡ะฐะตั‚ ะฒ ัะตะฑั CSPDarknet53 ะฒ ะบะฐั‡ะตัั‚ะฒะต ะพัะฝะพะฒั‹, PANet ะฒ ะบะฐั‡ะตัั‚ะฒะต ัˆะตะนะบะธ ะธ YOLOv3 ะฒ ะบะฐั‡ะตัั‚ะฒะต ะณะพะปะพะฒั‹. ะญั‚ะพ ะฟะพะทะฒะพะปัะตั‚ YOLOv4 ะฒั‹ะฟะพะปะฝัั‚ัŒ ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ั ะฒะฟะตั‡ะฐั‚ะปััŽั‰ะตะน ัะบะพั€ะพัั‚ัŒัŽ, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะตะณะพ ะฟะพะดั…ะพะดัั‰ะธะผ ะดะปั ะฟั€ะธะปะพะถะตะฝะธะน ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ. YOLOv4 ั‚ะฐะบะถะต ะพั‚ะปะธั‡ะฐะตั‚ัั ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ะดะพัั‚ะธะณะฐะตั‚ ะฟะตั€ะตะดะพะฒั‹ั… ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ ะฒ ั‚ะตัั‚ะพะฒั‹ั… ะฑะตะฝั‡ะผะฐั€ะบะฐั… ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะะฐ ะผะพะผะตะฝั‚ ะฝะฐะฟะธัะฐะฝะธั ะดะฐะฝะฝะพะณะพ ะดะพะบัƒะผะตะฝั‚ะฐ Ultralytics ะฟะพะบะฐ ะฝะต ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ะผะพะดะตะปะธ YOLOv4. ะŸะพัั‚ะพะผัƒ ะฒัะต ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะธ, ะทะฐะธะฝั‚ะตั€ะตัะพะฒะฐะฝะฝั‹ะต ะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ YOLOv4, ะดะพะปะถะฝั‹ ะพะฑั€ะฐั‰ะฐั‚ัŒัั ะฝะฐะฟั€ัะผัƒัŽ ะบ ั€ะตะฟะพะทะธั‚ะพั€ะธัŽ YOLOv4 ะฝะฐ GitHub ะดะปั ะธะฝัั‚ั€ัƒะบั†ะธะน ะฟะพ ัƒัั‚ะฐะฝะพะฒะบะต ะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธัŽ. + +ะ’ะพั‚ ะบั€ะฐั‚ะบะธะน ะพะฑะทะพั€ ั‚ะธะฟะธั‡ะฝั‹ั… ัˆะฐะณะพะฒ, ะบะพั‚ะพั€ั‹ะต ะฒั‹ ะผะพะถะตั‚ะต ะฟั€ะตะดะฟั€ะธะฝัั‚ัŒ ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั YOLOv4: + +1. ะŸะพัะตั‚ะธั‚ะต ั€ะตะฟะพะทะธั‚ะพั€ะธะน YOLOv4 ะฝะฐ GitHub: [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet). + +2. ะกะปะตะดัƒะนั‚ะต ะธะฝัั‚ั€ัƒะบั†ะธัะผ, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผ ะฒ ั„ะฐะนะปะต README ะดะปั ัƒัั‚ะฐะฝะพะฒะบะธ. ะญั‚ะพ ะพะฑั‹ั‡ะฝะพ ะฒะบะปัŽั‡ะฐะตั‚ ะบะปะพะฝะธั€ะพะฒะฐะฝะธะต ั€ะตะฟะพะทะธั‚ะพั€ะธั, ัƒัั‚ะฐะฝะพะฒะบัƒ ะฝะตะพะฑั…ะพะดะธะผั‹ั… ะทะฐะฒะธัะธะผะพัั‚ะตะน ะธ ะฝะฐัั‚ั€ะพะนะบัƒ ะฝะตะพะฑั…ะพะดะธะผั‹ั… ะฟะตั€ะตะผะตะฝะฝั‹ั… ัั€ะตะดั‹. + +3. ะŸะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัƒัั‚ะฐะฝะพะฒะบะธ ะฒั‹ ะผะพะถะตั‚ะต ะพะฑัƒั‡ะฐั‚ัŒ ะธ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ ะฒ ัะพะพั‚ะฒะตั‚ัั‚ะฒะธะธ ั ะธะฝัั‚ั€ัƒะบั†ะธัะผะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผะธ ะฒ ั€ะตะฟะพะทะธั‚ะพั€ะธะธ. ะžะฑั‹ั‡ะฝะพ ัั‚ะพ ะฒะบะปัŽั‡ะฐะตั‚ ะฟะพะดะณะพั‚ะพะฒะบัƒ ะฒะฐัˆะธั… ะดะฐะฝะฝั‹ั…, ะฝะฐัั‚ั€ะพะนะบัƒ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะผะพะดะตะปะธ, ะพะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะธ ะธ ะทะฐั‚ะตะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะบะพะฝะบั€ะตั‚ะฝั‹ะต ัˆะฐะณะธ ะผะพะณัƒั‚ ะพั‚ะปะธั‡ะฐั‚ัŒัั ะฒ ะทะฐะฒะธัะธะผะพัั‚ะธ ะพั‚ ะฒะฐัˆะตะณะพ ะบะพะฝะบั€ะตั‚ะฝะพะณะพ ัะปัƒั‡ะฐั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะธ ั‚ะตะบัƒั‰ะตะณะพ ัะพัั‚ะพัะฝะธั ั€ะตะฟะพะทะธั‚ะพั€ะธั YOLOv4. ะŸะพัั‚ะพะผัƒ ะฝะฐัั‚ะพัั‚ะตะปัŒะฝะพ ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะพะฑั€ะฐั‰ะฐั‚ัŒัั ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ะบ ะธะฝัั‚ั€ัƒะบั†ะธัะผ, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผ ะฒ ั€ะตะฟะพะทะธั‚ะพั€ะธะธ YOLOv4 ะฝะฐ GitHub. + +ะŸั€ะธะฝะพัะธะผ ะธะทะฒะธะฝะตะฝะธั ะทะฐ ะฒะพะทะผะพะถะฝั‹ะต ะฝะตัƒะดะพะฑัั‚ะฒะฐ, ะธ ะผั‹ ะฟะพัั‚ะฐั€ะฐะตะผัั ะพะฑะฝะพะฒะธั‚ัŒ ัั‚ะพั‚ ะดะพะบัƒะผะตะฝั‚ ั ะฟั€ะธะผะตั€ะฐะผะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะดะปั Ultralytics, ะบะฐะบ ั‚ะพะปัŒะบะพ ะฟะพะดะดะตั€ะถะบะฐ YOLOv4 ะฑัƒะดะตั‚ ั€ะตะฐะปะธะทะพะฒะฐะฝะฐ. 
+
+## Conclusion
+
+YOLOv4 is a powerful and efficient object detection model that strikes a balance between speed and accuracy. Its use of unique features and bag-of-freebies techniques during training allows it to perform excellently in real-time object detection tasks. YOLOv4 can be trained and used by anyone with a conventional GPU, making it accessible and practical for a wide range of applications.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the YOLOv4 authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{bochkovskiy2020yolov4,
+          title={YOLOv4: Optimal Speed and Accuracy of Object Detection},
+          author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao},
+          year={2020},
+          eprint={2004.10934},
+          archivePrefix={arXiv},
+          primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv4 paper can be found on [arXiv](https://arxiv.org/abs/2004.10934). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/AlexeyAB/darknet). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/ru/models/yolov4.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov4.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov4.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ru/models/yolov5.md b/ultralytics/docs/ru/models/yolov5.md
new file mode 100755
index 0000000..84b1b8c
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov5.md
@@ -0,0 +1,113 @@
+---
+comments: true
+description: Discover YOLOv5u, a boosted version of the YOLOv5 model featuring an improved accuracy-speed tradeoff and numerous pretrained models for various object detection tasks.
+keywords: YOLOv5u, object detection, pretrained models, Ultralytics, Inference, Validation, YOLOv5, YOLOv8, anchor-free, objectness-free, real-time, machine learning
+---
+
+# YOLOv5
+
+## Overview
+
+YOLOv5u represents an advancement in object detection methodologies. Originating from the foundational architecture of the [YOLOv5](https://github.com/ultralytics/yolov5) model developed by Ultralytics, YOLOv5u integrates the anchor-free, objectness-free split head, a feature previously introduced in the [YOLOv8](yolov8.md) models.
+This adaptation refines the model's architecture, leading to an improved accuracy-speed tradeoff in object detection tasks. Given the empirical results and its derived features, YOLOv5u provides an efficient alternative for those seeking robust solutions in both research and practical applications.
+
+![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png)
+
+## Key Features
+
+- **Anchor-free Split Head:** Traditional object detection models rely on predefined anchor boxes to predict object locations. YOLOv5u modernizes this approach: by adopting an anchor-free split head, it enables a more flexible and adaptive detection mechanism, consequently enhancing performance in diverse scenarios.
+
+- **Optimized Accuracy-Speed Tradeoff:** Speed and accuracy often pull in opposite directions, but YOLOv5u challenges this tradeoff. It offers a calibrated balance, ensuring real-time detections without compromising accuracy. This feature is particularly valuable for applications that demand swift responses, such as autonomous vehicles, robotics, and real-time video analytics.
+
+- **Variety of Pretrained Models:** Understanding that different tasks require different toolsets, YOLOv5u provides a range of pretrained models. Whether you're focusing on Inference, Validation or Training, there's a tailor-made model awaiting you (see the short sketch after this list). This variety ensures you're not using a one-size-fits-all solution, but a model specifically tuned for your unique challenge.
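+
+To make the naming scheme concrete, a minimal sketch: the trailing `u` in a weight name selects the anchor-free YOLOv5u checkpoints listed in the table below (assuming the weights are fetched automatically, as with other Ultralytics weights):
+
+```python
+from ultralytics import YOLO
+
+# The trailing 'u' in the weight name selects the anchor-free YOLOv5u variant
+model = YOLO('yolov5nu.pt')
+
+# Confirm the architecture and parameter count
+model.info()
+```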
+
+## Supported Tasks and Modes
+
+The YOLOv5u models, with various pretrained weights, excel in [Object Detection](../tasks/detect.md) tasks. They support a comprehensive range of operating modes, making them suitable for diverse applications from development to deployment.
+
+| Model Type | Pretrained Weights                                                                                                          | Task                                   | Inference | Validation | Training | Export |
+|------------|-----------------------------------------------------------------------------------------------------------------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv5u    | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides a detailed overview of the YOLOv5u model variants, their primary object detection task, and their support for operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv5u models in a wide range of object detection tasks.
+
+## Performance Metrics
+
+!!! Performance
+
+    === "Detection"
+
+        See the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pretrained classes.
+
+        | Model | YAML | size<br>(pixels) | mAPval<br>50-95 | Speed<br>CPU ONNX<br>(ms) | Speed<br>A100 TensorRT<br>(ms) | params<br>(M) | FLOPs<br>(B) |
+        |-------|------|------------------|-----------------|---------------------------|--------------------------------|---------------|--------------|
+        | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 |
+        | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 |
+        | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 |
+        | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 |
+        | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 |
+        | | | | | | | | |
+        | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 |
+        | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 |
+        | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 |
+        | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 |
+        | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 |
+
+## Usage Examples
+
+This example provides simple YOLOv5 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a COCO-pretrained YOLOv5n model
+        model = YOLO('yolov5n.pt')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv5n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Load a COCO-pretrained YOLOv5n model and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640
+
+        # Load a COCO-pretrained YOLOv5n model and run inference on the 'bus.jpg' image
+        yolo predict model=yolov5n.pt source=path/to/bus.jpg
+        ```
+
+## Citations and Acknowledgements
+
+If you use YOLOv5 or YOLOv5u in your research, please cite the Ultralytics YOLOv5 repository as follows:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @software{yolov5,
+          title = {Ultralytics YOLOv5},
+          author = {Glenn Jocher},
+          year = {2020},
+          version = {7.0},
+          license = {AGPL-3.0},
+          url = {https://github.com/ultralytics/yolov5},
+          doi = {10.5281/zenodo.3908559},
+          orcid = {0000-0001-5950-6979}
+        }
+        ```
+
+Please note that YOLOv5 models are provided under the [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) and [Enterprise](https://ultralytics.com/license) licenses.
diff --git a/ultralytics/docs/ru/models/yolov5.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov5.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov5.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ru/models/yolov6.md b/ultralytics/docs/ru/models/yolov6.md
new file mode 100755
index 0000000..34ac079
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov6.md
@@ -0,0 +1,107 @@
+---
+comments: true
+description: Explore Meituan YOLOv6, a state-of-the-art object detection model striking a balance between speed and accuracy. Dive into its features, pretrained models, and Python usage.
+keywords: Meituan YOLOv6, object detection, Ultralytics, YOLOv6 docs, Bi-directional Concatenation, Anchor-Aided Training, pretrained models, real-time applications
+---
+
+# Meituan YOLOv6
+
+## Overview
+
+[Meituan](https://about.meituan.com/) YOLOv6 is a cutting-edge object detector that offers a remarkable balance between speed and accuracy, making it a popular choice for real-time applications. This model introduces several notable enhancements in its architecture and training scheme, including the implementation of a Bi-directional Concatenation (BiC) module, an anchor-aided training (AAT) strategy, and an improved backbone and neck design for state-of-the-art accuracy on the COCO dataset.
+
+![Meituan YOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png)
+![Model example image](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png)
+**Overview of YOLOv6.** Model architecture diagram showing the redesigned network components and training strategies that have led to significant performance improvements. (a) The neck of YOLOv6 (N and S are shown). Note that for M/L, RepBlocks is replaced with CSPStackRep. (b) The structure of a BiC module. (c) A SimCSPSPPF block. ([source](https://arxiv.org/pdf/2301.05586.pdf)).
+
+### Key Features
+
+- **Bi-directional Concatenation (BiC) Module:** YOLOv6 introduces a BiC module in the neck of the detector, enhancing localization signals and delivering performance gains with negligible speed degradation.
+- **Anchor-Aided Training (AAT) Strategy:** This model proposes AAT to enjoy the benefits of both anchor-based and anchor-free paradigms without compromising inference efficiency.
+- **Enhanced Backbone and Neck Design:** By deepening YOLOv6 to include another stage in the backbone and neck, this model achieves state-of-the-art performance on the COCO dataset at high-resolution input.
+- **Self-Distillation Strategy:** A new self-distillation strategy is implemented to boost the performance of the smaller YOLOv6 models, enhancing the auxiliary regression branch during training and removing it at inference time to avoid a marked speed decline.
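+
+The BiC idea can be illustrated with a small, self-contained PyTorch sketch. This shows only the fusion pattern, with invented channel arguments, and is not the exact block from the YOLOv6 paper (which combines it with the RepBlock/CSPStackRep components shown in the diagram above):
+
+```python
+import torch
+import torch.nn as nn
+
+class BiCSketch(nn.Module):
+    """Illustrative bi-directional concatenation: fuse a downsampled
+    higher-resolution map, the current map, and an upsampled
+    lower-resolution map, then mix them with a 1x1 convolution."""
+
+    def __init__(self, c_high: int, c_cur: int, c_low: int, c_out: int):
+        super().__init__()
+        self.down = nn.Conv2d(c_high, c_cur, 3, stride=2, padding=1)  # higher-res -> current size
+        self.up = nn.Upsample(scale_factor=2, mode='nearest')         # lower-res -> current size
+        self.fuse = nn.Conv2d(2 * c_cur + c_low, c_out, 1)            # channel mixing
+
+    def forward(self, x_high, x_cur, x_low):
+        return self.fuse(torch.cat([self.down(x_high), x_cur, self.up(x_low)], dim=1))
+
+# Example shapes: 80x80, 40x40 and 20x20 feature maps from three pyramid levels
+x_high, x_cur, x_low = torch.rand(1, 64, 80, 80), torch.rand(1, 128, 40, 40), torch.rand(1, 256, 20, 20)
+out = BiCSketch(64, 128, 256, 128)(x_high, x_cur, x_low)  # -> (1, 128, 40, 40)
+```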
+
+## Performance Metrics
+
+YOLOv6 provides various pretrained models at different scales:
+
+- YOLOv6-N: 37.5% AP on the COCO val2017 dataset at 1187 FPS with an NVIDIA Tesla T4 GPU.
+- YOLOv6-S: 45.0% AP at 484 FPS.
+- YOLOv6-M: 50.0% AP at 226 FPS.
+- YOLOv6-L: 52.8% AP at 116 FPS.
+- YOLOv6-L6: State-of-the-art accuracy in real time.
+
+YOLOv6 also provides quantized models for different precisions, as well as models optimized for mobile platforms.
+
+## Usage Examples
+
+This example provides simple YOLOv6 training and inference examples. For full documentation on these and other [modes](../modes/index.md), see the [Predict](../modes/predict.md), [Train](../modes/train.md), [Val](../modes/val.md) and [Export](../modes/export.md) docs pages.
+!!! Example "Example"
+
+    === "Python"
+
+        PyTorch pretrained `*.pt` models as well as configuration `*.yaml` files can be passed to the `YOLO()` class to create a model instance in Python:
+
+        ```python
+        from ultralytics import YOLO
+
+        # Build a YOLOv6n model from scratch
+        model = YOLO('yolov6n.yaml')
+
+        # Display model information (optional)
+        model.info()
+
+        # Train the model on the COCO8 example dataset for 100 epochs
+        results = model.train(data='coco8.yaml', epochs=100, imgsz=640)
+
+        # Run inference with the YOLOv6n model on the 'bus.jpg' image
+        results = model('path/to/bus.jpg')
+        ```
+
+    === "CLI"
+
+        CLI commands are available to run the models directly:
+
+        ```bash
+        # Build a YOLOv6n model from scratch and train it on the COCO8 example dataset for 100 epochs
+        yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640
+
+        # Build a YOLOv6n model from scratch and run inference on the 'bus.jpg' image
+        yolo predict model=yolov6n.yaml source=path/to/bus.jpg
+        ```
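+
+The examples above build YOLOv6 from a YAML configuration. Assuming the corresponding `*.pt` checkpoints listed in the table in the next section are available (downloaded or trained locally), they can be loaded in the same way:
+
+```python
+from ultralytics import YOLO
+
+# Load YOLOv6-N weights instead of building from YAML
+# (assumes a 'yolov6-n.pt' checkpoint is available locally)
+model = YOLO('yolov6-n.pt')
+
+# Run inference on an image
+results = model('path/to/bus.jpg')
+```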
+
+## Supported Tasks and Modes
+
+The YOLOv6 series offers a range of models, each optimized for high-performance [Object Detection](../tasks/detect.md). These models cater to varying computational needs and accuracy requirements, making them versatile for a wide array of applications.
+
+| Model Type | Pretrained Weights | Tasks Supported                        | Inference | Validation | Training | Export |
+|------------|--------------------|----------------------------------------|-----------|------------|----------|--------|
+| YOLOv6-N   | `yolov6-n.pt`      | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-S   | `yolov6-s.pt`      | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-M   | `yolov6-m.pt`      | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-L   | `yolov6-l.pt`      | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+| YOLOv6-L6  | `yolov6-l6.pt`     | [Object Detection](../tasks/detect.md) | ✅        | ✅         | ✅       | ✅     |
+
+This table provides a detailed overview of the YOLOv6 model variants, highlighting their capabilities in object detection tasks and their compatibility with operational modes such as [Inference](../modes/predict.md), [Validation](../modes/val.md), [Training](../modes/train.md) and [Export](../modes/export.md). This comprehensive support ensures that users can fully leverage the capabilities of YOLOv6 models in a broad range of object detection scenarios.
+
+## Citations and Acknowledgements
+
+We would like to acknowledge the authors for their significant contributions in the field of real-time object detection:
+
+!!! Quote ""
+
+    === "BibTeX"
+
+        ```bibtex
+        @misc{li2023yolov6,
+          title={YOLOv6 v3.0: A Full-Scale Reloading},
+          author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu},
+          year={2023},
+          eprint={2301.05586},
+          archivePrefix={arXiv},
+          primaryClass={cs.CV}
+        }
+        ```
+
+The original YOLOv6 paper can be found on [arXiv](https://arxiv.org/abs/2301.05586). The authors have made their work publicly available, and the codebase can be accessed on [GitHub](https://github.com/meituan/YOLOv6). We appreciate their efforts in advancing the field and making their work accessible to the broader community.
diff --git a/ultralytics/docs/ru/models/yolov6.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov6.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov6.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/docs/ru/models/yolov7.md b/ultralytics/docs/ru/models/yolov7.md
new file mode 100755
index 0000000..2d03172
--- /dev/null
+++ b/ultralytics/docs/ru/models/yolov7.md
@@ -0,0 +1,65 @@
+---
+comments: true
ะŸะพะทะฝะฐะบะพะผัŒั‚ะตััŒ ั ะตะณะพ ะฒั‹ัะพะบะพะน ัะบะพั€ะพัั‚ัŒัŽ, ะฒะฟะตั‡ะฐั‚ะปััŽั‰ะตะน ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ัƒะฝะธะบะฐะปัŒะฝั‹ะผ ั„ะพะบัƒัะพะผ ะฝะฐ ั‚ั€ะตะฝะธั€ัƒะตะผะพะน ะพะฟั‚ะธะผะธะทะฐั†ะธะธ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ "ะผะตัˆะบะฐ ะฑะตัะฟะปะฐั‚ะฝั‹ั… ัƒะปัƒั‡ัˆะตะฝะธะน". +keywords: YOLOv7, ะดะตั‚ะตะบั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะฝะพะฒะตะนัˆะธะน, Ultralytics, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… MS COCO, ั€ะตะฟะฐั€ะฐะผะตั‚ั€ะธะทะฐั†ะธั ะผะพะดะตะปะธ, ะดะธะฝะฐะผะธั‡ะตัะบะพะต ะฟั€ะธัะฒะพะตะฝะธะต ะผะตั‚ะพะบ, ั€ะฐััˆะธั€ะตะฝะฝะพะต ะผะฐััˆั‚ะฐะฑะธั€ะพะฒะฐะฝะธะต, ะบะพะผะฟะฐัƒะฝะด-ะผะฐััˆั‚ะฐะฑะธั€ะพะฒะฐะฝะธะต +--- + +# YOLOv7: ะขั€ะตะฝะธั€ัƒะตะผั‹ะน "ะผะตัˆะพะบ ะฑะตัะฟะปะฐั‚ะฝั‹ั… ัƒะปัƒั‡ัˆะตะฝะธะน" + +YOLOv7 - ัั‚ะพ ะฝะพะฒะตะนัˆะธะน ะดะตั‚ะตะบั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะบะพั‚ะพั€ั‹ะน ะฟั€ะตะฒะพัั…ะพะดะธั‚ ะฒัะต ะธะทะฒะตัั‚ะฝั‹ะต ะดะตั‚ะตะบั‚ะพั€ั‹ ะพะฑัŠะตะบั‚ะพะฒ ะฟะพ ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ ะฒ ะดะธะฐะฟะฐะทะพะฝะต ะพั‚ 5 ะบ/ั ะดะพ 160 ะบ/ั. ะฃ ะฝะตะณะพ ัะฐะผะฐั ะฒั‹ัะพะบะฐั ั‚ะพั‡ะฝะพัั‚ัŒ (56,8% AP) ัั€ะตะดะธ ะฒัะตั… ะธะทะฒะตัั‚ะฝั‹ั… ะดะตั‚ะตะบั‚ะพั€ะพะฒ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ ัะพ ัะบะพั€ะพัั‚ัŒัŽ 30 ะบ/ั ะธ ะฒั‹ัˆะต ะฝะฐ GPU V100. ะšั€ะพะผะต ั‚ะพะณะพ, YOLOv7 ะฟั€ะตะฒะพัั…ะพะดะธั‚ ะดั€ัƒะณะธะต ะดะตั‚ะตะบั‚ะพั€ั‹ ะพะฑัŠะตะบั‚ะพะฒ, ั‚ะฐะบะธะต ะบะฐะบ YOLOR, YOLOX, Scaled-YOLOv4, YOLOv5 ะธ ะผะฝะพะณะธะต ะดั€ัƒะณะธะต, ะฒ ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ. ะœะพะดะตะปัŒ ะพะฑัƒั‡ะฐะตั‚ัั ั ะฝัƒะปั ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… MS COCO ะฑะตะท ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะดั€ัƒะณะธั… ะฝะฐะฑะพั€ะพะฒ ะดะฐะฝะฝั‹ั… ะธะปะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฒะตัะพะฒ. ะ˜ัั…ะพะดะฝั‹ะน ะบะพะด ะดะปั YOLOv7 ะดะพัั‚ัƒะฟะตะฝ ะฝะฐ GitHub. + +![ะกั€ะฐะฒะฝะตะฝะธะต YOLOv7 ั ะฝะพะฒะตะนัˆะธะผะธ ะดะตั‚ะตะบั‚ะพั€ะฐะผะธ ะพะฑัŠะตะบั‚ะพะฒ](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**ะกั€ะฐะฒะฝะตะฝะธะต ะฝะพะฒะตะนัˆะธั… ะดะตั‚ะตะบั‚ะพั€ะพะฒ ะพะฑัŠะตะบั‚ะพะฒ.** ะ˜ะท ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ ะฒ ะขะฐะฑะปะธั†ะต 2 ะผั‹ ะทะฝะฐะตะผ, ั‡ั‚ะพ ะฟั€ะตะดะปะพะถะตะฝะฝั‹ะน ะผะตั‚ะพะด ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะปัƒั‡ัˆะตะต ัะพะพั‚ะฝะพัˆะตะฝะธะต ัะบะพั€ะพัั‚ัŒ-ั‚ะพั‡ะฝะพัั‚ัŒ ะฒ ั†ะตะปะพะผ. ะ•ัะปะธ ัั€ะฐะฒะฝะธะฒะฐั‚ัŒ YOLOv7-tiny-SiLU ั YOLOv5-N (r6.1), ะฝะฐัˆ ะผะตั‚ะพะด ะฑั‹ัั‚ั€ะตะต ะฝะฐ 127 ะบ/ั ะธ ั‚ะพั‡ะฝะตะต ะฝะฐ 10,7% ะฟะพ AP. ะšั€ะพะผะต ั‚ะพะณะพ, YOLOv7 ะธะผะตะตั‚ ั‚ะพั‡ะฝะพัั‚ัŒ AP 51,4% ะฟั€ะธ ัะบะพั€ะพัั‚ะธ ะบะฐะดั€ะพะฒ 161 ะบ/ั, ะฒ ั‚ะพ ะฒั€ะตะผั ะบะฐะบ PPYOLOE-L ั ั‚ะฐะบะพะน ะถะต ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธะผะตะตั‚ ั‚ะพะปัŒะบะพ ัะบะพั€ะพัั‚ัŒ ะบะฐะดั€ะพะฒ 78 ะบ/ั. ะงั‚ะพ ะบะฐัะฐะตั‚ัั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ, YOLOv7 ัะพะบั€ะฐั‰ะฐะตั‚ ะธั… ะฝะฐ 41% ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั PPYOLOE-L. ะ•ัะปะธ ัั€ะฐะฒะฝะธั‚ัŒ YOLOv7-X ั 114 ะบ/ั ัะบะพั€ะพัั‚ัŒัŽ ะฒั‹ะฒะพะดะฐ ั YOLOv5-L (r6.1) ั 99 ะบ/ั ัะบะพั€ะพัั‚ัŒัŽ ะฒั‹ะฒะพะดะฐ, YOLOv7-X ะผะพะถะตั‚ ะฟะพะฒั‹ัะธั‚ัŒ AP ะฝะฐ 3,9%. ะ•ัะปะธ ัั€ะฐะฒะฝะธั‚ัŒ YOLOv7-X ั YOLOv5-X (r6.1) ั ะฟะพั…ะพะถะธะผะธ ะผะฐััˆั‚ะฐะฑะฐะผะธ, ัะบะพั€ะพัั‚ัŒ ะฒั‹ะฒะพะดะฐ YOLOv7-X ะฝะฐ 31 ะบ/ั ะฒั‹ัˆะต. ะšั€ะพะผะต ั‚ะพะณะพ, ะฟะพ ะบะพะปะธั‡ะตัั‚ะฒัƒ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะธ ะฒั‹ั‡ะธัะปะตะฝะธะน YOLOv7-X ัะพะบั€ะฐั‰ะฐะตั‚ ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะฝะฐ 22% ะธ ะฒั‹ั‡ะธัะปะตะฝะธั ะฝะฐ 8% ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั YOLOv5-X (r6.1), ะฝะพ ะฟะพะฒั‹ัˆะฐะตั‚ AP ะฝะฐ 2,2% ([ะ˜ัั‚ะพั‡ะฝะธะบ](https://arxiv.org/pdf/2207.02696.pdf)). 
+
+## Overview
+
+Real-time object detection is an important component of many computer vision systems, including multi-object tracking, autonomous driving, robotics, and medical image analysis. In recent years, real-time object detection development has focused on designing efficient architectures and improving inference speed on various CPUs, GPUs, and neural processing units (NPUs). YOLOv7 supports both mobile GPUs and GPU devices, from the edge to the cloud.
+
+Unlike traditional real-time object detectors that focus on architecture optimization, YOLOv7 introduces a focus on optimizing the training process. This includes modules and optimization methods designed to improve the accuracy of object detection without increasing the inference cost, a concept known as the "trainable bag-of-freebies".
+
+## Key Features
+
+YOLOv7 introduces several key features:
+
+1. **Model Re-parameterization**: YOLOv7 proposes a planned re-parameterized model, a strategy applicable to layers in different networks based on the concept of the gradient propagation path. The simplest instance of the idea is sketched after this list.
+
+2. **Dynamic Label Assignment**: Training a model with multiple output layers presents a new issue: "How to assign dynamic targets for the outputs of different branches?" To solve this problem, YOLOv7 introduces a new label assignment method called coarse-to-fine lead guided label assignment.
+
+3. **Extended and Compound Scaling**: YOLOv7 proposes "extend" and "compound scaling" methods for the real-time object detector that can effectively utilize parameters and computation.
**ะญั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ**: ะœะตั‚ะพะด, ะฟั€ะตะดะปะพะถะตะฝะฝั‹ะน YOLOv7, ะผะพะถะตั‚ ัั„ั„ะตะบั‚ะธะฒะฝะพ ัะพะบั€ะฐั‰ะฐั‚ัŒ ะฟั€ะธะผะตั€ะฝะพ ะฝะฐ 40% ะบะพะปะธั‡ะตัั‚ะฒะพ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะธ ะฝะฐ 50% ะฒั‹ั‡ะธัะปะตะฝะธะน ะธะทะฒะตัั‚ะฝะพะณะพ ะดะตั‚ะตะบั‚ะพั€ะฐ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะพะฑะตัะฟะตั‡ะธะฒะฐั ะฑะพะปะตะต ะฑั‹ัั‚ั€ัƒัŽ ัะบะพั€ะพัั‚ัŒ ะฒั‹ะฒะพะดะฐ ะธ ะฑะพะปะตะต ะฒั‹ัะพะบัƒัŽ ั‚ะพั‡ะฝะพัั‚ัŒ ะดะตั‚ะตะบั‚ะธั€ะพะฒะฐะฝะธั. + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะะฐ ะผะพะผะตะฝั‚ ะฝะฐะฟะธัะฐะฝะธั ะดะฐะฝะฝะพะณะพ ะดะพะบัƒะผะตะฝั‚ะฐ Ultralytics ะฒ ะฝะฐัั‚ะพัั‰ะตะต ะฒั€ะตะผั ะฝะต ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ะผะพะดะตะปะธ YOLOv7. ะŸะพัั‚ะพะผัƒ ะฒัะต ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะธ, ะทะฐะธะฝั‚ะตั€ะตัะพะฒะฐะฝะฝั‹ะต ะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ YOLOv7, ะดะพะปะถะฝั‹ ะพะฑั€ะฐั‚ะธั‚ัŒัั ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ะบ ั€ะตะฟะพะทะธั‚ะพั€ะธัŽ YOLOv7 ะฝะฐ GitHub ะดะปั ะธะฝัั‚ั€ัƒะบั†ะธะน ะฟะพ ัƒัั‚ะฐะฝะพะฒะบะต ะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธัŽ. + +ะ’ะพั‚ ะบั€ะฐั‚ะบะธะน ะพะฑะทะพั€ ั‚ะธะฟะธั‡ะฝั‹ั… ัˆะฐะณะพะฒ, ะบะพั‚ะพั€ั‹ะต ะผะพะณัƒั‚ ะฟะพั‚ั€ะตะฑะพะฒะฐั‚ัŒัั ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั YOLOv7: + +1. ะŸะพัะตั‚ะธั‚ะต ั€ะตะฟะพะทะธั‚ะพั€ะธะน YOLOv7 ะฝะฐ GitHub: [https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7). + +2. ะกะปะตะดัƒะนั‚ะต ะธะฝัั‚ั€ัƒะบั†ะธัะผ, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผ ะฒ ั„ะฐะนะปะต README ะฟะพ ัƒัั‚ะฐะฝะพะฒะบะต. ะžะฑั‹ั‡ะฝะพ ัั‚ะพ ะฒะบะปัŽั‡ะฐะตั‚ ะบะปะพะฝะธั€ะพะฒะฐะฝะธะต ั€ะตะฟะพะทะธั‚ะพั€ะธั, ัƒัั‚ะฐะฝะพะฒะบัƒ ะฝะตะพะฑั…ะพะดะธะผั‹ั… ะทะฐะฒะธัะธะผะพัั‚ะตะน ะธ ะฝะฐัั‚ั€ะพะนะบัƒ ะฝะตะพะฑั…ะพะดะธะผั‹ั… ะฟะตั€ะตะผะตะฝะฝั‹ั… ัั€ะตะดั‹. + +3. ะŸะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัƒัั‚ะฐะฝะพะฒะบะธ ะฒั‹ ะผะพะถะตั‚ะต ะพะฑัƒั‡ะฐั‚ัŒ ะธ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ ะฒ ัะพะพั‚ะฒะตั‚ัั‚ะฒะธะธ ั ะธะฝัั‚ั€ัƒะบั†ะธัะผะธ ะฟะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธัŽ, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผะธ ะฒ ั€ะตะฟะพะทะธั‚ะพั€ะธะธ. ะžะฑั‹ั‡ะฝะพ ัั‚ะพ ะฒะบะปัŽั‡ะฐะตั‚ ะฟะพะดะณะพั‚ะพะฒะบัƒ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั…, ะฝะฐัั‚ั€ะพะนะบัƒ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะผะพะดะตะปะธ, ะพะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะธ, ะฐ ะทะฐั‚ะตะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะดะตั‚ะตะบั‚ะธั€ะพะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะบะพะฝะบั€ะตั‚ะฝั‹ะต ัˆะฐะณะธ ะผะพะณัƒั‚ ะฒะฐั€ัŒะธั€ะพะฒะฐั‚ัŒัั ะฒ ะทะฐะฒะธัะธะผะพัั‚ะธ ะพั‚ ะฒะฐัˆะตะณะพ ะบะพะฝะบั€ะตั‚ะฝะพะณะพ ัะปัƒั‡ะฐั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะธ ั‚ะตะบัƒั‰ะตะณะพ ัะพัั‚ะพัะฝะธั ั€ะตะฟะพะทะธั‚ะพั€ะธั YOLOv7. ะŸะพัั‚ะพะผัƒ ะฝะฐัั‚ะพัั‚ะตะปัŒะฝะพ ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะพะฑั€ะฐั‚ะธั‚ัŒัั ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ะบ ะธะฝัั‚ั€ัƒะบั†ะธัะผ, ะฟั€ะตะดะพัั‚ะฐะฒะปะตะฝะฝั‹ะผ ะฒ ั€ะตะฟะพะทะธั‚ะพั€ะธะธ YOLOv7 ะฝะฐ GitHub. + +ะœั‹ ัะพะถะฐะปะตะตะผ ะพะฑะพ ะฒัะตั… ะฝะตัƒะดะพะฑัั‚ะฒะฐั…, ะบะพั‚ะพั€ั‹ะต ัั‚ะพ ะผะพะถะตั‚ ะฒั‹ะทะฒะฐั‚ัŒ, ะธ ะฑัƒะดะตะผ ัั‚ะฐั€ะฐั‚ัŒัั ะพะฑะฝะพะฒะปัั‚ัŒ ัั‚ะพั‚ ะดะพะบัƒะผะตะฝั‚ ั ะฟั€ะธะผะตั€ะฐะผะธ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะดะปั Ultralytics, ะบะฐะบ ั‚ะพะปัŒะบะพ ะฑัƒะดะตั‚ ั€ะตะฐะปะธะทะพะฒะฐะฝะฐ ะฟะพะดะดะตั€ะถะบะฐ YOLOv7. + +## ะฆะธั‚ะธั€ะพะฒะฐะฝะธั ะธ ะฑะปะฐะณะพะดะฐั€ะฝะพัั‚ะธ + +ะœั‹ ั…ะพั‚ะตะปะธ ะฑั‹ ะฒั‹ั€ะฐะทะธั‚ัŒ ะฟั€ะธะทะฝะฐั‚ะตะปัŒะฝะพัั‚ัŒ ะฐะฒั‚ะพั€ะฐะผ YOLOv7 ะทะฐ ะธั… ะทะฝะฐั‡ะธั‚ะตะปัŒะฝั‹ะน ะฒะบะปะฐะด ะฒ ะพะฑะปะฐัั‚ะธ ะดะตั‚ะตะบั‚ะธั€ะพะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ: + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +ะ˜ัั…ะพะดะฝัƒัŽ ัั‚ะฐั‚ัŒัŽ YOLOv7 ะผะพะถะฝะพ ะฝะฐะนั‚ะธ ะฝะฐ [arXiv](https://arxiv.org/pdf/2207.02696.pdf). ะะฒั‚ะพั€ั‹ ะพะฟัƒะฑะปะธะบะพะฒะฐะปะธ ัะฒะพัŽ ั€ะฐะฑะพั‚ัƒ ะฟัƒะฑะปะธั‡ะฝะพ, ะธ ะบะพะด ะดะพัั‚ัƒะฟะตะฝ ะฝะฐ [GitHub](https://github.com/WongKinYiu/yolov7). ะœั‹ ั†ะตะฝะธะผ ะธั… ัƒัะธะปะธั ะฒ ัะพะฒะตั€ัˆะตะฝัั‚ะฒะพะฒะฐะฝะธะธ ัั‚ะพะน ะพะฑะปะฐัั‚ะธ ะธ ะดะพัั‚ัƒะฟะฝะพัั‚ะธ ัะฒะพะตะน ั€ะฐะฑะพั‚ั‹ ะดะปั ัˆะธั€ะพะบะพะน ะพะฑั‰ะตัั‚ะฒะตะฝะฝะพัั‚ะธ. diff --git a/ultralytics/docs/ru/models/yolov7.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/models/yolov8.md b/ultralytics/docs/ru/models/yolov8.md new file mode 100755 index 0000000..da8ce41 --- /dev/null +++ b/ultralytics/docs/ru/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: ะ˜ะทัƒั‡ะธั‚ะต ะทะฐั…ะฒะฐั‚ั‹ะฒะฐัŽั‰ะธะต ะฒะพะทะผะพะถะฝะพัั‚ะธ YOLOv8, ะฟะพัะปะตะดะฝะตะน ะฒะตั€ัะธะธ ะฝะฐัˆะตะณะพ ะดะตั‚ะตะบั‚ะพั€ะฐ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ! ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะฟะตั€ะตะดะพะฒะฐั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ, ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ ะธ ะพะฟั‚ะธะผะฐะปัŒะฝะพะต ัะพั‡ะตั‚ะฐะฝะธะต ั‚ะพั‡ะฝะพัั‚ะธ ะธ ัะบะพั€ะพัั‚ะธ ะดะตะปะฐัŽั‚ YOLOv8 ะธะดะตะฐะปัŒะฝั‹ะผ ะฒั‹ะฑะพั€ะพะผ ะดะปั ะฒะฐัˆะธั… ะทะฐะดะฐั‡ ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ ะพะฑัŠะตะบั‚ะพะฒ. +keywords: YOLOv8, Ultralytics, ะดะตั‚ะตะบั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ, ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ัะตั€ะธั YOLO, ะฟะตั€ะตะดะพะฒะฐั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ะฐ, ั‚ะพั‡ะฝะพัั‚ัŒ, ัะบะพั€ะพัั‚ัŒ +--- + +# YOLOv8 + +## ะžะฑะทะพั€ + +YOLOv8 - ัั‚ะพ ะฟะพัะปะตะดะฝัั ะฒะตั€ัะธั ะฒ ัะตั€ะธะธ ะดะตั‚ะตะบั‚ะพั€ะพะฒ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ YOLO, ะพะฑะตัะฟะตั‡ะธะฒะฐัŽั‰ะฐั ะฟะตั€ะตะดะพะฒัƒัŽ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฒ ั‚ะตั€ะผะธะฝะฐั… ั‚ะพั‡ะฝะพัั‚ะธ ะธ ัะบะพั€ะพัั‚ะธ. ะžัะฝะพะฒั‹ะฒะฐัััŒ ะฝะฐ ะดะพัั‚ะธะถะตะฝะธัั… ะฟั€ะตะดั‹ะดัƒั‰ะธั… ะฒะตั€ัะธะน YOLO, YOLOv8 ะฒะฒะพะดะธั‚ ะฝะพะฒั‹ะต ะฒะพะทะผะพะถะฝะพัั‚ะธ ะธ ะพะฟั‚ะธะผะธะทะฐั†ะธะธ, ะดะตะปะฐั ะตะณะพ ะธะดะตะฐะปัŒะฝั‹ะผ ะฒั‹ะฑะพั€ะพะผ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡ ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ ะพะฑัŠะตะบั‚ะพะฒ ะฒ ัˆะธั€ะพะบะพะผ ัะฟะตะบั‚ั€ะต ะฟั€ะธะปะพะถะตะฝะธะน. + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## ะžัะฝะพะฒะฝั‹ะต ะฒะพะทะผะพะถะฝะพัั‚ะธ + +- **ะŸะตั€ะตะดะพะฒั‹ะต ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ ะพัะฝะพะฒั‹ ะธ ัˆะตะธ:** YOLOv8 ะธัะฟะพะปัŒะทัƒะตั‚ ะฟะตั€ะตะดะพะฒั‹ะต ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ ะพัะฝะพะฒั‹ ะธ ัˆะตะธ, ั‡ั‚ะพ ะฟั€ะธะฒะพะดะธั‚ ะบ ัƒะปัƒั‡ัˆะตะฝะฝะพะผัƒ ะธะทะฒะปะตั‡ะตะฝะธัŽ ะฟั€ะธะทะฝะฐะบะพะฒ ะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ. 
+- **ะšะปัŽั‡ะตะฒะฐั ะณะพะปะพะฒะฐ Ultralytics ะฑะตะท ัะบะพั€ะตะน:** YOLOv8 ะฟั€ะธะผะตะฝัะตั‚ ะบะปัŽั‡ะตะฒัƒัŽ ะณะพะปะพะฒัƒ Ultralytics ะฑะตะท ัะบะพั€ะตะน, ั‡ั‚ะพ ัะฟะพัะพะฑัั‚ะฒัƒะตั‚ ะฑะพะปะตะต ั‚ะพั‡ะฝะพะผัƒ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ ะธ ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝะพะผัƒ ะฟั€ะพั†ะตัััƒ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั ัะบะพั€ะฝั‹ะผะธ ะฟะพะดั…ะพะดะฐะผะธ. +- **ะžะฟั‚ะธะผะฐะปัŒะฝะพะต ัะพั‡ะตั‚ะฐะฝะธะต ั‚ะพั‡ะฝะพัั‚ะธ ะธ ัะบะพั€ะพัั‚ะธ:** ะก ะพัะฝะพะฒะฝั‹ะผ ะฐะบั†ะตะฝั‚ะพะผ ะฝะฐ ะฟะพะดะดะตั€ะถะฐะฝะธะธ ะพะฟั‚ะธะผะฐะปัŒะฝะพะณะพ ะฑะฐะปะฐะฝัะฐ ะผะตะถะดัƒ ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ัะบะพั€ะพัั‚ัŒัŽ, YOLOv8 ะฟะพะดั…ะพะดะธั‚ ะดะปั ะทะฐะดะฐั‡ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ะพะฑะปะฐัั‚ัั… ะฟั€ะธะผะตะฝะตะฝะธั. +- **ะ ะฐะทะฝะพะพะฑั€ะฐะทะธะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะผะพะดะตะปะตะน:** YOLOv8 ะฟั€ะตะดะปะฐะณะฐะตั‚ ั€ัะด ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะผะพะดะตะปะตะน ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡ ะธ ั‚ั€ะตะฑะพะฒะฐะฝะธะน ะบ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ, ั‡ั‚ะพ ัƒะฟั€ะพั‰ะฐะตั‚ ะฒั‹ะฑะพั€ ะฟะพะดั…ะพะดัั‰ะตะน ะผะพะดะตะปะธ ะดะปั ะบะพะฝะบั€ะตั‚ะฝะพะณะพ ัะปัƒั‡ะฐั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั. + +## ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ ะธ ั€ะตะถะธะผั‹ ั€ะฐะฑะพั‚ั‹ + +ะกะตั€ะธั YOLOv8 ะฟั€ะตะดะปะฐะณะฐะตั‚ ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ะต ะผะพะดะตะปะธ, ะบะฐะถะดะฐั ะธะท ะบะพั‚ะพั€ั‹ั… ัะฟะตั†ะธะฐะปะธะทะธั€ะพะฒะฐะฝะฐ ะดะปั ะบะพะฝะบั€ะตั‚ะฝั‹ั… ะทะฐะดะฐั‡ ะฒ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะผ ะทั€ะตะฝะธะธ. ะญั‚ะธ ะผะพะดะตะปะธ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝั‹ ะดะปั ัƒะดะพะฒะปะตั‚ะฒะพั€ะตะฝะธั ั€ะฐะทะปะธั‡ะฝั‹ั… ั‚ั€ะตะฑะพะฒะฐะฝะธะน, ะพั‚ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะดะพ ะฑะพะปะตะต ัะปะพะถะฝั‹ั… ะทะฐะดะฐั‡, ั‚ะฐะบะธั… ะบะฐะบ ัะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ, ะพะฟั€ะตะดะตะปะตะฝะธะต ะฟะพะทั‹/ะบะปัŽั‡ะตะฒั‹ั… ั‚ะพั‡ะตะบ ะธ ะบะปะฐััะธั„ะธะบะฐั†ะธั. + +ะšะฐะถะดะฐั ะฒะฐั€ะธะฐั†ะธั ัะตั€ะธะธ YOLOv8 ะพะฟั‚ะธะผะธะทะธั€ะพะฒะฐะฝะฐ ะดะปั ัะฒะพะตะน ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒัŽั‰ะตะน ะทะฐะดะฐั‡ะธ, ะพะฑะตัะฟะตั‡ะธะฒะฐั ะฒั‹ัะพะบัƒัŽ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะธ ั‚ะพั‡ะฝะพัั‚ัŒ. ะšั€ะพะผะต ั‚ะพะณะพ, ัั‚ะธ ะผะพะดะตะปะธ ัะพะฒะผะตัั‚ะธะผั‹ ัะพ ะผะฝะพะถะตัั‚ะฒะพะผ ั€ะตะถะธะผะพะฒ ั€ะฐะฑะพั‚ั‹, ะฒะบะปัŽั‡ะฐั [ะ’ั‹ะฒะพะด](../modes/predict.md), [ะŸั€ะพะฒะตั€ะบัƒ](../modes/val.md), [ะžะฑัƒั‡ะตะฝะธะต](../modes/train.md) ะธ [ะญะบัะฟะพั€ั‚](../modes/export.md), ั‡ั‚ะพ ะพะฑะปะตะณั‡ะฐะตั‚ ะธั… ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะฝะฐ ั€ะฐะทะปะธั‡ะฝั‹ั… ัั‚ะฐะฟะฐั… ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั ะธ ั€ะฐะทั€ะฐะฑะพั‚ะบะธ. 
+ +| ะœะพะดะตะปัŒ | ะะฐะทะฒะฐะฝะธั ั„ะฐะนะปะพะฒ | ะ—ะฐะดะฐั‡ะฐ | ะ’ั‹ะฒะพะด | ะŸั€ะพะฒะตั€ะบะฐ | ะžะฑัƒั‡ะตะฝะธะต | ะญะบัะฟะพั€ั‚ | +|-------------|----------------------------------------------------------------------------------------------------------------|------------------------------------------------|-------|----------|----------|---------| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [ะžะฑะฝะฐั€ัƒะถะตะฝะธะต](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [ะŸะพะทะฐ/ะบะปัŽั‡ะตะฒั‹ะต ั‚ะพั‡ะบะธ](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [ะšะปะฐััะธั„ะธะบะฐั†ะธั](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +ะ”ะฐะฝะฝะฐั ั‚ะฐะฑะปะธั†ะฐ ะฟั€ะตะดะพัั‚ะฐะฒะปัะตั‚ ะพะฑะทะพั€ ะฒะฐั€ะธะฐะฝั‚ะพะฒ ะผะพะดะตะปะตะน YOLOv8, ะฟะพะดั‡ะตั€ะบะธะฒะฐั ะธั… ะฟั€ะธะผะตะฝะธะผะพัั‚ัŒ ะบ ะบะพะฝะบั€ะตั‚ะฝั‹ะผ ะทะฐะดะฐั‡ะฐะผ ะธ ะธั… ัะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ั ั€ะฐะทะปะธั‡ะฝั‹ะผะธ ั€ะตะถะธะผะฐะผะธ ั€ะฐะฑะพั‚ั‹, ั‚ะฐะบะธะผะธ ะบะฐะบ ะ’ั‹ะฒะพะด, ะŸั€ะพะฒะตั€ะบะฐ, ะžะฑัƒั‡ะตะฝะธะต ะธ ะญะบัะฟะพั€ั‚. ะญั‚ะพ ะดะตะผะพะฝัั‚ั€ะธั€ัƒะตั‚ ะณะธะฑะบะพัั‚ัŒ ะธ ะฝะฐะดะตะถะฝะพัั‚ัŒ ัะตั€ะธะธ YOLOv8, ั‡ั‚ะพ ะดะตะปะฐะตั‚ ะธั… ะฟะพะดั…ะพะดัั‰ะธะผะธ ะดะปั ัˆะธั€ะพะบะพะณะพ ัะฟะตะบั‚ั€ะฐ ะฟั€ะธะปะพะถะตะฝะธะน ะฒ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะผ ะทั€ะตะฝะธะธ. + +## ะŸะพะบะฐะทะฐั‚ะตะปะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ + +!!! ะŸั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ + + === "ะžะฑะฝะฐั€ัƒะถะตะฝะธะต (COCO)" + + ะกะผ. [ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ](https://docs.ultralytics.com/tasks/detect/) ะดะปั ะฟั€ะธะผะตั€ะพะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ัั‚ะธั… ะผะพะดะตะปะตะน, ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฝะฐ [COCO](https://docs.ultralytics.com/datasets/detect/coco/), ะฒะบะปัŽั‡ะฐัŽั‰ะธั… 80 ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะบะปะฐััะพะฒ. + + | ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPval
50-95 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(ะ‘) | + | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "ะžะฑะฝะฐั€ัƒะถะตะฝะธะต (Open Images V7)" + + ะกะผ. [ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ](https://docs.ultralytics.com/tasks/detect/) ะดะปั ะฟั€ะธะผะตั€ะพะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ัั‚ะธั… ะผะพะดะตะปะตะน, ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฝะฐ [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/), ะฒะบะปัŽั‡ะฐัŽั‰ะธั… 600 ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะบะปะฐััะพะฒ. + + | ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPval
50-95 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(ะ‘) | + | ----------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "ะกะตะณะผะตะฝั‚ะฐั†ะธั (COCO)" + + ะกะผ. [ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ัะตะณะผะตะฝั‚ะฐั†ะธะธ](https://docs.ultralytics.com/tasks/segment/) ะดะปั ะฟั€ะธะผะตั€ะพะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ัั‚ะธั… ะผะพะดะตะปะตะน, ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฝะฐ [COCO](https://docs.ultralytics.com/datasets/segment/coco/), ะฒะบะปัŽั‡ะฐัŽั‰ะธั… 80 ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะบะปะฐััะพะฒ. + + | ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPbox
50-95 | mAPmask
50-95 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(M) | FLOPs
(ะ‘) | + | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "ะšะปะฐััะธั„ะธะบะฐั†ะธั (ImageNet)" + + ะกะผ. [ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ](https://docs.ultralytics.com/tasks/classify/) ะดะปั ะฟั€ะธะผะตั€ะพะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ัั‚ะธั… ะผะพะดะตะปะตะน, ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฝะฐ [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), ะฒะบะปัŽั‡ะฐัŽั‰ะธั… 1000 ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะบะปะฐััะพะฒ. + + | ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | acc
top1 | acc
top5 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(M) | FLOPs
(ะ‘) ะฟั€ะธ 640 | + | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ | + | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | + | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | + | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | + | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | + | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + + === "ะŸะพะทะฐ (COCO)" + + ะกะผ. [ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ ะพั†ะตะฝะบะต ะฟะพะทั‹](https://docs.ultralytics.com/tasks/segment/) ะดะปั ะฟั€ะธะผะตั€ะพะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ัั‚ะธั… ะผะพะดะตะปะตะน, ะพะฑัƒั‡ะตะฝะฝั‹ั… ะฝะฐ [COCO](https://docs.ultralytics.com/datasets/pose/coco/), ะฒะบะปัŽั‡ะฐัŽั‰ะธั… 1 ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะน ะบะปะฐัั - 'person'. + + | ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPpose
50-95 | mAPpose
50 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(M) | FLOPs
(ะ‘) | + | ---------------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะ’ ัั‚ะพะผ ะฟั€ะธะผะตั€ะต ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะฟั€ะพัั‚ั‹ะต ะฟั€ะธะผะตั€ั‹ ะพะฑัƒั‡ะตะฝะธั ะธ ะฒั‹ะฒะพะดะฐ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ YOLOv8. ะ”ะปั ะฟะพะปะฝะพะน ะดะพะบัƒะผะตะฝั‚ะฐั†ะธะธ ะพะฑ ัั‚ะธั… ะธ ะดั€ัƒะณะธั… [ั€ะตะถะธะผะฐั…](../modes/index.md) ัะผ. ัั‚ั€ะฐะฝะธั†ั‹ ะดะพะบัƒะผะตะฝั‚ะฐั†ะธะธ ะฟะพ [ะŸั€ะตะดัะบะฐะทะฐะฝะธัŽ](../modes/predict.md), [ะžะฑัƒั‡ะตะฝะธัŽ](../modes/train.md), [ะŸั€ะพะฒะตั€ะบะต](../modes/val.md) ะธ [ะญะบัะฟะพั€ั‚ัƒ](../modes/export.md). + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะฟั€ะธะฒะตะดะตะฝะฝั‹ะน ะฝะธะถะต ะฟั€ะธะผะตั€ ะพั‚ะฝะพัะธั‚ัั ะบ ะผะพะดะตะปัะผ YOLOv8 ะดะปั [ะ”ะตั‚ะตะบั†ะธะธ](../tasks/detect.md) ะพะฑัŠะตะบั‚ะพะฒ. ะ”ะปั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ั… ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ะทะฐะดะฐั‡ ัะผ. ะดะพะบัƒะผะตะฝั‚ะฐั†ะธัŽ ะฟะพ [ะกะตะณะผะตะฝั‚ะฐั†ะธะธ](../tasks/segment.md), [ะšะปะฐััะธั„ะธะบะฐั†ะธะธ](../tasks/classify.md) ะธ [ะŸะพะทะต](../tasks/pose.md). + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + + ะŸั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ PyTorch `*.pt`, ะฐ ั‚ะฐะบะถะต ั„ะฐะนะปั‹ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ `*.yaml` ะผะพะณัƒั‚ ะฑั‹ั‚ัŒ ะฟะตั€ะตะดะฐะฝั‹ ะบะปะฐัััƒ `YOLO()` ะดะปั ัะพะทะดะฐะฝะธั ัะบะทะตะผะฟะปัั€ะฐ ะผะพะดะตะปะธ ะฝะฐ Python: + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO + model = YOLO('yolov8n.pt') + + # ะžั‚ะพะฑั€ะฐะทะธั‚ัŒ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ ะผะพะดะตะปะธ (ะฟะพ ะถะตะปะฐะฝะธัŽ) + model.info() + + # ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ ะฝะฐ ะฟั€ะธะผะตั€ะต ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… COCO8 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ะ’ั‹ะฟะพะปะฝะธั‚ะต ะฒั‹ะฒะพะด ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะผะพะดะตะปะธ YOLOv8n ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ 'bus.jpg' + results = model('ะฟัƒั‚ัŒ/ะบ/ะธะทะพะฑั€ะฐะถะตะฝะธัŽ/bus.jpg') + ``` + + === "CLI" + + ะ”ะพัั‚ัƒะฟะฝั‹ ะบะพะผะฐะฝะดั‹ CLI ะดะปั ะฟั€ัะผะพะณะพ ะทะฐะฟัƒัะบะฐ ะผะพะดะตะปะตะน: + + ```bash + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO ะธ ะพะฑัƒั‡ะธั‚ะต ะตะต ะฝะฐ ะฟั€ะธะผะตั€ะต ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… COCO8 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั COCO ะธ ะฒั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ 'bus.jpg' + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ะฆะธั‚ะธั€ะพะฒะฐะฝะธะต ะธ ะฑะปะฐะณะพะดะฐั€ะฝะพัั‚ะธ + +ะ•ัะปะธ ะฒั‹ ะธัะฟะพะปัŒะทัƒะตั‚ะต ะผะพะดะตะปัŒ YOLOv8 ะธะปะธ ะปัŽะฑะพะต ะดั€ัƒะณะพะต ะฟั€ะพะณั€ะฐะผะผะฝะพะต ะพะฑะตัะฟะตั‡ะตะฝะธะต ะธะท ัั‚ะพะณะพ ั€ะตะฟะพะทะธั‚ะพั€ะธั ะฒ ัะฒะพะตะน ั€ะฐะฑะพั‚ะต, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะฟั€ะพั†ะธั‚ะธั€ัƒะนั‚ะต ะตะณะพ ะฒ ัะปะตะดัƒัŽั‰ะตะผ ั„ะพั€ะผะฐั‚ะต: + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะธะดะตะฝั‚ะธั„ะธะบะฐั‚ะพั€ ั†ะธั„ั€ะพะฒะพะณะพ ะพะฑัŠะตะบั‚ะฐ (DOI) ะฝะฐั…ะพะดะธั‚ัั ะฝะฐ ัั‚ะฐะดะธะธ ะฟะพะปัƒั‡ะตะฝะธั ะธ ะฑัƒะดะตั‚ ะดะพะฑะฐะฒะปะตะฝ ะฒ ั†ะธั‚ะธั€ะพะฒะฐะฝะธะต, ะบะฐะบ ั‚ะพะปัŒะบะพ ะพะฝ ัั‚ะฐะฝะตั‚ ะดะพัั‚ัƒะฟะฝั‹ะผ. ะœะพะดะตะปะธ YOLOv8 ะฟั€ะตะดะพัั‚ะฐะฒะปััŽั‚ัั ะฟะพะด ะปะธั†ะตะฝะทะธะตะน [AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) ะธ ะปะธั†ะตะฝะทะธะตะน [Enterprise](https://ultralytics.com/license). 
diff --git a/ultralytics/docs/ru/models/yolov8.md:Zone.Identifier b/ultralytics/docs/ru/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/benchmark.md b/ultralytics/docs/ru/modes/benchmark.md new file mode 100755 index 0000000..564edb1 --- /dev/null +++ b/ultralytics/docs/ru/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะฟั€ะพั„ะธะปะธั€ะพะฒะฐั‚ัŒ ัะบะพั€ะพัั‚ัŒ ะธ ั‚ะพั‡ะฝะพัั‚ัŒ YOLOv8 ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะฐั… ัะบัะฟะพั€ั‚ะฐ; ะฟะพะปัƒั‡ะธั‚ะต ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ ะผะตั‚ั€ะธะบะฐั… mAP50-95, accuracy_top5 ะธ ะดั€. +keywords: Ultralytics, YOLOv8, ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณ, ะฟั€ะพั„ะธะปะธั€ะพะฒะฐะฝะธะต ัะบะพั€ะพัั‚ะธ, ะฟั€ะพั„ะธะปะธั€ะพะฒะฐะฝะธะต ั‚ะพั‡ะฝะพัั‚ะธ, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ YOLO +--- + +# ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ ะผะพะดะตะปะตะน ั Ultralytics YOLO + +ะญะบะพัะธัั‚ะตะผะฐ ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ Ultralytics YOLO + +## ะ’ะฒะตะดะตะฝะธะต + +ะŸะพัะปะต ั‚ะพะณะพ, ะบะฐะบ ะฒะฐัˆะฐ ะผะพะดะตะปัŒ ะพะฑัƒั‡ะตะฝะฐ ะธ ะฒะฐะปะธะดะธั€ะพะฒะฐะฝะฐ, ัะปะตะดัƒัŽั‰ะธะผ ะปะพะณะธั‡ะตัะบะธะผ ัˆะฐะณะพะผ ัะฒะปัะตั‚ัั ะพั†ะตะฝะบะฐ ะตะต ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ั€ะตะฐะปัŒะฝั‹ั… ัั†ะตะฝะฐั€ะธัั…. ะ ะตะถะธะผ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ ะฒ Ultralytics YOLOv8 ัะปัƒะถะธั‚ ัั‚ะพะน ั†ะตะปะธ, ะฟั€ะตะดะพัั‚ะฐะฒะปัั ะฝะฐะดะตะถะฝั‹ะน ะธะฝัั‚ั€ัƒะผะตะฝั‚ะฐั€ะธะน ะดะปั ะพั†ะตะฝะบะธ ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฒ ั€ัะดะต ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ. + +## ะŸะพั‡ะตะผัƒ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณ ะบั€ะธั‚ะธั‡ะตะฝ? + +- **ะžะฑะพัะฝะพะฒะฐะฝะฝั‹ะต ั€ะตัˆะตะฝะธั:** ะŸะพะปัƒั‡ะตะฝะธะต ะฟั€ะตะดัั‚ะฐะฒะปะตะฝะธั ะพ ะบะพะผะฟั€ะพะผะธััะต ะผะตะถะดัƒ ัะบะพั€ะพัั‚ัŒัŽ ะธ ั‚ะพั‡ะฝะพัั‚ัŒัŽ. +- **ะ ะฐัะฟั€ะตะดะตะปะตะฝะธะต ั€ะตััƒั€ัะพะฒ:** ะŸะพะฝะธะผะฐะฝะธะต ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ ะฝะฐ ั€ะฐะทะฝะพะผ ะพะฑะพั€ัƒะดะพะฒะฐะฝะธะธ. +- **ะžะฟั‚ะธะผะธะทะฐั†ะธั:** ะ’ั‹ััะฝะตะฝะธะต, ะบะฐะบะพะน ั„ะพั€ะผะฐั‚ ัะบัะฟะพั€ั‚ะฐ ะฟั€ะตะดะปะฐะณะฐะตั‚ ะปัƒั‡ัˆัƒัŽ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะดะปั ะฒะฐัˆะตะณะพ ะบะพะฝะบั€ะตั‚ะฝะพะณะพ ัะปัƒั‡ะฐั. +- **ะญั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ ะทะฐั‚ั€ะฐั‚:** ะกะดะตะปะฐะนั‚ะต ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ั… ั€ะตััƒั€ัะพะฒ ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝั‹ะผ ะฝะฐ ะพัะฝะพะฒะต ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ. + +### ะšะปัŽั‡ะตะฒั‹ะต ะผะตั‚ั€ะธะบะธ ะฒ ั€ะตะถะธะผะต ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ + +- **mAP50-95:** ะ”ะปั ะดะตั‚ะตะบั‚ะธั€ะพะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ, ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธ ะพั†ะตะฝะบะธ ะฟะพะท. +- **accuracy_top5:** ะ”ะปั ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน. +- **ะ’ั€ะตะผั ะธะฝั„ะตั€ะตะฝัะฐ:** ะ’ั€ะตะผั, ะทะฐั‚ั€ะฐั‡ะธะฒะฐะตะผะพะต ะฝะฐ ะบะฐะถะดะพะต ะธะทะพะฑั€ะฐะถะตะฝะธะต ะฒ ะผะธะปะปะธัะตะบัƒะฝะดะฐั…. + +### ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ + +- **ONNX:** ะ”ะปั ะพะฟั‚ะธะผะฐะปัŒะฝะพะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะฆะŸ +- **TensorRT:** ะ”ะปั ะผะฐะบัะธะผะฐะปัŒะฝะพะน ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ะธ GPU +- **OpenVINO:** ะ”ะปั ะพะฟั‚ะธะผะธะทะฐั†ะธะธ ะฟะพะด ะฐะฟะฟะฐั€ะฐั‚ะฝะพะต ะพะฑะตัะฟะตั‡ะตะฝะธะต Intel +- **CoreML, TensorFlow SavedModel ะธ ะดั€ัƒะณะธะต:** ะ”ะปั ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ั… ะฟะพั‚ั€ะตะฑะฝะพัั‚ะตะน ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั. + +!!! 
Tip "ะกะพะฒะตั‚" + + * ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะฒ ONNX ะธะปะธ OpenVINO ะดะปั ัƒัะบะพั€ะตะฝะธั ะฟั€ะพั†ะตััะพั€ะฐ ะดะพ 3 ั€ะฐะท. + * ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะฒ TensorRT ะดะปั ัƒัะบะพั€ะตะฝะธั GPU ะดะพ 5 ั€ะฐะท. + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะ—ะฐะฟัƒัั‚ะธั‚ะต ะฑะตะฝั‡ะผะฐั€ะบ YOLOv8n ะฝะฐ ะฒัะตั… ะฟะพะดะดะตั€ะถะธะฒะฐะตะผั‹ั… ั„ะพั€ะผะฐั‚ะฐั… ัะบัะฟะพั€ั‚ะฐ, ะฒะบะปัŽั‡ะฐั ONNX, TensorRT ะธ ั‚. ะด. ะกะผะพั‚ั€ะธั‚ะต ั€ะฐะทะดะตะป ะั€ะณัƒะผะตะฝั‚ั‹ ะฝะธะถะต ะดะปั ะฟะพะปะฝะพะณะพ ัะฟะธัะบะฐ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ัะบัะฟะพั€ั‚ะฐ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # ะ‘ะตะฝั‡ะผะฐั€ะบ ะฝะฐ GPU + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## ะั€ะณัƒะผะตะฝั‚ั‹ + +ะั€ะณัƒะผะตะฝั‚ั‹, ั‚ะฐะบะธะต ะบะฐะบ `model`, `data`, `imgsz`, `half`, `device` ะธ `verbose`, ะฟั€ะตะดะพัั‚ะฐะฒะปััŽั‚ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัะผ ะณะธะฑะบะพัั‚ัŒ ะดะปั ั‚ะพะฝะบะพะน ะฝะฐัั‚ั€ะพะนะบะธ ะฑะตะฝั‡ะผะฐั€ะบะพะฒ ะฟะพะด ะธั… ะบะพะฝะบั€ะตั‚ะฝั‹ะต ะฟะพั‚ั€ะตะฑะฝะพัั‚ะธ ะธ ัั€ะฐะฒะฝะตะฝะธั ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ ั ะปะตะณะบะพัั‚ัŒัŽ. + +| ะšะปัŽั‡ | ะ—ะฝะฐั‡ะตะฝะธะต | ะžะฟะธัะฐะฝะธะต | +|-----------|----------|----------------------------------------------------------------------------------| +| `model` | `None` | ะฟัƒั‚ัŒ ะบ ั„ะฐะนะปัƒ ะผะพะดะตะปะธ, ะฝะฐะฟั€ะธะผะตั€ yolov8n.pt, yolov8n.yaml | +| `data` | `None` | ะฟัƒั‚ัŒ ะบ YAML, ััั‹ะปะฐัŽั‰ะตะผัƒัั ะฝะฐ ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะดะปั ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ (ะฟะพะด ะผะตั‚ะบะพะน `val`) | +| `imgsz` | `640` | ั€ะฐะทะผะตั€ ะธะทะพะฑั€ะฐะถะตะฝะธั ะบะฐะบ ัะบะฐะปัั€ ะธะปะธ ัะฟะธัะพะบ (h, w), ะฝะฐะฟั€ะธะผะตั€ (640, 480) | +| `half` | `False` | ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต FP16 | +| `int8` | `False` | ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต INT8 | +| `device` | `None` | ัƒัั‚ั€ะพะนัั‚ะฒะพ ะดะปั ะทะฐะฟัƒัะบะฐ, ะฝะฐะฟั€ะธะผะตั€ cuda device=0 ะธะปะธ device=0,1,2,3 ะธะปะธ device=cpu | +| `verbose` | `False` | ะฝะต ะฟั€ะพะดะพะปะถะฐั‚ัŒ ะฟั€ะธ ะพัˆะธะฑะบะต (bool), ะธะปะธ ะฟะพั€ะพะณะพะฒะพะต ะทะฝะฐั‡ะตะฝะธะต ะดะปั `val` (float) | + +## ะคะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ + +ะ‘ะตะฝั‡ะผะฐั€ะบะธ ะฟะพะฟั‹ั‚ะฐัŽั‚ัั ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะฟัƒัั‚ะธั‚ัŒ ะดะปั ะฒัะตั… ะฒะพะทะผะพะถะฝั‹ั… ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ ะฝะธะถะต. 
+ +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|---------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +ะกะผะพั‚ั€ะธั‚ะต ะฟะพะปะฝัƒัŽ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ `export` ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะญะบัะฟะพั€ั‚](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/ru/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/ru/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/export.md b/ultralytics/docs/ru/modes/export.md new file mode 100755 index 0000000..6351f21 --- /dev/null +++ b/ultralytics/docs/ru/modes/export.md @@ -0,0 +1,108 @@ +--- +comments: true +description: ะŸะพัˆะฐะณะพะฒะพะต ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ ัะบัะฟะพั€ั‚ัƒ ะฒะฐัˆะธั… ะผะพะดะตะปะตะน YOLOv8 ะฒ ั€ะฐะทะปะธั‡ะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹, ั‚ะฐะบะธะต ะบะฐะบ ONNX, TensorRT, CoreML ะธ ะดั€ัƒะณะธะต, ะดะปั ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั. ะ˜ะทัƒั‡ะธั‚ะต ัะตะนั‡ะฐั!. +keywords: YOLO, YOLOv8, Ultralytics, ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, ัะบัะฟะพั€ั‚ ะผะพะดะตะปะธ +--- + +# ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ ั Ultralytics YOLO + +ะญะบะพัะธัั‚ะตะผะฐ ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ Ultralytics YOLO + +## ะ’ะฒะตะดะตะฝะธะต + +ะžัะฝะพะฒะฝะฐั ั†ะตะปัŒ ั‚ั€ะตะฝะธั€ะพะฒะบะธ ะผะพะดะตะปะธ โ€” ะตั‘ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธะต ะดะปั ั€ะตะฐะปัŒะฝั‹ั… ะฟั€ะธะปะพะถะตะฝะธะน. ะ ะตะถะธะผ ัะบัะฟะพั€ั‚ะฐ ะฒ Ultralytics YOLOv8 ะฟั€ะตะดะปะฐะณะฐะตั‚ ะผะฝะพะถะตัั‚ะฒะพ ะฒะฐั€ะธะฐะฝั‚ะพะฒ ะดะปั ัะบัะฟะพั€ั‚ะฐ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ะฒ ั€ะฐะทะปะธั‡ะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹, ะพะฑะตัะฟะตั‡ะธะฒะฐั ะฒะพะทะผะพะถะฝะพัั‚ัŒ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั ะฝะฐ ั€ะฐะทะฝั‹ั… ะฟะปะฐั‚ั„ะพั€ะผะฐั… ะธ ัƒัั‚ั€ะพะนัั‚ะฒะฐั…. 
ะญั‚ะพ ะธัั‡ะตั€ะฟั‹ะฒะฐัŽั‰ะตะต ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฝะฐะฟั€ะฐะฒะปะตะฝะพ ะฝะฐ ั‚ะพ, ั‡ั‚ะพะฑั‹ ะฟั€ะพะฒะตัั‚ะธ ะฒะฐั ั‡ะตั€ะตะท ั‚ะพะฝะบะพัั‚ะธ ัะบัะฟะพั€ั‚ะฐ ะผะพะดะตะปะตะน, ะดะตะผะพะฝัั‚ั€ะธั€ัƒั, ะบะฐะบ ะดะพัั‚ะธั‡ัŒ ะผะฐะบัะธะผะฐะปัŒะฝะพะน ัะพะฒะผะตัั‚ะธะผะพัั‚ะธ ะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. + +

+
+ +
+ ะกะผะพั‚ั€ะธั‚ะต: ะšะฐะบ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ Ultralytics YOLOv8 ะธ ะทะฐะฟัƒัั‚ะธั‚ัŒ ะถะธะฒะพะต ะฒะพัะฟั€ะพะธะทะฒะตะดะตะฝะธะต ะฝะฐ ะฒะตะฑ-ะบะฐะผะตั€ะต. +

+ +## ะŸะพั‡ะตะผัƒ ัั‚ะพะธั‚ ะฒั‹ะฑั€ะฐั‚ัŒ ั€ะตะถะธะผ ัะบัะฟะพั€ั‚ะฐ YOLOv8? + +- **ะฃะฝะธะฒะตั€ัะฐะปัŒะฝะพัั‚ัŒ:** ะญะบัะฟะพั€ั‚ ะฒ ะฝะตัะบะพะปัŒะบะพ ั„ะพั€ะผะฐั‚ะพะฒ, ะฒะบะปัŽั‡ะฐั ONNX, TensorRT, CoreML ะธ ะดั€ัƒะณะธะต. +- **ะŸั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ:** ะฃะฒะตะปะธั‡ะตะฝะธะต ัะบะพั€ะพัั‚ะธ ะฝะฐ GPU ะดะพ 5 ั€ะฐะท ั TensorRT ะธ ัƒัะบะพั€ะตะฝะธะต ะฝะฐ CPU ะดะพ 3 ั€ะฐะท ั ONNX ะธะปะธ OpenVINO. +- **ะกะพะฒะผะตัั‚ะธะผะพัั‚ัŒ:** ะกะดะตะปะฐะนั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ ัƒะฝะธะฒะตั€ัะฐะปัŒะฝะพ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะตะผะพะน ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ั… ะธ ะฟั€ะพะณั€ะฐะผะผะฝั‹ั… ัั€ะตะดะฐั…. +- **ะŸั€ะพัั‚ะพั‚ะฐ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั:** ะŸั€ะพัั‚ะพะน ะธะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ ะธ Python API ะดะปั ะฑั‹ัั‚ั€ะพะณะพ ะธ ะฟั€ะพัั‚ะพะณะพ ัะบัะฟะพั€ั‚ะฐ ะผะพะดะตะปะตะน. + +### ะšะปัŽั‡ะตะฒั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ ั€ะตะถะธะผะฐ ัะบัะฟะพั€ั‚ะฐ + +ะ’ะพั‚ ะฝะตะบะพั‚ะพั€ั‹ะต ะธะท ะบะปัŽั‡ะตะฒั‹ั… ั„ัƒะฝะบั†ะธะน: + +- **ะญะบัะฟะพั€ั‚ ะพะดะฝะธะผ ะบะปะธะบะพะผ:** ะŸั€ะพัั‚ั‹ะต ะบะพะผะฐะฝะดั‹ ะดะปั ัะบัะฟะพั€ั‚ะฐ ะฒ ั€ะฐะทะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹. +- **ะŸะฐะบะตั‚ะฝั‹ะน ัะบัะฟะพั€ั‚:** ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะตะน, ัะฟะพัะพะฑะฝั‹ั… ะบ ะฟะฐะบะตั‚ะฝะพะน ะพะฑั€ะฐะฑะพั‚ะบะต. +- **ะžะฟั‚ะธะผะธะทะธั€ะพะฒะฐะฝะฝะพะต ะฟั€ะตะดัะบะฐะทะฐะฝะธะต:** ะญะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝั‹ะต ะผะพะดะตะปะธ ะพะฟั‚ะธะผะธะทะธั€ะพะฒะฐะฝั‹ ะดะปั ะฑะพะปะตะต ะฑั‹ัั‚ั€ะพะณะพ ะฟั€ะตะดัะบะฐะทะฐะฝะธั. +- **ะฃั‡ะตะฑะฝั‹ะต ะฒะธะดะตะพ:** ะ“ะปัƒะฑะพะบะธะต ั€ัƒะบะพะฒะพะดัั‚ะฒะฐ ะธ ะพะฑัƒั‡ะฐัŽั‰ะธะต ะฒะธะดะตะพ ะดะปั ะณะปะฐะดะบะพะณะพ ะพะฟั‹ั‚ะฐ ัะบัะฟะพั€ั‚ะฐ. + +!!! Tip "ะกะพะฒะตั‚" + + * ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะฒ ONNX ะธะปะธ OpenVINO ะดะปั ัƒัะบะพั€ะตะฝะธั CPU ะดะพ 3 ั€ะฐะท. + * ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะฒ TensorRT ะดะปั ัƒะฒะตะปะธั‡ะตะฝะธั ัะบะพั€ะพัั‚ะธ ะฝะฐ GPU ะดะพ 5 ั€ะฐะท. + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ YOLOv8n ะฒ ะดั€ัƒะณะพะน ั„ะพั€ะผะฐั‚, ะฝะฐะฟั€ะธะผะตั€ ONNX ะธะปะธ TensorRT. ะกะผะพั‚ั€ะธั‚ะต ั€ะฐะทะดะตะป ะั€ะณัƒะผะตะฝั‚ั‹ ะฝะธะถะต ะดะปั ะฟะพะปะฝะพะณะพ ัะฟะธัะบะฐ ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะบะฐ ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะบะฐ ะพะฑัƒั‡ะตะฝะฝะพะน ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะน ะผะพะดะตะปะธ + + # ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ัะบัะฟะพั€ั‚ ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + yolo export model=path/to/best.pt format=onnx # ัะบัะฟะพั€ั‚ ะพะฑัƒั‡ะตะฝะฝะพะน ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะน ะผะพะดะตะปะธ + ``` + +## ะั€ะณัƒะผะตะฝั‚ั‹ + +ะะฐัั‚ั€ะพะนะบะธ ัะบัะฟะพั€ั‚ะฐ ะผะพะดะตะปะตะน YOLO ะพั‚ะฝะพััั‚ัั ะบ ั€ะฐะทะปะธั‡ะฝั‹ะผ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธัะผ ะธ ะพะฟั†ะธัะผ, ะธัะฟะพะปัŒะทัƒะตะผั‹ะผ ะดะปั ัะพั…ั€ะฐะฝะตะฝะธั ะธะปะธ ัะบัะฟะพั€ั‚ะฐ ะผะพะดะตะปะธ ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฒ ะดั€ัƒะณะธั… ัั€ะตะดะฐั… ะธะปะธ ะฟะปะฐั‚ั„ะพั€ะผะฐั…. ะญั‚ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะผะพะณัƒั‚ ะฒะปะธัั‚ัŒ ะฝะฐ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะผะพะดะตะปะธ, ั€ะฐะทะผะตั€ ะธ ัะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ั ั€ะฐะทะฝั‹ะผะธ ัะธัั‚ะตะผะฐะผะธ. 
ะะตะบะพั‚ะพั€ั‹ะต ะพะฑั‰ะธะต ะฝะฐัั‚ั€ะพะนะบะธ ัะบัะฟะพั€ั‚ะฐ YOLO ะฒะบะปัŽั‡ะฐัŽั‚ ั„ะพั€ะผะฐั‚ ัะบัะฟะพั€ั‚ะธั€ัƒะตะผะพะณะพ ั„ะฐะนะปะฐ ะผะพะดะตะปะธ (ะฝะฐะฟั€ะธะผะตั€, ONNX, TensorFlow SavedModel), ัƒัั‚ั€ะพะนัั‚ะฒะพ, ะฝะฐ ะบะพั‚ะพั€ะพะผ ะฑัƒะดะตั‚ ะทะฐะฟัƒั‰ะตะฝะฐ ะผะพะดะตะปัŒ (ะฝะฐะฟั€ะธะผะตั€, CPU, GPU), ะฐ ั‚ะฐะบะถะต ะฝะฐะปะธั‡ะธะต ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ั… ั„ัƒะฝะบั†ะธะน, ั‚ะฐะบะธั… ะบะฐะบ ะผะฐัะบะธ ะธะปะธ ะฝะตัะบะพะปัŒะบะพ ะผะตั‚ะพะบ ะฝะฐ ะบะพั€ะพะฑะบัƒ. ะ”ั€ัƒะณะธะต ั„ะฐะบั‚ะพั€ั‹, ะบะพั‚ะพั€ั‹ะต ะผะพะณัƒั‚ ะฟะพะฒะปะธัั‚ัŒ ะฝะฐ ะฟั€ะพั†ะตัั ัะบัะฟะพั€ั‚ะฐ, ะฒะบะปัŽั‡ะฐัŽั‚ ะบะพะฝะบั€ะตั‚ะฝะพะต ะทะฐะดะฐะฝะธะต, ะดะปั ะบะพั‚ะพั€ะพะณะพ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะผะพะดะตะปัŒ, ะธ ั‚ั€ะตะฑะพะฒะฐะฝะธั ะธะปะธ ะพะณั€ะฐะฝะธั‡ะตะฝะธั ั†ะตะปะตะฒะพะน ัั€ะตะดั‹ ะธะปะธ ะฟะปะฐั‚ั„ะพั€ะผั‹. ะ’ะฐะถะฝะพ ั‚ั‰ะฐั‚ะตะปัŒะฝะพ ั€ะฐััะผะพั‚ั€ะตั‚ัŒ ะธ ะฝะฐัั‚ั€ะพะธั‚ัŒ ัั‚ะธ ะฟะฐั€ะฐะผะตั‚ั€ั‹, ั‡ั‚ะพะฑั‹ ัƒะฑะตะดะธั‚ัŒัั, ั‡ั‚ะพ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝะฐั ะผะพะดะตะปัŒ ะพะฟั‚ะธะผะธะทะธั€ะพะฒะฐะฝะฐ ะดะปั ะฟั€ะตะดะฟะพะปะฐะณะฐะตะผะพะณะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะธ ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ัั„ั„ะตะบั‚ะธะฒะฝะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะฐ ะฒ ั†ะตะปะตะฒะพะน ัั€ะตะดะต. + +| ะšะปัŽั‡ | ะ—ะฝะฐั‡ะตะฝะธะต | ะžะฟะธัะฐะฝะธะต | +|-------------|-----------------|---------------------------------------------------------------------------| +| `format` | `'torchscript'` | ั„ะพั€ะผะฐั‚ ะดะปั ัะบัะฟะพั€ั‚ะฐ | +| `imgsz` | `640` | ั€ะฐะทะผะตั€ ะธะทะพะฑั€ะฐะถะตะฝะธั ะฒ ะฒะธะดะต ัะบะฐะปัั€ะฐ ะธะปะธ ัะฟะธัะบะฐ (h, w), ะฝะฐะฟั€ะธะผะตั€, (640, 480) | +| `keras` | `False` | ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ Keras ะดะปั ัะบัะฟะพั€ั‚ะฐ TF SavedModel | +| `optimize` | `False` | TorchScript: ะพะฟั‚ะธะผะธะทะฐั†ะธั ะดะปั ะผะพะฑะธะปัŒะฝั‹ั… ัƒัั‚ั€ะพะนัั‚ะฒ | +| `half` | `False` | ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต FP16 | +| `int8` | `False` | ะบะฒะฐะฝั‚ะพะฒะฐะฝะธะต INT8 | +| `dynamic` | `False` | ONNX/TensorRT: ะดะธะฝะฐะผะธั‡ะตัะบะธะต ะพัะธ | +| `simplify` | `False` | ONNX/TensorRT: ัƒะฟั€ะพั‰ะตะฝะธะต ะผะพะดะตะปะธ | +| `opset` | `None` | ONNX: ะฒะตั€ัะธั ะฝะฐะฑะพั€ะฐ ะพะฟะตั€ะฐั†ะธะน (ะฝะตะพะฑัะทะฐั‚ะตะปัŒะฝั‹ะน, ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ ะฟะพัะปะตะดะฝะธะน) | +| `workspace` | `4` | TensorRT: ั€ะฐะทะผะตั€ ั€ะฐะฑะพั‡ะตะน ะพะฑะปะฐัั‚ะธ (ะ“ะ‘) | +| `nms` | `False` | CoreML: ะดะพะฑะฐะฒะปะตะฝะธะต NMS | + +## ะคะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ + +ะ”ะพัั‚ัƒะฟะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ YOLOv8 ัƒะบะฐะทะฐะฝั‹ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. ะ’ั‹ ะผะพะถะตั‚ะต ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะฒ ะปัŽะฑะพะน ั„ะพั€ะผะฐั‚, ะธัะฟะพะปัŒะทัƒั ะฐั€ะณัƒะผะตะฝั‚ `format`, ะฝะฐะฟั€ะธะผะตั€, `format='onnx'` ะธะปะธ `format='engine'`. 
+ +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|---------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | diff --git a/ultralytics/docs/ru/modes/export.md:Zone.Identifier b/ultralytics/docs/ru/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/index.md b/ultralytics/docs/ru/modes/index.md new file mode 100755 index 0000000..fe4987e --- /dev/null +++ b/ultralytics/docs/ru/modes/index.md @@ -0,0 +1,73 @@ +--- +comments: true +description: ะžั‚ ะพะฑัƒั‡ะตะฝะธั ะดะพ ะพั‚ัะปะตะถะธะฒะฐะฝะธั - ะธัะฟะพะปัŒะทัƒะนั‚ะต ะฒัะต ะฒะพะทะผะพะถะฝะพัั‚ะธ YOLOv8 ะพั‚ Ultralytics. ะŸะพะปัƒั‡ะธั‚ะต ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะธ ะฟั€ะธะผะตั€ั‹ ะดะปั ะบะฐะถะดะพะณะพ ะฟะพะดะดะตั€ะถะธะฒะฐะตะผะพะณะพ ั€ะตะถะธะผะฐ, ะฒะบะปัŽั‡ะฐั ะฟั€ะพะฒะตั€ะบัƒ, ัะบัะฟะพั€ั‚ ะธ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณ. +keywords: Ultralytics, YOLOv8, ะœะฐัˆะธะฝะฝะพะต ะพะฑัƒั‡ะตะฝะธะต, ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะžะฑัƒั‡ะตะฝะธะต, ะŸั€ะพะฒะตั€ะบะฐ, ะŸั€ะตะดัะบะฐะทะฐะฝะธะต, ะญะบัะฟะพั€ั‚, ะžั‚ัะปะตะถะธะฒะฐะฝะธะต, ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ +--- + +# ะ ะตะถะธะผั‹ Ultralytics YOLOv8 + +ะญะบะพัะธัั‚ะตะผะฐ Ultralytics YOLO ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ + +## ะ’ะฒะตะดะตะฝะธะต + +Ultralytics YOLOv8 - ัั‚ะพ ะฝะต ะฟั€ะพัั‚ะพ ะตั‰ะต ะพะดะฝะฐ ะผะพะดะตะปัŒ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ; ัั‚ะพ ะผะฝะพะณะพั„ัƒะฝะบั†ะธะพะฝะฐะปัŒะฝะฐั ะฟะปะฐั‚ั„ะพั€ะผะฐ, ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝะฝะฐั ะดะปั ะพั…ะฒะฐั‚ะฐ ะฒัะตะณะพ ะถะธะทะฝะตะฝะฝะพะณะพ ั†ะธะบะปะฐ ะผะพะดะตะปะตะน ะผะฐัˆะธะฝะฝะพะณะพ ะพะฑัƒั‡ะตะฝะธั - ะพั‚ ะฒะฒะพะดะฐ ะดะฐะฝะฝั‹ั… ะธ ะพะฑัƒั‡ะตะฝะธั ะผะพะดะตะปะธ ะดะพ ะฒะฐะปะธะดะฐั†ะธะธ, ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั ะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะฒ ั€ะตะฐะปัŒะฝะพะผ ะผะธั€ะต. 
ะšะฐะถะดั‹ะน ั€ะตะถะธะผ ัะปัƒะถะธั‚ ะพะฟั€ะตะดะตะปะตะฝะฝะพะน ั†ะตะปะธ ะธ ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝ, ั‡ั‚ะพะฑั‹ ะฟั€ะตะดะปะพะถะธั‚ัŒ ะฒะฐะผ ะณะธะฑะบะพัั‚ัŒ ะธ ัั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ, ะฝะตะพะฑั…ะพะดะธะผัƒัŽ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡ ะธ ัั†ะตะฝะฐั€ะธะตะฒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั. + +

+
+ +
+ ะกะผะพั‚ั€ะธั‚ะต: ะ ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ ั€ะตะถะธะผะฐะผ Ultralytics: ะžะฑัƒั‡ะตะฝะธะต, ะŸั€ะพะฒะตั€ะบะฐ, ะŸั€ะตะดัะบะฐะทะฐะฝะธะต, ะญะบัะฟะพั€ั‚ ะธ ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ. +

+ +### ะžะฑะทะพั€ ั€ะตะถะธะผะพะฒ + +ะŸะพะฝะธะผะฐะฝะธะต ั€ะฐะทะปะธั‡ะฝั‹ั… **ั€ะตะถะธะผะพะฒ**, ะบะพั‚ะพั€ั‹ะต ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ Ultralytics YOLOv8, ะบั€ะธั‚ะธั‡ะตัะบะธ ะฒะฐะถะฝะพ ะดะปั ัั„ั„ะตะบั‚ะธะฒะฝะพะณะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฒะฐัˆะธั… ะผะพะดะตะปะตะน: + +- **ะ ะตะถะธะผ ะพะฑัƒั‡ะตะฝะธั (Train mode)**: ะะฐัั‚ั€ะพะนั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ ะฝะฐ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะต ะธะปะธ ะฟั€ะตะดะทะฐะณั€ัƒะถะตะฝะฝั‹ะต ะฝะฐะฑะพั€ั‹ ะดะฐะฝะฝั‹ั…. +- **ะ ะตะถะธะผ ะฟั€ะพะฒะตั€ะบะธ (Val mode)**: ะšะพะฝั‚ั€ะพะปัŒะฝะฐั ั‚ะพั‡ะบะฐ ะฟะพัะปะต ะพะฑัƒั‡ะตะฝะธั ะดะปั ะฒะฐะปะธะดะฐั†ะธะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ. +- **ะ ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธั (Predict mode)**: ะ ะฐัะบั€ะพะนั‚ะต ะฟั€ะตะดัะบะฐะทะฐั‚ะตะปัŒะฝัƒัŽ ะผะพั‰ัŒ ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฝะฐ ะดะฐะฝะฝั‹ั… ะธะท ั€ะตะฐะปัŒะฝะพะณะพ ะผะธั€ะฐ. +- **ะ ะตะถะธะผ ัะบัะฟะพั€ั‚ะฐ (Export mode)**: ะŸะพะดะณะพั‚ะพะฒัŒั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ ะบ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธัŽ ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะฐั…. +- **ะ ะตะถะธะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธั (Track mode)**: ะ ะฐััˆะธั€ัŒั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะดะพ ะฟั€ะธะปะพะถะตะฝะธะน ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ. +- **ะ ะตะถะธะผ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ (Benchmark mode)**: ะŸั€ะพะฐะฝะฐะปะธะทะธั€ัƒะนั‚ะต ัะบะพั€ะพัั‚ัŒ ะธ ั‚ะพั‡ะฝะพัั‚ัŒ ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฒ ั€ะฐะทะฝะพะพะฑั€ะฐะทะฝั‹ั… ัั€ะตะดะฐั… ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั. + +ะญั‚ะพ ะธัั‡ะตั€ะฟั‹ะฒะฐัŽั‰ะตะต ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฝะฐะฟั€ะฐะฒะปะตะฝะพ ะฝะฐ ั‚ะพ, ั‡ั‚ะพะฑั‹ ะดะฐั‚ัŒ ะฒะฐะผ ะพะฑะทะพั€ ะธ ะฟั€ะฐะบั‚ะธั‡ะตัะบะธะต ัะฒะตะดะตะฝะธั ะพ ะบะฐะถะดะพะผ ั€ะตะถะธะผะต, ะฟะพะผะพะณะฐั ะฒะฐะผ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะฟะพะปะฝั‹ะน ะฟะพั‚ะตะฝั†ะธะฐะป YOLOv8. + +## [ะžะฑัƒั‡ะตะฝะธะต (Train)](train.md) + +ะ ะตะถะธะผ ะพะฑัƒั‡ะตะฝะธั ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั ะผะพะดะตะปะธ YOLOv8 ะฝะฐ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั…. ะ’ ัั‚ะพะผ ั€ะตะถะธะผะต ะผะพะดะตะปัŒ ะพะฑัƒั‡ะฐะตั‚ัั ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ัƒะบะฐะทะฐะฝะฝะพะณะพ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะธ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ. ะŸั€ะพั†ะตัั ะพะฑัƒั‡ะตะฝะธั ะฒะบะปัŽั‡ะฐะตั‚ ะฒ ัะตะฑั ะพะฟั‚ะธะผะธะทะฐั†ะธัŽ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะผะพะดะตะปะธ, ั‡ั‚ะพะฑั‹ ะพะฝะฐ ะผะพะณะปะฐ ั‚ะพั‡ะฝะพ ะฟั€ะตะดัะบะฐะทั‹ะฒะฐั‚ัŒ ะบะปะฐััั‹ ะธ ะผะตัั‚ะพะฟะพะปะพะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ. + +[ะŸั€ะธะผะตั€ั‹ ะพะฑัƒั‡ะตะฝะธั](train.md){ .md-button } + +## [ะŸั€ะพะฒะตั€ะบะฐ (Val)](val.md) + +ะ ะตะถะธะผ ะฟั€ะพะฒะตั€ะบะธ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ะฒะฐะปะธะดะฐั†ะธะธ ะผะพะดะตะปะธ YOLOv8 ะฟะพัะปะต ะตะต ะพะฑัƒั‡ะตะฝะธั. ะ’ ัั‚ะพะผ ั€ะตะถะธะผะต ะผะพะดะตะปัŒ ะพั†ะตะฝะธะฒะฐะตั‚ัั ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะดะปั ะฒะฐะปะธะดะฐั†ะธะธ, ั‡ั‚ะพะฑั‹ ะธะทะผะตั€ะธั‚ัŒ ะตะต ั‚ะพั‡ะฝะพัั‚ัŒ ะธ ัะฟะพัะพะฑะฝะพัั‚ัŒ ะบ ะพะฑะพะฑั‰ะตะฝะธัŽ. ะญั‚ะพั‚ ั€ะตะถะธะผ ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดะปั ะฝะฐัั‚ั€ะพะนะบะธ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะผะพะดะตะปะธ ั ั†ะตะปัŒัŽ ัƒะปัƒั‡ัˆะตะฝะธั ะตะต ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. + +[ะŸั€ะธะผะตั€ั‹ ะฟั€ะพะฒะตั€ะบะธ](val.md){ .md-button } + +## [ะŸั€ะตะดัะบะฐะทะฐะฝะธะต (Predict)](predict.md) + +ะ ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8 ะฝะฐ ะฝะพะฒั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธัั… ะธะปะธ ะฒะธะดะตะพ. 
ะ’ ัั‚ะพะผ ั€ะตะถะธะผะต ะผะพะดะตะปัŒ ะทะฐะณั€ัƒะถะฐะตั‚ัั ะธะท ั„ะฐะนะปะฐ ะบะพะฝั‚ั€ะพะปัŒะฝะพะน ั‚ะพั‡ะบะธ, ะธ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒ ะผะพะถะตั‚ ะฟั€ะตะดะพัั‚ะฐะฒะธั‚ัŒ ะธะทะพะฑั€ะฐะถะตะฝะธั ะธะปะธ ะฒะธะดะตะพ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฒั‹ะฒะพะดะฐ. ะœะพะดะตะปัŒ ะฟั€ะตะดัะบะฐะทั‹ะฒะฐะตั‚ ะบะปะฐััั‹ ะธ ะผะตัั‚ะพะฟะพะปะพะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒะพ ะฒั…ะพะดะฝั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธัั… ะธะปะธ ะฒะธะดะตะพ. + +[ะŸั€ะธะผะตั€ั‹ ะฟั€ะตะดัะบะฐะทะฐะฝะธั](predict.md){ .md-button } + +## [ะญะบัะฟะพั€ั‚ (Export)](export.md) + +ะ ะตะถะธะผ ัะบัะฟะพั€ั‚ะฐ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะธั ะผะพะดะตะปะธ YOLOv8 ะฒ ั„ะพั€ะผะฐั‚, ะบะพั‚ะพั€ั‹ะน ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดะปั ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั. ะ’ ัั‚ะพะผ ั€ะตะถะธะผะต ะผะพะดะตะปัŒ ะฟั€ะตะพะฑั€ะฐะทัƒะตั‚ัั ะฒ ั„ะพั€ะผะฐั‚, ะบะพั‚ะพั€ั‹ะน ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดั€ัƒะณะธะผะธ ะฟั€ะพะณั€ะฐะผะผะฝั‹ะผะธ ะฟั€ะธะปะพะถะตะฝะธัะผะธ ะธะปะธ ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ะผะธ ัƒัั‚ั€ะพะนัั‚ะฒะฐะผะธ. ะญั‚ะพั‚ ั€ะตะถะธะผ ะฟะพะปะตะทะตะฝ ะฟั€ะธ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธะธ ะผะพะดะตะปะธ ะฒ ะฟั€ะพะธะทะฒะพะดัั‚ะฒะตะฝะฝะพะน ัั€ะตะดะต. + +[ะŸั€ะธะผะตั€ั‹ ัะบัะฟะพั€ั‚ะฐ](export.md){ .md-button } + +## [ะžั‚ัะปะตะถะธะฒะฐะฝะธะต (Track)](track.md) + +ะ ะตะถะธะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะผะพะดะตะปะธ YOLOv8. ะ’ ัั‚ะพะผ ั€ะตะถะธะผะต ะผะพะดะตะปัŒ ะทะฐะณั€ัƒะถะฐะตั‚ัั ะธะท ั„ะฐะนะปะฐ ะบะพะฝั‚ั€ะพะปัŒะฝะพะน ั‚ะพั‡ะบะธ, ะธ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒ ะผะพะถะตั‚ ะฟั€ะตะดะพัั‚ะฐะฒะธั‚ัŒ ะฟั€ัะผัƒัŽ ะฒะธะดะตะพั‚ั€ะฐะฝัะปัั†ะธัŽ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ. ะญั‚ะพั‚ ั€ะตะถะธะผ ะฟะพะปะตะทะตะฝ ะดะปั ะฟั€ะธะปะพะถะตะฝะธะน, ั‚ะฐะบะธั… ะบะฐะบ ัะธัั‚ะตะผั‹ ะฒะธะดะตะพะฝะฐะฑะปัŽะดะตะฝะธั ะธะปะธ ะฑะตัะฟะธะปะพั‚ะฝั‹ะต ะฐะฒั‚ะพะผะพะฑะธะปะธ. + +[ะŸั€ะธะผะตั€ั‹ ะพั‚ัะปะตะถะธะฒะฐะฝะธั](track.md){ .md-button } + +## [ะ‘ะตะฝั‡ะผะฐั€ะบะธะฝะณ (Benchmark)](benchmark.md) + +ะ ะตะถะธะผ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะดะปั ะฟั€ะพั„ะธะปะธั€ะพะฒะฐะฝะธั ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ ะดะปั YOLOv8. ะ‘ะตะฝั‡ะผะฐั€ะบะธ ะฟั€ะตะดะพัั‚ะฐะฒะปััŽั‚ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ ั€ะฐะทะผะตั€ะต ัะบัะฟะพั€ั‚ะธั€ัƒะตะผะพะณะพ ั„ะพั€ะผะฐั‚ะฐ, ะตะณะพ ะผะตั‚ั€ะธะบะฐั… `mAP50-95` (ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ, ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธ ะฟะพะทั‹) ะธะปะธ ะผะตั‚ั€ะธะบะฐั… `accuracy_top5` (ะดะปั ะบะปะฐััะธั„ะธะบะฐั†ะธะธ), ะฐ ั‚ะฐะบะถะต ะฒั€ะตะผั ะฒั‹ะฒะพะดะฐ ะฒ ะผะธะปะปะธัะตะบัƒะฝะดะฐั… ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ, ั‚ะฐะบะธั… ะบะฐะบ ONNX, OpenVINO, TensorRT ะธ ะดั€ัƒะณะธั…. ะญั‚ะฐ ะธะฝั„ะพั€ะผะฐั†ะธั ะผะพะถะตั‚ ะฟะพะผะพั‡ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัะผ ะฒั‹ะฑั€ะฐั‚ัŒ ะพะฟั‚ะธะผะฐะปัŒะฝั‹ะน ั„ะพั€ะผะฐั‚ ัะบัะฟะพั€ั‚ะฐ ะดะปั ะธั… ะบะพะฝะบั€ะตั‚ะฝะพะณะพ ัั†ะตะฝะฐั€ะธั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฝะฐ ะพัะฝะพะฒะต ะธั… ั‚ั€ะตะฑะพะฒะฐะฝะธะน ะบ ัะบะพั€ะพัั‚ะธ ะธ ั‚ะพั‡ะฝะพัั‚ะธ. 
+ +[ะŸั€ะธะผะตั€ั‹ ะฑะตะฝั‡ะผะฐั€ะบะธะฝะณะฐ](benchmark.md){ .md-button } diff --git a/ultralytics/docs/ru/modes/index.md:Zone.Identifier b/ultralytics/docs/ru/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/predict.md b/ultralytics/docs/ru/modes/predict.md new file mode 100755 index 0000000..3e849a4 --- /dev/null +++ b/ultralytics/docs/ru/modes/predict.md @@ -0,0 +1,226 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ั€ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน YOLOv8 ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡. ะ˜ะทัƒั‡ะธั‚ะต ั€ะฐะทะปะธั‡ะฝั‹ะต ะธัั‚ะพั‡ะฝะธะบะธ ะฒั‹ะฒะพะดะฐ, ั‚ะฐะบะธะต ะบะฐะบ ะธะทะพะฑั€ะฐะถะตะฝะธั, ะฒะธะดะตะพ ะธ ั„ะพั€ะผะฐั‚ั‹ ะดะฐะฝะฝั‹ั…. +keywords: Ultralytics, YOLOv8, ั€ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน, ะธัั‚ะพั‡ะฝะธะบะธ ะฒั‹ะฒะพะดะฐ, ะทะฐะดะฐั‡ะธ ะฟั€ะตะดัะบะฐะทะฐะฝะธั, ั€ะตะถะธะผ ะฟะพั‚ะพะบะพะฒะพะน ะฟะตั€ะตะดะฐั‡ะธ, ะพะฑั€ะฐะฑะพั‚ะบะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะพะฑั€ะฐะฑะพั‚ะบะฐ ะฒะธะดะตะพ, ะผะฐัˆะธะฝะฝะพะต ะพะฑัƒั‡ะตะฝะธะต, ะธัะบัƒััั‚ะฒะตะฝะฝั‹ะน ะธะฝั‚ะตะปะปะตะบั‚ +--- + +# ะŸั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะต ะผะพะดะตะปัŒัŽ Ultralytics YOLO + +ะญะบะพัะธัั‚ะตะผะฐ ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ Ultralytics YOLO + +## ะ’ะฒะตะดะตะฝะธะต + +ะ’ ะผะธั€ะต ะผะฐัˆะธะฝะฝะพะณะพ ะพะฑัƒั‡ะตะฝะธั ะธ ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั ะฟั€ะพั†ะตัั ะธะทะฒะปะตั‡ะตะฝะธั ะธะฝั„ะพั€ะผะฐั†ะธะธ ะธะท ะฒะธะทัƒะฐะปัŒะฝั‹ั… ะดะฐะฝะฝั‹ั… ะฝะฐะทั‹ะฒะฐะตั‚ัั 'ะฒั‹ะฒะพะดะพะผ' ะธะปะธ 'ะฟั€ะตะดัะบะฐะทะฐะฝะธะตะผ'. Ultralytics YOLOv8 ะฟั€ะตะดะปะฐะณะฐะตั‚ ะผะพั‰ะฝัƒัŽ ั„ัƒะฝะบั†ะธัŽ, ะธะทะฒะตัั‚ะฝัƒัŽ ะบะฐะบ **ั€ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน**, ะบะพั‚ะพั€ั‹ะน ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝ ะดะปั ะฒั‹ัะพะบะพะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพะณะพ ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ ะฒั‹ะฒะพะดะฐ ะฝะฐ ัˆะธั€ะพะบะธะน ัะฟะตะบั‚ั€ ะธัั‚ะพั‡ะฝะธะบะพะฒ ะดะฐะฝะฝั‹ั…. + +

+
+ +
+ ะกะผะพั‚ั€ะตั‚ัŒ: ะšะฐะบ ะธะทะฒะปะตั‡ัŒ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะธะท ะผะพะดะตะปะธ Ultralytics YOLOv8 ะดะปั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธั… ะฟั€ะพะตะบั‚ะพะฒ. +

+ +## ะŸั€ะธะบะปะฐะดะฝั‹ะต ะพะฑะปะฐัั‚ะธ + +| ะŸั€ะพะธะทะฒะพะดัั‚ะฒะพ | ะกะฟะพั€ั‚ | ะ‘ะตะทะพะฟะฐัะฝะพัั‚ัŒ | +|:-------------------------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------------------------:| +| ![ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะฐะฒั‚ะพะทะฐะฟั‡ะฐัั‚ะตะน](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ั„ัƒั‚ะฑะพะปะธัั‚ะพะฒ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะฟะฐะดะตะฝะธั ะปัŽะดะตะน](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะฐะฒั‚ะพะทะฐะฟั‡ะฐัั‚ะตะน | ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ั„ัƒั‚ะฑะพะปะธัั‚ะพะฒ | ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะฟะฐะดะตะฝะธั ะปัŽะดะตะน | + +## ะŸะพั‡ะตะผัƒ ัั‚ะพะธั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ Ultralytics YOLO ะดะปั ะฒั‹ะฒะพะดะฐ? + +ะ’ะพั‚ ะฟะพั‡ะตะผัƒ ะฒะฐะผ ัะปะตะดัƒะตั‚ ั€ะฐััะผะพั‚ั€ะตั‚ัŒ ั€ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน YOLOv8 ะดะปั ะฒะฐัˆะธั… ั€ะฐะทะปะธั‡ะฝั‹ั… ะฟะพั‚ั€ะตะฑะฝะพัั‚ะตะน ะฒ ะฒั‹ะฒะพะดะต: + +- **ะฃะฝะธะฒะตั€ัะฐะปัŒะฝะพัั‚ัŒ:** ะ’ะพะทะผะพะถะฝะพัั‚ัŒ ะดะตะปะฐั‚ัŒ ะฒั‹ะฒะพะดั‹ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัะผ, ะฒะธะดะตะพ ะธ ะดะฐะถะต ะฟะพั‚ะพะบะพะฒั‹ะผ ั‚ั€ะฐะฝัะปัั†ะธัะผ. +- **ะŸั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ:** ะ ะฐะทั€ะฐะฑะพั‚ะฐะฝ ะดะปั ะพะฑั€ะฐะฑะพั‚ะบะธ ะดะฐะฝะฝั‹ั… ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะฑะตะท ะฟะพั‚ะตั€ะธ ั‚ะพั‡ะฝะพัั‚ะธ. +- **ะŸั€ะพัั‚ะพั‚ะฐ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั:** ะ˜ะฝั‚ัƒะธั‚ะธะฒะฝะพ ะฟะพะฝัั‚ะฝั‹ะต ะธะฝั‚ะตั€ั„ะตะนัั‹ Python ะธ CLI ะดะปั ะฑั‹ัั‚ั€ะพะณะพ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั ะธ ั‚ะตัั‚ะธั€ะพะฒะฐะฝะธั. +- **ะ’ั‹ัะพะบะฐั ะฝะฐัั‚ั€ะฐะธะฒะฐะตะผะพัั‚ัŒ:** ะ ะฐะทะปะธั‡ะฝั‹ะต ะฝะฐัั‚ั€ะพะนะบะธ ะธ ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะดะปั ะฝะฐัั‚ั€ะพะนะบะธ ะฟะพะฒะตะดะตะฝะธั ะผะพะดะตะปะธ ะฒั‹ะฒะพะดะฐ ะฒ ัะพะพั‚ะฒะตั‚ัั‚ะฒะธะธ ั ะฒะฐัˆะธะผะธ ะบะพะฝะบั€ะตั‚ะฝั‹ะผะธ ั‚ั€ะตะฑะพะฒะฐะฝะธัะผะธ. + +### ะšะปัŽั‡ะตะฒั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ ั€ะตะถะธะผะฐ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน + +ะ ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน YOLOv8 ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝ, ั‡ั‚ะพะฑั‹ ะฑั‹ั‚ัŒ ะฝะฐะดะตะถะฝั‹ะผ ะธ ัƒะฝะธะฒะตั€ัะฐะปัŒะฝั‹ะผ, ะพะฝ ะฒะบะปัŽั‡ะฐะตั‚ ะฒ ัะตะฑั: + +- **ะกะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ั ะฝะตัะบะพะปัŒะบะธะผะธ ะธัั‚ะพั‡ะฝะธะบะฐะผะธ ะดะฐะฝะฝั‹ั…:** ะฝะตะทะฐะฒะธัะธะผะพ ะพั‚ ั‚ะพะณะพ, ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะปะธ ะฒะฐัˆะธ ะดะฐะฝะฝั‹ะต ะฒ ะฒะธะดะต ะพั‚ะดะตะปัŒะฝั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะบะพะปะปะตะบั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะฒะธะดะตะพั„ะฐะนะปะพะฒ ะธะปะธ ะฟะพั‚ะพะบะพะฒะพะณะพ ะฒะธะดะตะพ, ั€ะตะถะธะผ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฟะพะบั€ั‹ะฒะฐะตั‚ ะฒัะต ัั‚ะพ. +- **ะ ะตะถะธะผ ะฟะพั‚ะพะบะพะฒะพะน ะฟะตั€ะตะดะฐั‡ะธ:** ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ั„ัƒะฝะบั†ะธัŽ ะฟะพั‚ะพะบะพะฒะพะน ะฟะตั€ะตะดะฐั‡ะธ ะดะปั ัะพะทะดะฐะฝะธั ะณะตะฝะตั€ะฐั‚ะพั€ะฐ ะพะฑัŠะตะบั‚ะพะฒ `Results`, ัะบะพะฝะพะผัั‰ะตะณะพ ะฟะฐะผัั‚ัŒ. ะะบั‚ะธะฒะธั€ัƒะนั‚ะต ัั‚ะพ, ัƒัั‚ะฐะฝะพะฒะธะฒ `stream=True` ะฒ ะผะตั‚ะพะดะต ะฒั‹ะทะพะฒะฐ ะฟั€ะตะดะธะบั‚ะพั€ะฐ. +- **ะŸะฐะบะตั‚ะฝะฐั ะพะฑั€ะฐะฑะพั‚ะบะฐ:** ะ’ะพะทะผะพะถะฝะพัั‚ัŒ ะพะฑั€ะฐะฑะฐั‚ั‹ะฒะฐั‚ัŒ ะฝะตัะบะพะปัŒะบะพ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธะปะธ ะฒะธะดะตะพะบะฐะดั€ะพะฒ ะทะฐ ะพะดะธะฝ ะฟะฐะบะตั‚, ั‡ั‚ะพ ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพ ัƒัะบะพั€ัะตั‚ ะฒั€ะตะผั ะฒั‹ะฒะพะดะฐ. 
+- **ะ”ั€ัƒะถะตัั‚ะฒะตะฝะฝะฐั ะธะฝั‚ะตะณั€ะฐั†ะธั:** ะ›ะตะณะบะพ ะธะฝั‚ะตะณั€ะธั€ัƒะตั‚ัั ั ััƒั‰ะตัั‚ะฒัƒัŽั‰ะธะผะธ ะดะฐะฝะฝั‹ะผะธ ะธ ะดั€ัƒะณะธะผะธ ะฟั€ะพะณั€ะฐะผะผะฝั‹ะผะธ ะบะพะผะฟะพะฝะตะฝั‚ะฐะผะธ ะฑะปะฐะณะพะดะฐั€ั ะณะธะฑะบะพะผัƒ API. + +ะœะพะดะตะปะธ Ultralytics YOLO ะฒะพะทะฒั€ะฐั‰ะฐัŽั‚ ะปะธะฑะพ ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ `Results`, ะปะธะฑะพ ะณะตะฝะตั€ะฐั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ `Results` ะฒ Python, ัะบะพะฝะพะผัั‰ะธะน ะฟะฐะผัั‚ัŒ, ะบะพะณะดะฐ `stream=True` ะฟะตั€ะตะดะฐะตั‚ัั ะฒ ะผะพะดะตะปัŒ ะฒะพ ะฒั€ะตะผั ะฒั‹ะฒะพะดะฐ: + +!!! Example "ะŸั€ะตะดัะบะฐะทะฐะฝะธะต" + + === "ะ’ะตั€ะฝัƒั‚ัŒ ัะฟะธัะพะบ ั `stream=False`" + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ + model = YOLO('yolov8n.pt') # ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะฐั ะผะพะดะตะปัŒ YOLOv8n + + # ะŸะฐะบะตั‚ะฝั‹ะน ะฒั‹ะฒะพะด ะฝะฐ ัะฟะธัะพะบ ะธะทะพะฑั€ะฐะถะตะฝะธะน + results = model(['im1.jpg', 'im2.jpg']) # ะฒะตั€ะฝัƒั‚ัŒ ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + + # ะžะฑั€ะฐะฑะพั‚ะบะฐ ัะฟะธัะบะฐ ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ + for result in results: + boxes = result.boxes # ะžะฑัŠะตะบั‚ Boxes ะดะปั ะฒั‹ะฒะพะดะฐ bbox + masks = result.masks # ะžะฑัŠะตะบั‚ Masks ะดะปั ะฒั‹ะฒะพะดะฐ ะผะฐัะพะบ ัะตะณะผะตะฝั‚ะฐั†ะธะธ + keypoints = result.keypoints # ะžะฑัŠะตะบั‚ Keypoints ะดะปั ะฒั‹ะฒะพะดะฐ ะฟะพะท + probs = result.probs # ะžะฑัŠะตะบั‚ Probs ะดะปั ะฒั‹ะฒะพะดะฐ ะฒะตั€ะพัั‚ะฝะพัั‚ะตะน ะบะปะฐััะธั„ะธะบะฐั†ะธะธ + ``` + + === "ะ’ะตั€ะฝัƒั‚ัŒ ะณะตะฝะตั€ะฐั‚ะพั€ ั `stream=True`" + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ + model = YOLO('yolov8n.pt') # ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะฐั ะผะพะดะตะปัŒ YOLOv8n + + # ะŸะฐะบะตั‚ะฝั‹ะน ะฒั‹ะฒะพะด ะฝะฐ ัะฟะธัะพะบ ะธะทะพะฑั€ะฐะถะตะฝะธะน + results = model(['im1.jpg', 'im2.jpg'], stream=True) # ะฒะตั€ะฝัƒั‚ัŒ ะณะตะฝะตั€ะฐั‚ะพั€ ะพะฑัŠะตะบั‚ะพะฒ Results + + # ะžะฑั€ะฐะฑะพั‚ะบะฐ ะณะตะฝะตั€ะฐั‚ะพั€ะฐ ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ + for result in results: + boxes = result.boxes # ะžะฑัŠะตะบั‚ Boxes ะดะปั ะฒั‹ะฒะพะดะฐ bbox + masks = result.masks # ะžะฑัŠะตะบั‚ Masks ะดะปั ะฒั‹ะฒะพะดะฐ ะผะฐัะพะบ ัะตะณะผะตะฝั‚ะฐั†ะธะธ + keypoints = result.keypoints # ะžะฑัŠะตะบั‚ Keypoints ะดะปั ะฒั‹ะฒะพะดะฐ ะฟะพะท + probs = result.probs # ะžะฑัŠะตะบั‚ Probs ะดะปั ะฒั‹ะฒะพะดะฐ ะฒะตั€ะพัั‚ะฝะพัั‚ะตะน ะบะปะฐััะธั„ะธะบะฐั†ะธะธ + ``` + +## ะ˜ัั‚ะพั‡ะฝะธะบะธ ะฒั‹ะฒะพะดะฐ + +YOLOv8 ะผะพะถะตั‚ ะพะฑั€ะฐะฑะฐั‚ั‹ะฒะฐั‚ัŒ ั€ะฐะทะปะธั‡ะฝั‹ะต ั‚ะธะฟั‹ ะฒั…ะพะดะฝั‹ั… ะธัั‚ะพั‡ะฝะธะบะพะฒ ะดะปั ะฒั‹ะฒะพะดะฐ, ะบะฐะบ ะฟะพะบะฐะทะฐะฝะพ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. ะ˜ัั‚ะพั‡ะฝะธะบะธ ะฒะบะปัŽั‡ะฐัŽั‚ ัั‚ะฐั‚ะธั‡ะตัะบะธะต ะธะทะพะฑั€ะฐะถะตะฝะธั, ะฒะธะดะตะพะฟะพั‚ะพะบะธ ะธ ั€ะฐะทะปะธั‡ะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ะดะฐะฝะฝั‹ั…. ะ’ ั‚ะฐะฑะปะธั†ะต ั‚ะฐะบะถะต ัƒะบะฐะทะฐะฝะพ, ะผะพะถะฝะพ ะปะธ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะบะฐะถะดั‹ะน ะธัั‚ะพั‡ะฝะธะบ ะฒ ั€ะตะถะธะผะต ะฟะพั‚ะพะบะพะฒะพะน ะฟะตั€ะตะดะฐั‡ะธ ั ะฐั€ะณัƒะผะตะฝั‚ะพะผ `stream=True` โœ…. ะ ะตะถะธะผ ะฟะพั‚ะพะบะพะฒะพะน ะฟะตั€ะตะดะฐั‡ะธ ะฟะพะปะตะทะตะฝ ะดะปั ะพะฑั€ะฐะฑะพั‚ะบะธ ะฒะธะดะตะพ ะธะปะธ ะถะธะฒั‹ั… ั‚ั€ะฐะฝัะปัั†ะธะน, ั‚ะฐะบ ะบะฐะบ ัะพะทะดะฐะตั‚ ะณะตะฝะตั€ะฐั‚ะพั€ ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ ะฒะผะตัั‚ะพ ะทะฐะณั€ัƒะทะบะธ ะฒัะตั… ะบะฐะดั€ะพะฒ ะฒ ะฟะฐะผัั‚ัŒ. + +!!! Tip "ะกะพะฒะตั‚" + + ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต `stream=True` ะดะปั ะพะฑั€ะฐะฑะพั‚ะบะธ ะดะปะธะฝะฝั‹ั… ะฒะธะดะตะพั€ะพะปะธะบะพะฒ ะธะปะธ ะฑะพะปัŒัˆะธั… ะฝะฐะฑะพั€ะพะฒ ะดะฐะฝะฝั‹ั… ะดะปั ัั„ั„ะตะบั‚ะธะฒะฝะพะณะพ ัƒะฟั€ะฐะฒะปะตะฝะธั ะฟะฐะผัั‚ัŒัŽ. 
ะšะพะณะดะฐ `stream=False`, ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะดะปั ะฒัะตั… ะบะฐะดั€ะพะฒ ะธะปะธ ั‚ะพั‡ะตะบ ะดะฐะฝะฝั‹ั… ั…ั€ะฐะฝัั‚ัั ะฒ ะฟะฐะผัั‚ะธ, ั‡ั‚ะพ ะผะพะถะตั‚ ะฑั‹ัั‚ั€ะพ ะฝะฐะบะพะฟะธั‚ัŒัั ะธ ะฒั‹ะทะฒะฐั‚ัŒ ะพัˆะธะฑะบะธ ะฟะตั€ะตะฟะพะปะฝะตะฝะธั ะฟะฐะผัั‚ะธ ะดะปั ะฑะพะปัŒัˆะธั… ะฒั…ะพะดะพะฒ. ะ’ ะพั‚ะปะธั‡ะธะต ะพั‚ ัั‚ะพะณะพ, `stream=True` ะธัะฟะพะปัŒะทัƒะตั‚ ะณะตะฝะตั€ะฐั‚ะพั€, ะบะพั‚ะพั€ั‹ะน ั…ั€ะฐะฝะธั‚ ะฒ ะฟะฐะผัั‚ะธ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ั‚ะพะปัŒะบะพ ั‚ะตะบัƒั‰ะตะณะพ ะบะฐะดั€ะฐ ะธะปะธ ั‚ะพั‡ะบะธ ะดะฐะฝะฝั‹ั…, ะทะฝะฐั‡ะธั‚ะตะปัŒะฝะพ ัะพะบั€ะฐั‰ะฐั ะฟะพั‚ั€ะตะฑะปะตะฝะธะต ะฟะฐะผัั‚ะธ ะธ ะฟั€ะตะดะพั‚ะฒั€ะฐั‰ะฐั ะฟั€ะพะฑะปะตะผั‹ ั ะฟะตั€ะตะฟะพะปะฝะตะฝะธะตะผ ะฟะฐะผัั‚ะธ. + +| ะ˜ัั‚ะพั‡ะฝะธะบ | ะั€ะณัƒะผะตะฝั‚ | ะขะธะฟ | ะ—ะฐะผะตั‚ะบะธ | +|-----------------|--------------------------------------------|------------------|---------------------------------------------------------------------------------------------------------------| +| ะธะทะพะฑั€ะฐะถะตะฝะธะต | `'image.jpg'` | `str` ะธะปะธ `Path` | ะžะดะธะฝะพั‡ะฝั‹ะน ั„ะฐะนะป ะธะทะพะฑั€ะฐะถะตะฝะธั. | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | URL ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต. | +| ัะบั€ะธะฝัˆะพั‚ | `'screen'` | `str` | ะกะฝัั‚ัŒ ัะบั€ะธะฝัˆะพั‚. | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | ะคะพั€ะผะฐั‚ HWC ั RGB ะบะฐะฝะฐะปะฐะผะธ. | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | ะคะพั€ะผะฐั‚ HWC ั BGR ะบะฐะฝะฐะปะฐะผะธ `uint8 (0-255)`. | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | ะคะพั€ะผะฐั‚ HWC ั BGR ะบะฐะฝะฐะปะฐะผะธ `uint8 (0-255)`. | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | ะคะพั€ะผะฐั‚ BCHW ั RGB ะบะฐะฝะฐะปะฐะผะธ `float32 (0.0-1.0)`. | +| CSV | `'sources.csv'` | `str` ะธะปะธ `Path` | CSV-ั„ะฐะนะป, ัะพะดะตั€ะถะฐั‰ะธะน ะฟัƒั‚ะธ ะบ ะธะทะพะฑั€ะฐะถะตะฝะธัะผ, ะฒะธะดะตะพ ะธะปะธ ะบะฐั‚ะฐะปะพะณะฐะผ. | +| ะฒะธะดะตะพ โœ… | `'video.mp4'` | `str` ะธะปะธ `Path` | ะ’ะธะดะตะพั„ะฐะนะป ะฒ ั„ะพั€ะผะฐั‚ะฐั…, ะฒั€ะพะดะต MP4, AVI ะธ ั‚.ะด. | +| ะบะฐั‚ะฐะปะพะณ โœ… | `'path/'` | `str` ะธะปะธ `Path` | ะŸัƒั‚ัŒ ะบ ะบะฐั‚ะฐะปะพะณัƒ, ัะพะดะตั€ะถะฐั‰ะตะผัƒ ะธะทะพะฑั€ะฐะถะตะฝะธั ะธะปะธ ะฒะธะดะตะพ. | +| ะณะปะพะฑ โœ… | `'path/*.jpg'` | `str` | ะจะฐะฑะปะพะฝ ะณะปะพะฑะฐ ะดะปั ัะพะฟะพัั‚ะฐะฒะปะตะฝะธั ะฝะตัะบะพะปัŒะบะธั… ั„ะฐะนะปะพะฒ. ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ัะธะผะฒะพะป `*` ะบะฐะบ ะฟะพะดัั‚ะฐะฝะพะฒะพั‡ะฝั‹ะน. | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | URL ะฝะฐ ะฒะธะดะตะพ YouTube. | +| ะฟะพั‚ะพะบ โœ… | `'rtsp://example.com/media.mp4'` | `str` | URL ะดะปั ะฟะพั‚ะพะบะพะฒั‹ั… ะฟั€ะพั‚ะพะบะพะปะพะฒ, ั‚ะฐะบะธั… ะบะฐะบ RTSP, RTMP, TCP, ะธะปะธ IP-ะฐะดั€ะตั. | +| ะผะฝะพะณะพ-ะฟะพั‚ะพะบะพะฒ โœ… | `'list.streams'` | `str` ะธะปะธ `Path` | ะขะตะบัั‚ะพะฒั‹ะน ั„ะฐะนะป `*.streams` ั ะพะดะฝะธะผ URL ะฟะพั‚ะพะบะฐ ะฝะฐ ัั‚ั€ะพะบัƒ, ะฝะฐะฟั€ะธะผะตั€, 8 ะฟะพั‚ะพะบะพะฒ ะทะฐะฟัƒัั‚ัั‚ัั ั ะฟะฐะบะตั‚ะพะผ ั€ะฐะทะผะตั€ะพะผ 8. | + +ะะธะถะต ะฟั€ะธะฒะตะดะตะฝั‹ ะฟั€ะธะผะตั€ั‹ ะบะพะดะฐ ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะบะฐะถะดะพะณะพ ั‚ะธะฟะฐ ะธัั‚ะพั‡ะฝะธะบะฐ: + +!!! Example "ะ˜ัั‚ะพั‡ะฝะธะบะธ ะฟั€ะตะดัะบะฐะทะฐะฝะธะน" + + === "ะธะทะพะฑั€ะฐะถะตะฝะธะต" + ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ั„ะฐะนะป ะธะทะพะฑั€ะฐะถะตะฝะธั. 
+ ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะžะฟั€ะตะดะตะปะธั‚ะต ะฟัƒั‚ัŒ ะบ ั„ะฐะนะปัƒ ะธะทะพะฑั€ะฐะถะตะฝะธั + source = 'path/to/image.jpg' + + # ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "ัะบั€ะธะฝัˆะพั‚" + ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ั‚ะตะบัƒั‰ะตะต ัะพะดะตั€ะถะธะผะพะต ัะบั€ะฐะฝะฐ ะฒ ะฒะธะดะต ัะบั€ะธะฝัˆะพั‚ะฐ. + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะžะฟั€ะตะดะตะปะตะฝะธะต ั‚ะตะบัƒั‰ะตะณะพ ัะบั€ะธะฝัˆะพั‚ะฐ ะบะฐะบ ะธัั‚ะพั‡ะฝะธะบะฐ + source = 'screen' + + # ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "URL" + ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต ะธะปะธ ะฒะธะดะตะพ, ั€ะฐะทะผะตั‰ะตะฝะฝั‹ะต ัƒะดะฐะปะตะฝะฝะพ ะฟะพ URL. + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะžะฟั€ะตะดะตะปะตะฝะธะต URL ัƒะดะฐะปะตะฝะฝะพะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั ะธะปะธ ะฒะธะดะตะพ + source = 'https://ultralytics.com/images/bus.jpg' + + # ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะฒั‹ะฒะพะด ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "PIL" + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต, ะพั‚ะบั€ั‹ั‚ะพะต ั ะฟะพะผะพั‰ัŒัŽ Python Imaging Library (PIL). + ```python + from PIL import Image + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะžั‚ะบั€ั‹ั‚ะธะต ะธะทะพะฑั€ะฐะถะตะฝะธั ั ะฟะพะผะพั‰ัŒัŽ PIL + source = Image.open('path/to/image.jpg') + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "OpenCV" + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต, ะฟั€ะพั‡ะธั‚ะฐะฝะฝะพะต ั ะฟะพะผะพั‰ัŒัŽ OpenCV. + ```python + import cv2 + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะงั‚ะตะฝะธะต ะธะทะพะฑั€ะฐะถะตะฝะธั ั ะฟะพะผะพั‰ัŒัŽ OpenCV + source = cv2.imread('path/to/image.jpg') + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "numpy" + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต, ะฟั€ะตะดัั‚ะฐะฒะปะตะฝะฝะพะต ะฒ ะฒะธะดะต ะผะฐััะธะฒะฐ numpy. + ```python + import numpy as np + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะกะพะทะดะฐะฝะธะต ัะปัƒั‡ะฐะนะฝะพะณะพ ะผะฐััะธะฒะฐ numpy ั ั„ะพั€ะผะพะน HWC (640, 640, 3) ัะพ ะทะฝะฐั‡ะตะฝะธัะผะธ ะฒ ะดะธะฐะฟะฐะทะพะฝะต [0, 255] ะธ ั‚ะธะฟะพะผ uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results + ``` + + === "torch" + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต, ะฟั€ะตะดัั‚ะฐะฒะปะตะฝะฝะพะต ะฒ ะฒะธะดะต ั‚ะตะฝะทะพั€ะฐ PyTorch. 
+ ```python + import torch + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n + model = YOLO('yolov8n.pt') + + # ะกะพะทะดะฐะฝะธะต ัะปัƒั‡ะฐะนะฝะพะณะพ ั‚ะตะฝะทะพั€ะฐ torch ั ั„ะพั€ะผะพะน BCHW (1, 3, 640, 640) ัะพ ะทะฝะฐั‡ะตะฝะธัะผะธ ะฒ ะดะธะฐะฟะฐะทะพะฝะต [0, 1] ะธ ั‚ะธะฟะพะผ float32 + source = torch.rand(1, 3, 640, 640, dtype=torch.float32) + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะฒั‹ะฒะพะดะฐ ะฝะฐ ะธัั‚ะพั‡ะฝะธะบ + results = model(source) # ัะฟะธัะพะบ ะพะฑัŠะตะบั‚ะพะฒ Results diff --git a/ultralytics/docs/ru/modes/predict.md:Zone.Identifier b/ultralytics/docs/ru/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/track.md b/ultralytics/docs/ru/modes/track.md new file mode 100755 index 0000000..97bd877 --- /dev/null +++ b/ultralytics/docs/ru/modes/track.md @@ -0,0 +1,200 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ Ultralytics YOLO ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะฒะธะดะตะพะฟะพั‚ะพะบะฐั…. ะ ัƒะบะพะฒะพะดัั‚ะฒะฐ ะฟะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธัŽ ั€ะฐะทะปะธั‡ะฝั‹ั… ั‚ั€ะตะบะตั€ะพะฒ ะธ ะฝะฐัั‚ั€ะพะนะบะต ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ ั‚ั€ะตะบะตั€ะฐ. +keywords: Ultralytics, YOLO, ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะฒะธะดะตะพะฟะพั‚ะพะบะธ, BoT-SORT, ByteTrack, ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฝะฐ Python, ั€ัƒะบะพะฒะพะดัั‚ะฒะพ CLI +--- + +# ะœะฝะพะถะตัั‚ะฒะตะฝะฝะพะต ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ั ะฟะพะผะพั‰ัŒัŽ Ultralytics YOLO + +ะŸั€ะธะผะตั€ั‹ ะผะฝะพะถะตัั‚ะฒะตะฝะฝะพะณะพ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ + +ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ะฒ ัั„ะตั€ะต ะฒะธะดะตะพะฐะฝะฐะปะธั‚ะธะบะธ ัะฒะปัะตั‚ัั ะบะปัŽั‡ะตะฒะพะน ะทะฐะดะฐั‡ะตะน, ะบะพั‚ะพั€ะฐั ะพะฟั€ะตะดะตะปัะตั‚ ะฝะต ั‚ะพะปัŒะบะพ ะผะตัั‚ะพะฟะพะปะพะถะตะฝะธะต ะธ ะบะปะฐัั ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะบะฐะดั€ะต, ะฝะพ ั‚ะฐะบะถะต ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ัƒะฝะธะบะฐะปัŒะฝั‹ะน ID ะดะปั ะบะฐะถะดะพะณะพ ะพะฑะฝะฐั€ัƒะถะตะฝะฝะพะณะพ ะพะฑัŠะตะบั‚ะฐ ะฟะพ ะผะตั€ะต ั€ะฐะทะฒะธั‚ะธั ะฒะธะดะตะพ. ะŸั€ะธะปะพะถะตะฝะธั ะฑะตะทะณั€ะฐะฝะธั‡ะฝั‹ โ€” ะพั‚ ะฝะฐะฑะปัŽะดะตะฝะธั ะธ ะฑะตะทะพะฟะฐัะฝะพัั‚ะธ ะดะพ ะฐะฝะฐะปะธั‚ะธะบะธ ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ ะฒ ัะฟะพั€ั‚ะต. + +## ะŸะพั‡ะตะผัƒ ัั‚ะพะธั‚ ะฒั‹ะฑั€ะฐั‚ัŒ Ultralytics YOLO ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ? + +ะ’ั‹ะฒะพะด ั ั‚ั€ะตะบะตั€ะพะฒ Ultralytics ัะพะณะปะฐััƒะตั‚ัั ัะพ ัั‚ะฐะฝะดะฐั€ั‚ะฝั‹ะผ ะพะฑะฝะฐั€ัƒะถะตะฝะธะตะผ ะพะฑัŠะตะบั‚ะพะฒ, ะฝะพ ะธะผะตะตั‚ ะดะพะฑะฐะฒะปะตะฝะฝั‹ะต ID ะพะฑัŠะตะบั‚ะพะฒ. ะญั‚ะพ ัƒะฟั€ะพั‰ะฐะตั‚ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะฒะธะดะตะพะฟะพั‚ะพะบะฐั… ะธ ะฒั‹ะฟะพะปะฝะตะฝะธะต ะฟะพัะปะตะดัƒัŽั‰ะตะน ะฐะฝะฐะปะธั‚ะธะบะธ. ะ’ะพั‚ ะฟะพั‡ะตะผัƒ ะฒั‹ ะดะพะปะถะฝั‹ ั€ะฐััะผะพั‚ั€ะตั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต Ultralytics YOLO ะดะปั ะฒะฐัˆะธั… ะฟะพั‚ั€ะตะฑะฝะพัั‚ะตะน ะฒ ะพั‚ัะปะตะถะธะฒะฐะฝะธะธ ะพะฑัŠะตะบั‚ะพะฒ: + +- **ะญั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ:** ะžะฑั€ะฐะฑะพั‚ะบะฐ ะฒะธะดะตะพะฟะพั‚ะพะบะพะฒ ะฒ ั€ะตะถะธะผะต ั€ะตะฐะปัŒะฝะพะณะพ ะฒั€ะตะผะตะฝะธ ะฑะตะท ะฟะพั‚ะตั€ะธ ั‚ะพั‡ะฝะพัั‚ะธ. +- **ะ“ะธะฑะบะพัั‚ัŒ:** ะŸะพะดะดะตั€ะถะบะฐ ะผะฝะพะถะตัั‚ะฒะฐ ะฐะปะณะพั€ะธั‚ะผะพะฒ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะธ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะน. +- **ะŸั€ะพัั‚ะพั‚ะฐ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั:** ะŸั€ะพัั‚ะพะน Python API ะธ CLI-ะพะฟั†ะธะธ ะดะปั ะฑั‹ัั‚ั€ะพะน ะธะฝั‚ะตะณั€ะฐั†ะธะธ ะธ ั€ะฐะทะฒะตั€ั‚ั‹ะฒะฐะฝะธั. 
+- **ะะฐัั‚ั€ะฐะธะฒะฐะตะผะพัั‚ัŒ:** ะ›ะตะณะบะพัั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะผะธ ะพะฑัƒั‡ะตะฝะฝั‹ะผะธ ะผะพะดะตะปัะผะธ YOLO, ะฟะพะทะฒะพะปััŽั‰ะฐั ะธะฝั‚ะตะณั€ะฐั†ะธัŽ ะฒ ัะฟะตั†ะธั„ะธั‡ะตัะบะธะต ะดะปั ะดะพะผะตะฝะฐ ะฟั€ะธะปะพะถะตะฝะธั. + +

+
+ +
+ ะกะผะพั‚ั€ะธั‚ะต: ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ั Ultralytics YOLOv8. +

+ +## ะŸั€ะธะบะปะฐะดะฝั‹ะต ะฟั€ะธะผะตะฝะตะฝะธั + +| ะขั€ะฐะฝัะฟะพั€ั‚ | ะ ะธั‚ะตะนะป | ะะบะฒะฐะบัƒะปัŒั‚ัƒั€ะฐ | +|:---------------------------------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------------:| +| ![ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั‚ั€ะฐะฝัะฟะพั€ั‚ะฝั‹ั… ัั€ะตะดัั‚ะฒ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะปัŽะดะตะน](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั€ั‹ะฑ](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) | +| ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั‚ั€ะฐะฝัะฟะพั€ั‚ะฝั‹ั… ัั€ะตะดัั‚ะฒ | ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะปัŽะดะตะน | ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั€ั‹ะฑ | + +## ะšะปัŽั‡ะตะฒั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ + +Ultralytics YOLO ั€ะฐััˆะธั€ัะตั‚ ัะฒะพะธ ะฒะพะทะผะพะถะฝะพัั‚ะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะดะปั ะพะฑะตัะฟะตั‡ะตะฝะธั ะฝะฐะดะตะถะฝะพะณะพ ะธ ัƒะฝะธะฒะตั€ัะฐะปัŒะฝะพะณะพ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ: + +- **ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ:** ะ‘ะตะทะฟั€ะตั€ั‹ะฒะฝะพะต ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ะฒ ะฒะธะดะตะพ ั ะฒั‹ัะพะบะพะน ั‡ะฐัั‚ะพั‚ะพะน ะบะฐะดั€ะพะฒ. +- **ะŸะพะดะดะตั€ะถะบะฐ ะผะฝะพะถะตัั‚ะฒะฐ ั‚ั€ะตะบะตั€ะพะฒ:** ะ’ั‹ะฑะพั€ ะธะท ั€ะฐะทะฝะพะพะฑั€ะฐะทะธั ัƒัั‚ะฐะฝะพะฒะปะตะฝะฝั‹ั… ะฐะปะณะพั€ะธั‚ะผะพะฒ ะพั‚ัะปะตะถะธะฒะฐะฝะธั. +- **ะะฐัั‚ั€ะฐะธะฒะฐะตะผั‹ะต ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ ั‚ั€ะตะบะตั€ะพะฒ:** ะะฐัั‚ั€ะพะนะบะฐ ะฐะปะณะพั€ะธั‚ะผะฐ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะดะปั ะบะพะฝะบั€ะตั‚ะฝั‹ั… ั‚ั€ะตะฑะพะฒะฐะฝะธะน ะฟัƒั‚ะตะผ ั€ะตะณัƒะปะธั€ะพะฒะบะธ ั€ะฐะทะปะธั‡ะฝั‹ั… ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ. + +## ะ”ะพัั‚ัƒะฟะฝั‹ะต ั‚ั€ะตะบะตั€ั‹ + +Ultralytics YOLO ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ัะปะตะดัƒัŽั‰ะธะต ะฐะปะณะพั€ะธั‚ะผั‹ ะพั‚ัะปะตะถะธะฒะฐะฝะธั. ะ˜ั… ะผะพะถะฝะพ ะฒะบะปัŽั‡ะธั‚ัŒ, ะฟะตั€ะตะดะฐะฒ ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒัŽั‰ะธะน YAML ั„ะฐะนะป ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ, ะฝะฐะฟั€ะธะผะตั€ `tracker=tracker_type.yaml`: + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต `botsort.yaml`, ั‡ั‚ะพะฑั‹ ะฐะบั‚ะธะฒะธั€ะพะฒะฐั‚ัŒ ัั‚ะพั‚ ั‚ั€ะตะบะตั€. +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต `bytetrack.yaml`, ั‡ั‚ะพะฑั‹ ะฐะบั‚ะธะฒะธั€ะพะฒะฐั‚ัŒ ัั‚ะพั‚ ั‚ั€ะตะบะตั€. + +ะขั€ะตะบะตั€ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ - BoT-SORT. + +## ะžั‚ัะปะตะถะธะฒะฐะฝะธะต + +ะ”ะปั ะทะฐะฟัƒัะบะฐ ั‚ั€ะตะบะตั€ะฐ ะฝะฐ ะฒะธะดะตะพะฟะพั‚ะพะบะฐั… ะธัะฟะพะปัŒะทัƒะนั‚ะต ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ Detect, Segment ะธะปะธ Pose, ั‚ะฐะบะธะต ะบะฐะบ YOLOv8n, YOLOv8n-seg ะธ YOLOv8n-pose. + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะธะปะธ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ Detect + model = YOLO('yolov8n-seg.pt') # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ Segment + model = YOLO('yolov8n-pose.pt') # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ Pose + model = YOLO('path/to/best.pt') # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True) # ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั ั‚ั€ะตะบะตั€ะพะผ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml") # ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั ั‚ั€ะตะบะตั€ะพะผ ByteTrack + ``` + + === "CLI" + + ```bash + # ะ’ั‹ะฟะพะปะฝะธั‚ัŒ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ั ั€ะฐะทะปะธั‡ะฝั‹ะผะธ ะผะพะดะตะปัะผะธ ะธัะฟะพะปัŒะทัƒั ะบะพะผะฐะฝะดะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" # ะžั„ะธั†ะธะฐะปัŒะฝะฐั ะผะพะดะตะปัŒ Detect + yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4" # ะžั„ะธั†ะธะฐะปัŒะฝะฐั ะผะพะดะตะปัŒ Segment + yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4" # ะžั„ะธั†ะธะฐะปัŒะฝะฐั ะผะพะดะตะปัŒ Pose + yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4" # ะŸะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะฐั ะพะฑัƒั‡ะตะฝะฝะฐั ะผะพะดะตะปัŒ + + # ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ั‚ั€ะตะบะตั€ะฐ ByteTrack + yolo track model=path/to/best.pt tracker="bytetrack.yaml" + ``` + +ะšะฐะบ ะฒะธะดะฝะพ ะธะท ะฒั‹ัˆะตัƒะบะฐะทะฐะฝะฝะพะณะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั, ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะดะพัั‚ัƒะฟะฝะพ ะดะปั ะฒัะตั… ะผะพะดะตะปะตะน Detect, Segment ะธ Pose, ั€ะฐะฑะพั‚ะฐัŽั‰ะธั… ั ะฒะธะดะตะพ ะธะปะธ ะฟะพั‚ะพะบะพะฒั‹ะผะธ ะธัั‚ะพั‡ะฝะธะบะฐะผะธ. + +## ะšะพะฝั„ะธะณัƒั€ะฐั†ะธั + +### ะั€ะณัƒะผะตะฝั‚ั‹ ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั + +ะšะพะฝั„ะธะณัƒั€ะฐั†ะธั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะธะผะตะตั‚ ะพะฑั‰ะธะต ัะฒะพะนัั‚ะฒะฐ ั ั€ะตะถะธะผะพะผ Predict, ั‚ะฐะบะธะต ะบะฐะบ `conf`, `iou` ะธ `show`. ะ”ะปั ะดะฐะปัŒะฝะตะนัˆะตะน ะฝะฐัั‚ั€ะพะนะบะธ ะพะฑั€ะฐั‚ะธั‚ะตััŒ ะบ ัั‚ั€ะฐะฝะธั†ะต ะผะพะดะตะปะธ [Predict](https://docs.ultralytics.com/modes/predict/). + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะะฐัั‚ั€ะพะนั‚ะต ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะธ ะทะฐะฟัƒัั‚ะธั‚ะต ั‚ั€ะตะบะตั€ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True) + ``` + + === "CLI" + + ```bash + # ะะฐัั‚ั€ะพะนั‚ะต ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะธ ะทะฐะฟัƒัั‚ะธั‚ะต ั‚ั€ะตะบะตั€, ะธัะฟะพะปัŒะทัƒั ะบะพะผะฐะฝะดะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3, iou=0.5 show + ``` + +### ะ’ั‹ะฑะพั€ ั‚ั€ะตะบะตั€ะฐ + +Ultralytics ั‚ะฐะบะถะต ะฟะพะทะฒะพะปัะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะธะทะผะตะฝะตะฝะฝั‹ะน ั„ะฐะนะป ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ ั‚ั€ะตะบะตั€ะฐ. 
ะ”ะปั ัั‚ะพะณะพ ะฟั€ะพัั‚ะพ ัะดะตะปะฐะนั‚ะต ะบะพะฟะธัŽ ั„ะฐะนะปะฐ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ ั‚ั€ะตะบะตั€ะฐ (ะฝะฐะฟั€ะธะผะตั€, `custom_tracker.yaml`) ะธะท [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers) ะธ ะธะทะผะตะฝะธั‚ะต ะปัŽะฑั‹ะต ะฝะฐัั‚ั€ะพะนะบะธ (ะบั€ะพะผะต `tracker_type`) ะฒ ัะพะพั‚ะฒะตั‚ัั‚ะฒะธะธ ั ะฒะฐัˆะธะผะธ ะฟะพั‚ั€ะตะฑะฝะพัั‚ัะผะธ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ ะธ ะทะฐะฟัƒัั‚ะธั‚ะต ั‚ั€ะตะบะตั€ ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะผ ั„ะฐะนะปะพะผ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ ะธ ะทะฐะฟัƒัั‚ะธั‚ะต ั‚ั€ะตะบะตั€ ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะผ ั„ะฐะนะปะพะผ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ, ะธัะฟะพะปัŒะทัƒั ะบะพะผะฐะฝะดะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +ะ”ะปั ะฟะพะปะฝะพะณะพ ัะฟะธัะบะฐ ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑั€ะฐั‚ะธั‚ะตััŒ ะบ ัั‚ั€ะฐะฝะธั†ะต [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers). + +## ะŸั€ะธะผะตั€ั‹ ะฝะฐ Python + +### ะฆะธะบะป ัะพั…ั€ะฐะฝะตะฝะธั ัะปะตะดะพะฒ + +ะ’ะพั‚ ะฟั€ะธะผะตั€ ัะบั€ะธะฟั‚ะฐ Python, ะธัะฟะพะปัŒะทัƒัŽั‰ะธะน OpenCV (`cv2`) ะธ YOLOv8 ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะบะฐะดั€ะฐั… ะฒะธะดะตะพ. ะ’ ัั‚ะพะผ ัั†ะตะฝะฐั€ะธะธ ะฟั€ะตะดะฟะพะปะฐะณะฐะตั‚ัั, ั‡ั‚ะพ ะฒั‹ ัƒะถะต ัƒัั‚ะฐะฝะพะฒะธะปะธ ะฝะตะพะฑั…ะพะดะธะผั‹ะต ะฟะฐะบะตั‚ั‹ (`opencv-python` ะธ `ultralytics`). ะั€ะณัƒะผะตะฝั‚ `persist=True` ัƒะบะฐะทั‹ะฒะฐะตั‚ ั‚ั€ะตะบะตั€ัƒ, ั‡ั‚ะพ ั‚ะตะบัƒั‰ะตะต ะธะทะพะฑั€ะฐะถะตะฝะธะต ะธะปะธ ะบะฐะดั€ ัะฒะปัะตั‚ัั ัะปะตะดัƒัŽั‰ะธะผ ะฒ ะฟะพัะปะตะดะพะฒะฐั‚ะตะปัŒะฝะพัั‚ะธ ะธ ะพะถะธะดะฐะตั‚, ั‡ั‚ะพ ัะปะตะดั‹ ั ะฟั€ะตะดั‹ะดัƒั‰ะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั ะฑัƒะดัƒั‚ ะฟั€ะธััƒั‚ัั‚ะฒะพะฒะฐั‚ัŒ ะฒ ั‚ะตะบัƒั‰ะตะผ ะธะทะพะฑั€ะฐะถะตะฝะธะธ. + +!!! 
Example "ะฆะธะบะป ั ะฟะพั‚ะพะบะพะฒั‹ะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธะตะผ for-loop" + + ```python + import cv2 + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ YOLOv8 + model = YOLO('yolov8n.pt') + + # ะžั‚ะบั€ะพะนั‚ะต ะฒะธะดะตะพั„ะฐะนะป + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ะฆะธะบะป ะฟะพ ะบะฐะดั€ะฐะผ ะฒะธะดะตะพ + while cap.isOpened(): + # ะงั‚ะตะฝะธะต ะบะฐะดั€ะฐ ะธะท ะฒะธะดะตะพ + success, frame = cap.read() + + if success: + # ะ’ั‹ะฟะพะปะฝะธั‚ะต ะพั‚ัะปะตะถะธะฒะฐะฝะธะต YOLOv8 ะดะปั ะบะฐะดั€ะฐ, ัะพั…ั€ะฐะฝัั ัะปะตะดั‹ ะผะตะถะดัƒ ะบะฐะดั€ะฐะผะธ + results = model.track(frame, persist=True) + + # ะ’ะธะทัƒะฐะปะธะทะธั€ัƒะนั‚ะต ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะฝะฐ ะบะฐะดั€ะต + annotated_frame = results[0].plot() + + # ะŸะพะบะฐะถะธั‚ะต ะฐะฝะฝะพั‚ะธั€ะพะฒะฐะฝะฝั‹ะน ะบะฐะดั€ + cv2.imshow("ะžั‚ัะปะตะถะธะฒะฐะฝะธะต YOLOv8", annotated_frame) + + # ะŸั€ะตั€ะฒะฐั‚ัŒ ั†ะธะบะป, ะตัะปะธ ะฝะฐะถะฐั‚ะฐ ะบะปะฐะฒะธัˆะฐ 'q' + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ะŸั€ะตั€ะฒะฐั‚ัŒ ั†ะธะบะป, ะตัะปะธ ะดะพัั‚ะธะณะฝัƒั‚ ะบะพะฝะตั† ะฒะธะดะตะพ + break + + # ะžัะฒะพะฑะพะดะธั‚ะต ะพะฑัŠะตะบั‚ ะทะฐั…ะฒะฐั‚ะฐ ะฒะธะดะตะพ ะธ ะทะฐะบั€ะพะนั‚ะต ะพะบะฝะพ ะพั‚ะพะฑั€ะฐะถะตะฝะธั + cap.release() + cv2.destroyAllWindows() + ``` + +ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต ะฝะฐ ะธะทะผะตะฝะตะฝะธะต ั `model(frame)` ะฝะฐ `model.track(frame)`, ะบะพั‚ะพั€ะพะต ะฟะพะทะฒะพะปัะตั‚ ะฒะบะปัŽั‡ะธั‚ัŒ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ะฒะผะตัั‚ะพ ะฟั€ะพัั‚ะพะณะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธั. ะญั‚ะพั‚ ะธะทะผะตะฝะตะฝะฝั‹ะน ัะบั€ะธะฟั‚ ะฑัƒะดะตั‚ ะฒั‹ะฟะพะปะฝัั‚ัŒ ั‚ั€ะตะบะตั€ ะฝะฐ ะบะฐะถะดะพะผ ะบะฐะดั€ะต ะฒะธะดะตะพ, ะฒะธะทัƒะฐะปะธะทะธั€ะพะฒะฐั‚ัŒ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะธ ะพั‚ะพะฑั€ะฐะถะฐั‚ัŒ ะธั… ะฒ ะพะบะฝะต. ะฆะธะบะป ะผะพะถะฝะพ ะทะฐะฒะตั€ัˆะธั‚ัŒ ะฝะฐะถะฐั‚ะธะตะผ 'q'. + +## ะกะพะดะตะนัั‚ะฒะธะต ะฒ ะฝะพะฒั‹ั… ั‚ั€ะตะบะตั€ะฐั… + +ะ’ั‹ ัะฒะปัะตั‚ะตััŒ ะฟั€ะพั„ะตััะธะพะฝะฐะปะพะผ ะฒ ะผะฝะพะถะตัั‚ะฒะตะฝะฝะพะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะธ ัƒัะฟะตัˆะฝะพ ั€ะตะฐะปะธะทะพะฒะฐะปะธ ะธะปะธ ะฐะดะฐะฟั‚ะธั€ะพะฒะฐะปะธ ะฐะปะณะพั€ะธั‚ะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ั Ultralytics YOLO? ะœั‹ ะฟั€ะธะณะปะฐัˆะฐะตะผ ะฒะฐั ะฒะฝะตัั‚ะธ ัะฒะพะน ะฒะบะปะฐะด ะฒ ะฝะฐัˆ ั€ะฐะทะดะตะป Trackers ะฝะฐ [ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)! ะ’ะฐัˆะธ ั€ะตะฐะปัŒะฝั‹ะต ะฟั€ะธะปะพะถะตะฝะธั ะธ ั€ะตัˆะตะฝะธั ะผะพะณัƒั‚ ะฑั‹ั‚ัŒ ะฑะตัั†ะตะฝะฝั‹ะผะธ ะดะปั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะตะน, ั€ะฐะฑะพั‚ะฐัŽั‰ะธั… ะฝะฐะด ะทะฐะดะฐั‡ะฐะผะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธั. + +ะ’ะฝะพัั ัะฒะพะน ะฒะบะปะฐะด ะฒ ัั‚ะพั‚ ั€ะฐะทะดะตะป, ะฒั‹ ะฟะพะผะพะณะฐะตั‚ะต ั€ะฐััˆะธั€ะธั‚ัŒ ัะฟะตะบั‚ั€ ะดะพัั‚ัƒะฟะฝั‹ั… ั€ะตัˆะตะฝะธะน ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะฒ ั€ะฐะผะบะฐั… ั„ั€ะตะนะผะฒะพั€ะบะฐ Ultralytics YOLO, ะดะพะฑะฐะฒะปัั ะตั‰ะต ะพะดะธะฝ ัƒั€ะพะฒะตะฝัŒ ั„ัƒะฝะบั†ะธะพะฝะฐะปัŒะฝะพัั‚ะธ ะธ ะฟะพะปะตะทะฝะพัั‚ะธ ะดะปั ัะพะพะฑั‰ะตัั‚ะฒะฐ. + +ะงั‚ะพะฑั‹ ะฝะฐั‡ะฐั‚ัŒ ัะฒะพะน ะฒะบะปะฐะด, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะพะทะฝะฐะบะพะผัŒั‚ะตััŒ ั ะฝะฐัˆะธะผ [ะ ัƒะบะพะฒะพะดัั‚ะฒะพะผ ะดะปั ัƒั‡ะฐัั‚ะฝะธะบะพะฒ](https://docs.ultralytics.com/help/contributing) ะดะปั ะฟะพะปัƒั‡ะตะฝะธั ะฟะพะปะฝะพะน ะธะฝัั‚ั€ัƒะบั†ะธะธ ะฟะพ ะพั‚ะฟั€ะฐะฒะบะต Pull Request (PR) ๐Ÿ› ๏ธ. ะœั‹ ะฒ ะฟั€ะตะดะฒะบัƒัˆะตะฝะธะธ ัƒะฒะธะดะตั‚ัŒ, ั‡ั‚ะพ ะฒั‹ ะฟั€ะธะฝะตัะตั‚ะต ะฝะฐ ัั‚ะพะป! + +ะ’ะผะตัั‚ะต ะดะฐะฒะฐะนั‚ะต ัƒะปัƒั‡ัˆะธะผ ะฒะพะทะผะพะถะฝะพัั‚ะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธั ัะบะพัะธัั‚ะตะผั‹ Ultralytics YOLO ๐Ÿ™! 
diff --git a/ultralytics/docs/ru/modes/track.md:Zone.Identifier b/ultralytics/docs/ru/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/train.md b/ultralytics/docs/ru/modes/train.md new file mode 100755 index 0000000..38642f3 --- /dev/null +++ b/ultralytics/docs/ru/modes/train.md @@ -0,0 +1,206 @@ +--- +comments: true +description: ะŸะพัˆะฐะณะพะฒะพะต ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ ะพะฑัƒั‡ะตะฝะธัŽ ะผะพะดะตะปะตะน YOLOv8 ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ Ultralytics YOLO, ะฒะบะปัŽั‡ะฐั ะฟั€ะธะผะตั€ั‹ ะพะฑัƒั‡ะตะฝะธั ะฝะฐ ะพะดะฝะพะผ ะธ ะฝะตัะบะพะปัŒะบะธั… GPU +keywords: Ultralytics, YOLOv8, YOLO, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ั€ะตะถะธะผ ะพะฑัƒั‡ะตะฝะธั, ะฝะฐัั‚ั€ะฐะธะฒะฐะตะผั‹ะน ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั…, ะพะฑัƒั‡ะตะฝะธะต ะฝะฐ GPU, ะผะฝะพะณะพ-GPU, ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ั‹, ะฟั€ะธะผะตั€ั‹ CLI, ะฟั€ะธะผะตั€ั‹ Python +--- + +# ะžะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะตะน ั ะฟะพะผะพั‰ัŒัŽ Ultralytics YOLO + +ะญะบะพัะธัั‚ะตะผะฐ ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ Ultralytics YOLO + +## ะ’ะฒะตะดะตะฝะธะต + +ะžะฑัƒั‡ะตะฝะธะต ะณะปัƒะฑะพะบะพะน ะพะฑัƒั‡ะฐัŽั‰ะตะน ะผะพะดะตะปะธ ะฒะบะปัŽั‡ะฐะตั‚ ะฒ ัะตะฑั ะฟะพะดะฐั‡ัƒ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบัƒ ะตั‘ ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ, ั‚ะฐะบ ั‡ั‚ะพะฑั‹ ะพะฝะฐ ะผะพะณะปะฐ ะดะตะปะฐั‚ัŒ ั‚ะพั‡ะฝั‹ะต ะฟั€ะพะณะฝะพะทั‹. ะ ะตะถะธะผ ะพะฑัƒั‡ะตะฝะธั ะฒ Ultralytics YOLOv8 ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝ ะดะปั ัั„ั„ะตะบั‚ะธะฒะฝะพะณะพ ะธ ั€ะตะทัƒะปัŒั‚ะฐั‚ะธะฒะฝะพะณะพ ะพะฑัƒั‡ะตะฝะธั ะผะพะดะตะปะตะน ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ั ะฟะพะปะฝั‹ะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฒะพะทะผะพะถะฝะพัั‚ะตะน ัะพะฒั€ะตะผะตะฝะฝะพะน ะฐะฟะฟะฐั€ะฐั‚ัƒั€ั‹. ะญั‚ะพ ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ะฝะฐั†ะตะปะตะฝะพ ะฝะฐ ะพะฟะธัะฐะฝะธะต ะฒัะตั… ะดะตั‚ะฐะปะตะน, ะฝะตะพะฑั…ะพะดะธะผั‹ั… ะดะปั ะฝะฐั‡ะฐะปะฐ ะพะฑัƒั‡ะตะฝะธั ะฒะฐัˆะธั… ะผะพะดะตะปะตะน ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฑะพะณะฐั‚ะพะณะพ ะฝะฐะฑะพั€ะฐ ั„ัƒะฝะบั†ะธะน YOLOv8. + +

+
+ +
+ ะกะผะพั‚ั€ะตั‚ัŒ: ะšะฐะบ ะพะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ YOLOv8 ะฝะฐ ะฒะฐัˆะตะผ ะฝะฐัั‚ั€ะฐะธะฒะฐะตะผะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะฒ Google Colab. +

+ +## ะŸะพั‡ะตะผัƒ ัั‚ะพะธั‚ ะฒั‹ะฑั€ะฐั‚ัŒ Ultralytics YOLO ะดะปั ะพะฑัƒั‡ะตะฝะธั? + +ะ’ะพั‚ ะฝะตัะบะพะปัŒะบะพ ัƒะฑะตะดะธั‚ะตะปัŒะฝั‹ั… ะฟั€ะธั‡ะธะฝ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ั€ะตะถะธะผ Train YOLOv8: + +- **ะญั„ั„ะตะบั‚ะธะฒะฝะพัั‚ัŒ:** ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะผะฐะบัะธะผัƒะผ ะฒะพะทะผะพะถะฝะพัั‚ะตะน ะฒะฐัˆะตะณะพ ะพะฑะพั€ัƒะดะพะฒะฐะฝะธั, ะฑัƒะดัŒ ั‚ะพ ะฝะฐัั‚ั€ะพะนะบะฐ ั ะพะดะฝะธะผ GPU ะธะปะธ ั€ะฐัะฟั€ะตะดะตะปะตะฝะธะต ะฝะฐะณั€ัƒะทะบะธ ะฝะฐ ะฝะตัะบะพะปัŒะบะพ GPU. +- **ะฃะฝะธะฒะตั€ัะฐะปัŒะฝะพัั‚ัŒ:** ะžะฑัƒั‡ะฐะนั‚ะต ะฝะฐ ะฝะฐัั‚ั€ะฐะธะฒะฐะตะผั‹ั… ะฝะฐะฑะพั€ะฐั… ะดะฐะฝะฝั‹ั…, ะฟะพะผะธะผะพ ัƒะถะต ะดะพัั‚ัƒะฟะฝั‹ั…, ั‚ะฐะบะธั… ะบะฐะบ COCO, VOC ะธ ImageNet. +- **ะ”ั€ัƒะถะตะปัŽะฑะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั:** ะŸั€ะพัั‚ะพะน, ะฝะพ ะผะพั‰ะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ (CLI) ะธ Python ะดะปั ะฟั€ัะผะพะปะธะฝะตะนะฝะพะณะพ ะพะฟั‹ั‚ะฐ ะพะฑัƒั‡ะตะฝะธั. +- **ะ“ะธะฑะบะพัั‚ัŒ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ:** ะจะธั€ะพะบะธะน ัะฟะตะบั‚ั€ ะฝะฐัั‚ั€ะฐะธะฒะฐะตะผั‹ั… ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะดะปั ั‚ะพะฝะบะพะน ะฝะฐัั‚ั€ะพะนะบะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ. + +### ะšะปัŽั‡ะตะฒั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ ั€ะตะถะธะผะฐ Train + +ะ’ะพั‚ ะฝะตะบะพั‚ะพั€ั‹ะต ะทะฐะผะตั‚ะฝั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ ั€ะตะถะธะผะฐ Train YOLOv8: + +- **ะะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะฐั ะทะฐะณั€ัƒะทะบะฐ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั…:** ะกั‚ะฐะฝะดะฐั€ั‚ะฝั‹ะต ะฝะฐะฑะพั€ั‹ ะดะฐะฝะฝั‹ั…, ั‚ะฐะบะธะต ะบะฐะบ COCO, VOC ะธ ImageNet, ะทะฐะณั€ัƒะถะฐัŽั‚ัั ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ. +- **ะŸะพะดะดะตั€ะถะบะฐ ะผะฝะพะณะธั… GPU:** ะœะฐััˆั‚ะฐะฑะธั€ัƒะนั‚ะต ัƒัะธะปะธั ะฟะพ ะพะฑัƒั‡ะตะฝะธัŽ ะฑะตะท ะฟั€ะพะฑะปะตะผ ะฝะฐ ะฝะตัะบะพะปัŒะบะธั… GPU, ั‡ั‚ะพะฑั‹ ัƒัะบะพั€ะธั‚ัŒ ะฟั€ะพั†ะตัั. +- **ะะฐัั‚ั€ะพะนะบะฐ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ:** ะ’ะพะทะผะพะถะฝะพัั‚ัŒ ะธะทะผะตะฝะตะฝะธั ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ั‡ะตั€ะตะท ั„ะฐะนะปั‹ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ YAML ะธะปะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ CLI. +- **ะ’ะธะทัƒะฐะปะธะทะฐั†ะธั ะธ ะผะพะฝะธั‚ะพั€ะธะฝะณ:** ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะผะตั‚ั€ะธะบ ะพะฑัƒั‡ะตะฝะธั ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ ะธ ะฒะธะทัƒะฐะปะธะทะฐั†ะธั ะฟั€ะพั†ะตััะฐ ะพะฑัƒั‡ะตะฝะธั ะดะปั ะปัƒั‡ัˆะตะณะพ ะฟะพะฝะธะผะฐะฝะธั. + +!!! Tip "ะกะพะฒะตั‚" + + * ะะฐะฑะพั€ั‹ ะดะฐะฝะฝั‹ั… YOLOv8, ั‚ะฐะบะธะต ะบะฐะบ COCO, VOC, ImageNet ะธ ะผะฝะพะณะธะต ะดั€ัƒะณะธะต, ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะณั€ัƒะถะฐัŽั‚ัั ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ, ะฝะฐะฟั€ะธะผะตั€, `yolo train data=coco.yaml` + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะžะฑัƒั‡ะตะฝะธะต YOLOv8n ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… ั ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั 640. ะฃัั‚ั€ะพะนัั‚ะฒะพ ะดะปั ะพะฑัƒั‡ะตะฝะธั ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ัƒะบะฐะทะฐะฝะพ ั ะฟะพะผะพั‰ัŒัŽ ะฐั€ะณัƒะผะตะฝั‚ะฐ `device`. ะ•ัะปะธ ะฐั€ะณัƒะผะตะฝั‚ ะฝะต ะฟะตั€ะตะดะฐะฝ, ะฑัƒะดะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒัั GPU `device=0`, ะตัะปะธ ะดะพัั‚ัƒะฟะตะฝ, ะฒ ะฟั€ะพั‚ะธะฒะฝะพะผ ัะปัƒั‡ะฐะต ะฑัƒะดะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒัั `device=cpu`. ะกะผะพั‚ั€ะธั‚ะต ั€ะฐะทะดะตะป ะั€ะณัƒะผะตะฝั‚ั‹ ะฝะธะถะต ะดะปั ะฟะพะปะฝะพะณะพ ัะฟะธัะบะฐ ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ะพะฑัƒั‡ะตะฝะธั. + +!!! Example "ะŸั€ะธะผะตั€ ะพะฑัƒั‡ะตะฝะธั ะฝะฐ ะพะดะฝะพะผ GPU ะธ CPU" + + ะฃัั‚ั€ะพะนัั‚ะฒะพ ะพะฟั€ะตะดะตะปัะตั‚ัั ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ. ะ•ัะปะธ ะดะพัั‚ัƒะฟะตะฝ GPU, ั‚ะพ ะพะฝ ะฑัƒะดะตั‚ ะธัะฟะพะปัŒะทะพะฒะฐะฝ, ะธะฝะฐั‡ะต ะพะฑัƒั‡ะตะฝะธะต ะฝะฐั‡ะฝะตั‚ัั ะฝะฐ CPU. 
+ + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n.yaml') # ัะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # ัะพะทะดะฐั‚ัŒ ะธะท YAML ะธ ะฟะตั€ะตะฝะตัั‚ะธ ะฒะตัะฐ + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + + === "CLI" + + ```bash + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฝัƒะปั + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML, ะฟะตั€ะตะฝะตัั‚ะธ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ะžะฑัƒั‡ะตะฝะธะต ะฝะฐ ะฝะตัะบะพะปัŒะบะธั… GPU + +ะžะฑัƒั‡ะตะฝะธะต ะฝะฐ ะฝะตัะบะพะปัŒะบะธั… GPU ะฟะพะทะฒะพะปัะตั‚ ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝะพ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะดะพัั‚ัƒะฟะฝั‹ะต ะฐะฟะฟะฐั€ะฐั‚ะฝั‹ะต ั€ะตััƒั€ัั‹, ั€ะฐัะฟั€ะตะดะตะปัั ะฝะฐะณั€ัƒะทะบัƒ ะฟะพ ะพะฑัƒั‡ะตะฝะธัŽ ะฝะฐ ะฝะตัะบะพะปัŒะบะพ GPU. ะญั‚ะฐ ั„ัƒะฝะบั†ะธั ะดะพัั‚ัƒะฟะฝะฐ ะบะฐะบ ั‡ะตั€ะตะท Python API, ั‚ะฐะบ ะธ ั‡ะตั€ะตะท ะบะพะผะฐะฝะดะฝั‹ะน ะธะฝั‚ะตั€ั„ะตะนั. ะงั‚ะพะฑั‹ ะฒะบะปัŽั‡ะธั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ะฝะฐ ะฝะตัะบะพะปัŒะบะธั… GPU, ัƒะบะฐะถะธั‚ะต ะธะดะตะฝั‚ะธั„ะธะบะฐั‚ะพั€ั‹ ัƒัั‚ั€ะพะนัั‚ะฒ GPU, ะบะพั‚ะพั€ั‹ะต ะฒั‹ ั…ะพั‚ะธั‚ะต ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ. + +!!! Example "ะŸั€ะธะผะตั€ ะพะฑัƒั‡ะตะฝะธั ะฝะฐ ะฝะตัะบะพะปัŒะบะธั… GPU" + + ะงั‚ะพะฑั‹ ะพะฑัƒั‡ะธั‚ัŒ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ 2 GPU, ัƒัั‚ั€ะพะนัั‚ะฒ CUDA 0 ะธ 1 ะธัะฟะพะปัŒะทัƒะนั‚ะต ัะปะตะดัƒัŽั‰ะธะต ะบะพะผะฐะฝะดั‹. ะ ะฐััˆะธั€ัŒั‚ะต ะดะพ ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ั… GPU ะฟะพ ะผะตั€ะต ะฝะตะพะฑั…ะพะดะธะผะพัั‚ะธ. + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ 2 GPU + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "CLI" + + ```bash + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt ะธัะฟะพะปัŒะทัƒั GPU 0 ะธ 1 + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### ะžะฑัƒั‡ะตะฝะธะต ะฝะฐ Apple M1 ะธ M2 ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ MPS + +ะก ะธะฝั‚ะตะณั€ะฐั†ะธะตะน ะฟะพะดะดะตั€ะถะบะธ ั‡ะธะฟะพะฒ Apple M1 ะธ M2 ะฒ ะผะพะดะตะปะธ Ultralytics YOLO ั‚ะตะฟะตั€ัŒ ะผะพะถะฝะพ ะพะฑัƒั‡ะฐั‚ัŒ ะฒะฐัˆะธ ะผะพะดะตะปะธ ะฝะฐ ัƒัั‚ั€ะพะนัั‚ะฒะฐั…, ะธัะฟะพะปัŒะทัƒัŽั‰ะธั… ะผะพั‰ะฝัƒัŽ ะฟะปะฐั‚ั„ะพั€ะผัƒ Metal Performance Shaders (MPS). MPS ะฟั€ะตะดะปะฐะณะฐะตั‚ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝั‹ะน ัะฟะพัะพะฑ ะฒั‹ะฟะพะปะฝะตะฝะธั ะฒั‹ั‡ะธัะปะตะฝะธะน ะธ ะทะฐะดะฐั‡ ะพะฑั€ะฐะฑะพั‚ะบะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฝะฐ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธั… ะบั€ะตะผะฝะธะตะฒั‹ั… ั‡ะธะฟะฐั… Apple. 
+ +ะงั‚ะพะฑั‹ ะทะฐะฟัƒัั‚ะธั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ะฝะฐ ั‡ะธะฟะฐั… Apple M1 ะธ M2, ะฒั‹ ะดะพะปะถะฝั‹ ัƒะบะฐะทะฐั‚ัŒ 'mps' ะฒ ะบะฐั‡ะตัั‚ะฒะต ะฒะฐัˆะตะณะพ ัƒัั‚ั€ะพะนัั‚ะฒะฐ ะฟั€ะธ ะทะฐะฟัƒัะบะต ะฟั€ะพั†ะตััะฐ ะพะฑัƒั‡ะตะฝะธั. ะะธะถะต ะฟั€ะธะฒะตะดะตะฝั‹ ะฟั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั Python ะธ ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ: + +!!! Example "ะŸั€ะธะผะตั€ ะพะฑัƒั‡ะตะฝะธั ั MPS" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ MPS + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "CLI" + + ```bash + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt ะธัะฟะพะปัŒะทัƒั MPS + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +ะ˜ัะฟะพะปัŒะทัƒั ะฒั‹ั‡ะธัะปะธั‚ะตะปัŒะฝั‹ะต ะฒะพะทะผะพะถะฝะพัั‚ะธ ั‡ะธะฟะพะฒ M1/M2, ัั‚ะพ ะฟะพะทะฒะพะปัะตั‚ ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝะพ ะพะฑั€ะฐะฑะฐั‚ั‹ะฒะฐั‚ัŒ ะทะฐะดะฐั‡ะธ ะพะฑัƒั‡ะตะฝะธั. ะ”ะปั ะฑะพะปะตะต ะฟะพะดั€ะพะฑะฝะพะณะพ ั€ัƒะบะพะฒะพะดัั‚ะฒะฐ ะธ ั€ะฐััˆะธั€ะตะฝะฝั‹ั… ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะพะฑั€ะฐั‚ะธั‚ะตััŒ ะบ [ะดะพะบัƒะผะตะฝั‚ะฐั†ะธะธ PyTorch MPS](https://pytorch.org/docs/stable/notes/mps.html). + +## ะ›ะพะณะธั€ะพะฒะฐะฝะธะต + +ะ’ ะฟั€ะพั†ะตััะต ะพะฑัƒั‡ะตะฝะธั ะผะพะดะตะปะธ YOLOv8 ะฒั‹ ะผะพะถะตั‚ะต ะฝะฐะนั‚ะธ ั†ะตะฝะฝั‹ะผ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ ัะพ ะฒั€ะตะผะตะฝะตะผ. ะ—ะดะตััŒ ะฝะฐ ะฟะพะผะพั‰ัŒ ะฟั€ะธั…ะพะดะธั‚ ะปะพะณะธั€ะพะฒะฐะฝะธะต. YOLO ะพั‚ Ultralytics ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ั‚ั€ะธ ั‚ะธะฟะฐ ะปะพะณะณะตั€ะพะฒ - Comet, ClearML ะธ TensorBoard. + +ะงั‚ะพะฑั‹ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะปะพะณะณะตั€, ะฒั‹ะฑะตั€ะธั‚ะต ะตะณะพ ะธะท ะฒั‹ะฟะฐะดะฐัŽั‰ะตะณะพ ะผะตะฝัŽ ะฒ ะฟั€ะธะฒะตะดะตะฝะฝะพะผ ะฒั‹ัˆะต ะฟั€ะธะผะตั€ะต ะบะพะดะฐ ะธ ะทะฐะฟัƒัั‚ะธั‚ะต ะตะณะพ. ะ’ั‹ะฑั€ะฐะฝะฝั‹ะน ะปะพะณะณะตั€ ะฑัƒะดะตั‚ ัƒัั‚ะฐะฝะพะฒะปะตะฝ ะธ ะธะฝะธั†ะธะฐะปะธะทะธั€ะพะฒะฐะฝ. + +### Comet + +[Comet](https://www.comet.ml/site/) - ัั‚ะพ ะฟะปะฐั‚ั„ะพั€ะผะฐ, ะบะพั‚ะพั€ะฐั ะฟะพะทะฒะพะปัะตั‚ ัƒั‡ะตะฝั‹ะผ ะธ ั€ะฐะทั€ะฐะฑะพั‚ั‡ะธะบะฐะผ ะพั‚ัะปะตะถะธะฒะฐั‚ัŒ, ัั€ะฐะฒะฝะธะฒะฐั‚ัŒ, ะพะฑัŠััะฝัั‚ัŒ ะธ ะพะฟั‚ะธะผะธะทะธั€ะพะฒะฐั‚ัŒ ัะบัะฟะตั€ะธะผะตะฝั‚ั‹ ะธ ะผะพะดะตะปะธ. ะžะฝะฐ ะฟั€ะตะดะพัั‚ะฐะฒะปัะตั‚ ั‚ะฐะบะธะต ั„ัƒะฝะบั†ะธะธ, ะบะฐะบ ะผะตั‚ั€ะธะบะธ ะฒ ั€ะตะฐะปัŒะฝะพะผ ะฒั€ะตะผะตะฝะธ, ัั€ะฐะฒะฝะตะฝะธะต ะบะพะดะฐ ะธ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ. + +ะงั‚ะพะฑั‹ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ Comet: + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +ะะต ะทะฐะฑัƒะดัŒั‚ะต ะฒะพะนั‚ะธ ะฒ ัะฒะพัŽ ัƒั‡ะตั‚ะฝัƒัŽ ะทะฐะฟะธััŒ Comet ะฝะฐ ะธั… ัะฐะนั‚ะต ะธ ะฟะพะปัƒั‡ะธั‚ัŒ ัะฒะพะน API-ะบะปัŽั‡. ะ’ะฐะผ ะฝัƒะถะฝะพ ะฑัƒะดะตั‚ ะดะพะฑะฐะฒะธั‚ัŒ ะตะณะพ ะฒ ะฟะตั€ะตะผะตะฝะฝั‹ะต ัั€ะตะดั‹ ะธะปะธ ะฒ ัะฒะพะน ัะบั€ะธะฟั‚, ั‡ั‚ะพะฑั‹ ะฒะตัั‚ะธ ะถัƒั€ะฝะฐะป ัะฒะพะธั… ัะบัะฟะตั€ะธะผะตะฝั‚ะพะฒ. + +### ClearML + +[ClearML](https://www.clear.ml/) - ัั‚ะพ ะพั‚ะบั€ั‹ั‚ะฐั ะฟะปะฐั‚ั„ะพั€ะผะฐ, ะบะพั‚ะพั€ะฐั ะฐะฒั‚ะพะผะฐั‚ะธะทะธั€ัƒะตั‚ ะพั‚ัะปะตะถะธะฒะฐะฝะธะต ัะบัะฟะตั€ะธะผะตะฝั‚ะพะฒ ะธ ะฟะพะผะพะณะฐะตั‚ ะฒ ัั„ั„ะตะบั‚ะธะฒะฝะพะผ ะพะฑะผะตะฝะต ั€ะตััƒั€ัะฐะผะธ. 
ะžะฝะฐ ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝะฐ ะดะปั ะฟะพะผะพั‰ะธ ะบะพะผะฐะฝะดะฐะผ ะฒ ัƒะฟั€ะฐะฒะปะตะฝะธะธ, ะฒั‹ะฟะพะปะฝะตะฝะธะธ ะธ ะฒะพัะฟั€ะพะธะทะฒะตะดะตะฝะธะธ ะธั… ั€ะฐะฑะพั‚ั‹ ะฒ ะพะฑะปะฐัั‚ะธ ML ะฑะพะปะตะต ัั„ั„ะตะบั‚ะธะฒะฝะพ. + +ะงั‚ะพะฑั‹ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ClearML: + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +ะŸะพัะปะต ะทะฐะฟัƒัะบะฐ ัั‚ะพะณะพ ัะบั€ะธะฟั‚ะฐ ะฒะฐะผ ะฝัƒะถะฝะพ ะฑัƒะดะตั‚ ะฒะพะนั‚ะธ ะฒ ะฒะฐัˆัƒ ัƒั‡ะตั‚ะฝัƒัŽ ะทะฐะฟะธััŒ ClearML ะฒ ะฑั€ะฐัƒะทะตั€ะต ะธ ะฐัƒั‚ะตะฝั‚ะธั„ะธั†ะธั€ะพะฒะฐั‚ัŒ ะฒะฐัˆัƒ ัะตััะธัŽ. + +### TensorBoard + +[TensorBoard](https://www.tensorflow.org/tensorboard) - ัั‚ะพ ะธะฝัั‚ั€ัƒะผะตะฝั‚ ะฒะธะทัƒะฐะปะธะทะฐั†ะธะธ ะดะปั TensorFlow. ะžะฝ ะฟะพะทะฒะพะปัะตั‚ ะฒะฐะผ ะฒะธะทัƒะฐะปะธะทะธั€ะพะฒะฐั‚ัŒ ะณั€ะฐั„ TensorFlow, ะฒั‹ะฒะพะดะธั‚ัŒ ะบะพะปะธั‡ะตัั‚ะฒะตะฝะฝั‹ะต ะผะตั‚ั€ะธะบะธ ะพ ะฒั‹ะฟะพะปะฝะตะฝะธะธ ะฒะฐัˆะตะณะพ ะณั€ะฐั„ะฐ ะธ ะฟะพะบะฐะทั‹ะฒะฐั‚ัŒ ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ะต ะดะฐะฝะฝั‹ะต, ั‚ะฐะบะธะต ะบะฐะบ ะธะทะพะฑั€ะฐะถะตะฝะธั, ะฟั€ะพั…ะพะดัั‰ะธะต ั‡ะตั€ะตะท ะฝะตะณะพ. + +ะงั‚ะพะฑั‹ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ TensorBoard ะฒ [Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb): + +!!! Example "ะŸั€ะธะผะตั€" + + === "CLI" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # ะทะฐะผะตะฝะธั‚ัŒ ะฝะฐ ะดะธั€ะตะบั‚ะพั€ะธัŽ 'runs' + ``` + +ะงั‚ะพะฑั‹ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ TensorBoard ะปะพะบะฐะปัŒะฝะพ, ะทะฐะฟัƒัั‚ะธั‚ะต ะฟั€ะธะฒะตะดะตะฝะฝัƒัŽ ะฝะธะถะต ะบะพะผะฐะฝะดัƒ ะธ ะฟั€ะพัะผะพั‚ั€ะธั‚ะต ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะฟะพ ะฐะดั€ะตััƒ http://localhost:6006/. + +!!! Example "ะŸั€ะธะผะตั€" + + === "CLI" + ```bash + tensorboard --logdir ultralytics/runs # ะทะฐะผะตะฝะธั‚ัŒ ะฝะฐ ะดะธั€ะตะบั‚ะพั€ะธัŽ 'runs' + ``` + +ะญั‚ะพ ะทะฐะณั€ัƒะทะธั‚ TensorBoard ะธ ะฝะฐะฟั€ะฐะฒะธั‚ ะตะณะพ ะบ ะบะฐั‚ะฐะปะพะณัƒ, ะณะดะต ัะพั…ั€ะฐะฝััŽั‚ัั ะฒะฐัˆะธ ะถัƒั€ะฝะฐะปั‹ ะพะฑัƒั‡ะตะฝะธั. + +ะŸะพัะปะต ะฝะฐัั‚ั€ะพะนะบะธ ะฒะฐัˆะตะณะพ ะปะพะณะณะตั€ะฐ ะฒั‹ ะผะพะถะตั‚ะต ะฟั€ะพะดะพะปะถะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะธ. ะ’ัะต ะผะตั‚ั€ะธะบะธ ะพะฑัƒั‡ะตะฝะธั ะฑัƒะดัƒั‚ ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะฟะธัะฐะฝั‹ ะฝะฐ ะฒั‹ะฑั€ะฐะฝะฝะพะน ะฒะฐะผะธ ะฟะปะฐั‚ั„ะพั€ะผะต, ะธ ะฒั‹ ัะผะพะถะตั‚ะต ะฟะพะปัƒั‡ะธั‚ัŒ ะดะพัั‚ัƒะฟ ะบ ัั‚ะธะผ ะถัƒั€ะฝะฐะปะฐะผ, ั‡ั‚ะพะฑั‹ ะพั‚ัะปะตะถะธะฒะฐั‚ัŒ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฒะฐัˆะตะน ะผะพะดะตะปะธ ัะพ ะฒั€ะตะผะตะฝะตะผ, ัั€ะฐะฒะฝะธะฒะฐั‚ัŒ ั€ะฐะทะปะธั‡ะฝั‹ะต ะผะพะดะตะปะธ ะธ ะพะฟั€ะตะดะตะปัั‚ัŒ ะพะฑะปะฐัั‚ะธ ะดะปั ัƒะปัƒั‡ัˆะตะฝะธั. diff --git a/ultralytics/docs/ru/modes/train.md:Zone.Identifier b/ultralytics/docs/ru/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/modes/val.md b/ultralytics/docs/ru/modes/val.md new file mode 100755 index 0000000..1d7fb5d --- /dev/null +++ b/ultralytics/docs/ru/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: ะ ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ ะฟั€ะพะฒะตั€ะบะต ะผะพะดะตะปะตะน YOLOv8. ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะพั†ะตะฝะธั‚ัŒ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฒะฐัˆะธั… ะผะพะดะตะปะตะน YOLO, ะธัะฟะพะปัŒะทัƒั ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะฟั€ะพะฒะตั€ะบะธ ะธ ะผะตั‚ั€ะธะบะธ ั ะฟั€ะธะผะตั€ะฐะผะธ ะฝะฐ Python ะธ CLI. 
+keywords: Ultralytics, YOLO ะ”ะพะบัƒะผะตะฝั‚ะฐั†ะธั, YOLOv8, ะฟั€ะพะฒะตั€ะบะฐ, ะพั†ะตะฝะบะฐ ะผะพะดะตะปะธ, ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ั‹, ั‚ะพั‡ะฝะพัั‚ัŒ, ะผะตั‚ั€ะธะบะธ, Python, CLI +--- + +# ะ’ะฐะปะธะดะฐั†ะธั ะผะพะดะตะปะตะน ั Ultralytics YOLO + +Ultralytics YOLO ัะบะพัะธัั‚ะตะผะฐ ะธ ะธะฝั‚ะตะณั€ะฐั†ะธะธ + +## ะ’ะฒะตะดะตะฝะธะต + +ะ’ะฐะปะธะดะฐั†ะธั ัะฒะปัะตั‚ัั ะบั€ะธั‚ะธั‡ะตัะบะธ ะฒะฐะถะฝั‹ะผ ัั‚ะฐะฟะพะผ ะฒ ะฟั€ะพั†ะตััะต ะผะฐัˆะธะฝะฝะพะณะพ ะพะฑัƒั‡ะตะฝะธั, ะฟะพะทะฒะพะปััŽั‰ะธะผ ะพั†ะตะฝะธั‚ัŒ ะบะฐั‡ะตัั‚ะฒะพ ะฒะฐัˆะธั… ะพะฑัƒั‡ะตะฝะฝั‹ั… ะผะพะดะตะปะตะน. ะ ะตะถะธะผ Val ะฒ Ultralytics YOLOv8 ะพะฑะตัะฟะตั‡ะธะฒะฐะตั‚ ะฝะฐะฑะพั€ ะธะฝัั‚ั€ัƒะผะตะฝั‚ะพะฒ ะธ ะผะตั‚ั€ะธะบ ะดะปั ะพั†ะตะฝะบะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะฒะฐัˆะธั… ะผะพะดะตะปะตะน ะฟะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธัŽ ะพะฑัŠะตะบั‚ะพะฒ. ะญั‚ะพ ั€ัƒะบะพะฒะพะดัั‚ะฒะพ ัะปัƒะถะธั‚ ะฟะพะปะฝั‹ะผ ั€ะตััƒั€ัะพะผ ะดะปั ะฟะพะฝะธะผะฐะฝะธั ั‚ะพะณะพ, ะบะฐะบ ัั„ั„ะตะบั‚ะธะฒะฝะพ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ั€ะตะถะธะผ Val, ั‡ั‚ะพะฑั‹ ะพะฑะตัะฟะตั‡ะธั‚ัŒ ั‚ะพั‡ะฝะพัั‚ัŒ ะธ ะฝะฐะดะตะถะฝะพัั‚ัŒ ะฒะฐัˆะธั… ะผะพะดะตะปะตะน. + +## ะ—ะฐั‡ะตะผ ะฟั€ะพะฒะตั€ัั‚ัŒ ั Ultralytics YOLO? + +ะ’ะพั‚ ะฟะพั‡ะตะผัƒ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะต ั€ะตะถะธะผะฐ Val YOLOv8 ะฒั‹ะณะพะดะฝะพ: + +- **ะขะพั‡ะฝะพัั‚ัŒ:** ะŸะพะปัƒั‡ะธั‚ะต ั‚ะพั‡ะฝั‹ะต ะผะตั‚ั€ะธะบะธ, ั‚ะฐะบะธะต ะบะฐะบ mAP50, mAP75 ะธ mAP50-95, ะดะปั ะฒัะตัั‚ะพั€ะพะฝะฝะตะน ะพั†ะตะฝะบะธ ะฒะฐัˆะตะน ะผะพะดะตะปะธ. +- **ะฃะดะพะฑัั‚ะฒะพ:** ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะฒัั‚ั€ะพะตะฝะฝั‹ะต ั„ัƒะฝะบั†ะธะธ, ะบะพั‚ะพั€ั‹ะต ะทะฐะฟะพะผะธะฝะฐัŽั‚ ะฝะฐัั‚ั€ะพะนะบะธ ะพะฑัƒั‡ะตะฝะธั, ัƒะฟั€ะพั‰ะฐั ะฟั€ะพั†ะตัั ะฒะฐะปะธะดะฐั†ะธะธ. +- **ะ“ะธะฑะบะพัั‚ัŒ:** ะŸั€ะพะฒะตั€ัะนั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ั‚ะตั… ะถะต ะธะปะธ ั€ะฐะทะฝั‹ั… ะฝะฐะฑะพั€ะพะฒ ะดะฐะฝะฝั‹ั… ะธ ั€ะฐะทะผะตั€ะพะฒ ะธะทะพะฑั€ะฐะถะตะฝะธะน. +- **ะะฐัั‚ั€ะพะนะบะฐ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะพะฒ:** ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะผะตั‚ั€ะธะบะธ ะฟั€ะพะฒะตั€ะบะธ ะดะปั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพะน ะฝะฐัั‚ั€ะพะนะบะธ ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะดะปั ะปัƒั‡ัˆะตะน ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. + +### ะžัะฝะพะฒะฝั‹ะต ั„ัƒะฝะบั†ะธะธ ั€ะตะถะธะผะฐ Val + +ะ’ะพั‚ ะฝะตะบะพั‚ะพั€ั‹ะต ะทะฐะผะตั‚ะฝั‹ะต ั„ัƒะฝะบั†ะธะธ, ะฟั€ะตะดะปะฐะณะฐะตะผั‹ะต ั€ะตะถะธะผะพะผ Val YOLOv8: + +- **ะะฒั‚ะพะผะฐั‚ะธะทะธั€ะพะฒะฐะฝะฝั‹ะต ะฝะฐัั‚ั€ะพะนะบะธ:** ะœะพะดะตะปะธ ะทะฐะฟะพะผะธะฝะฐัŽั‚ ัะฒะพะธ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ ะพะฑัƒั‡ะตะฝะธั ะดะปั ะฟั€ะพัั‚ะพะน ะฒะฐะปะธะดะฐั†ะธะธ. +- **ะŸะพะดะดะตั€ะถะบะฐ ะผะฝะพะถะตัั‚ะฒะฐ ะผะตั‚ั€ะธะบ:** ะžั†ะตะฝะธั‚ะต ะฒะฐัˆัƒ ะผะพะดะตะปัŒ, ะพัะฝะพะฒั‹ะฒะฐัััŒ ะฝะฐ ั€ัะดะต ะผะตั‚ั€ะธะบ ั‚ะพั‡ะฝะพัั‚ะธ. +- **CLI ะธ Python API:** ะ’ั‹ะฑะตั€ะธั‚ะต ะธะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ ะธะปะธ Python API ะฒ ะทะฐะฒะธัะธะผะพัั‚ะธ ะพั‚ ะฒะฐัˆะตะณะพ ะฟั€ะตะดะฟะพั‡ั‚ะตะฝะธั ะดะปั ะฟั€ะพะฒะตั€ะบะธ. +- **ะกะพะฒะผะตัั‚ะธะผะพัั‚ัŒ ะดะฐะฝะฝั‹ั…:** ะ‘ะตัะฟะตั€ะตะฑะพะนะฝะพ ั€ะฐะฑะพั‚ะฐะตั‚ ั ะฝะฐะฑะพั€ะฐะผะธ ะดะฐะฝะฝั‹ั…, ะธัะฟะพะปัŒะทัƒะตะผั‹ะผะธ ะฒะพ ะฒั€ะตะผั ั„ะฐะทั‹ ะพะฑัƒั‡ะตะฝะธั, ะฐ ั‚ะฐะบะถะต ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะธะผะธ ะฝะฐะฑะพั€ะฐะผะธ ะดะฐะฝะฝั‹ั…. + +!!! 
Tip "ะกะพะฒะตั‚" + + * ะœะพะดะตะปะธ YOLOv8 ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะฟะพะผะธะฝะฐัŽั‚ ัะฒะพะธ ะฝะฐัั‚ั€ะพะนะบะธ ะพะฑัƒั‡ะตะฝะธั, ั‚ะฐะบ ั‡ั‚ะพ ะฒั‹ ะผะพะถะตั‚ะต ะปะตะณะบะพ ะฟั€ะพะฒะตั€ะธั‚ัŒ ะผะพะดะตะปัŒ ั ั‚ะตะผ ะถะต ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั ะธ ะฝะฐ ะพั€ะธะณะธะฝะฐะปัŒะฝะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั…, ะฟั€ะพัั‚ะพ ะธัะฟะพะปัŒะทัƒั `yolo val model=yolov8n.pt` ะธะปะธ `model('yolov8n.pt').val()` + +## ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั + +ะŸั€ะพะฒะตั€ัŒั‚ะต ั‚ะพั‡ะฝะพัั‚ัŒ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128. ะั€ะณัƒะผะตะฝั‚ั‹ ะฟะตั€ะตะดะฐะฒะฐั‚ัŒ ะฝะต ั‚ั€ะตะฑัƒะตั‚ัั, ั‚ะฐะบ ะบะฐะบ `ะผะพะดะตะปัŒ` ัะพั…ั€ะฐะฝัะตั‚ `ะดะฐะฝะฝั‹ะต` ะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฒ ะบะฐั‡ะตัั‚ะฒะต ะฐั‚ั€ะธะฑัƒั‚ะพะฒ ะผะพะดะตะปะธ. ะกะผ. ั€ะฐะทะดะตะป ะั€ะณัƒะผะตะฝั‚ั‹ ะฝะธะถะต ะดะปั ะฟะพะปะฝะพะณะพ ัะฟะธัะบะฐ ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ัะบัะฟะพั€ั‚ะฐ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะบะฐ ะผะพะดะตะปะธ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ + + # ะŸั€ะพะฒะตั€ะบะฐ ะผะพะดะตะปะธ + metrics = model.val() # ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะทะฐะฟะพะผะฝะตะฝั‹ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ัะฟะธัะพะบ ัะพะดะตั€ะถะธั‚ map50-95 ะบะฐะถะดะพะน ะบะฐั‚ะตะณะพั€ะธะธ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ะฟั€ะพะฒะตั€ะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + yolo detect val model=path/to/best.pt # ะฟั€ะพะฒะตั€ะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ + ``` + +## ะั€ะณัƒะผะตะฝั‚ั‹ + +ะะฐัั‚ั€ะพะนะบะธ ะฟั€ะพะฒะตั€ะบะธ ะดะปั ะผะพะดะตะปะตะน YOLO ะพั‚ะฝะพััั‚ัั ะบ ั€ะฐะทะปะธั‡ะฝั‹ะผ ะณะธะฟะตั€ะฟะฐั€ะฐะผะตั‚ั€ะฐะผ ะธ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธัะผ, ะธัะฟะพะปัŒะทัƒะตะผั‹ะผ ะดะปั ะพั†ะตะฝะบะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะดะปั ะฟั€ะพะฒะตั€ะบะธ. ะญั‚ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะผะพะณัƒั‚ ะฒะปะธัั‚ัŒ ะฝะฐ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ, ัะบะพั€ะพัั‚ัŒ ะธ ั‚ะพั‡ะฝะพัั‚ัŒ ะผะพะดะตะปะธ. ะะตะบะพั‚ะพั€ั‹ะต ะพะฑั‰ะธะต ะฟะฐั€ะฐะผะตั‚ั€ั‹ ะฟั€ะพะฒะตั€ะบะธ YOLO ะฒะบะปัŽั‡ะฐัŽั‚ ั€ะฐะทะผะตั€ ะฟะฐะบะตั‚ะฐ, ั‡ะฐัั‚ะพั‚ัƒ ะฟั€ะพะฒะตะดะตะฝะธั ะฟั€ะพะฒะตั€ะบะธ ะฒะพ ะฒั€ะตะผั ะพะฑัƒั‡ะตะฝะธั ะธ ะผะตั‚ั€ะธะบะธ, ะธัะฟะพะปัŒะทัƒะตะผั‹ะต ะดะปั ะพั†ะตะฝะบะธ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ. ะ”ั€ัƒะณะธะต ั„ะฐะบั‚ะพั€ั‹, ะบะพั‚ะพั€ั‹ะต ะผะพะณัƒั‚ ะฒะปะธัั‚ัŒ ะฝะฐ ะฟั€ะพั†ะตัั ะฟั€ะพะฒะตั€ะบะธ, ะฒะบะปัŽั‡ะฐัŽั‚ ั€ะฐะทะผะตั€ ะธ ัะพัั‚ะฐะฒ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะดะปั ะฟั€ะพะฒะตั€ะบะธ ะธ ะบะพะฝะบั€ะตั‚ะฝัƒัŽ ะทะฐะดะฐั‡ัƒ, ะดะปั ะบะพั‚ะพั€ะพะน ะธัะฟะพะปัŒะทัƒะตั‚ัั ะผะพะดะตะปัŒ. ะ’ะฐะถะฝะพ ั‚ั‰ะฐั‚ะตะปัŒะฝะพ ะฝะฐัั‚ั€ะพะธั‚ัŒ ะธ ะฟั€ะพะฒะตัั‚ะธ ัะบัะฟะตั€ะธะผะตะฝั‚ั‹ ั ัั‚ะธะผะธ ะฟะฐั€ะฐะผะตั‚ั€ะฐะผะธ, ั‡ั‚ะพะฑั‹ ัƒะฑะตะดะธั‚ัŒัั, ั‡ั‚ะพ ะผะพะดะตะปัŒ ั…ะพั€ะพัˆะพ ั€ะฐะฑะพั‚ะฐะตั‚ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… ะดะปั ะฟั€ะพะฒะตั€ะบะธ ะธ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะธ ะฟั€ะตะดะพั‚ะฒั€ะฐั‰ะตะฝะธั ะฟะตั€ะตะพะฑัƒั‡ะตะฝะธั. 
+ +| ะšะปัŽั‡ | ะ—ะฝะฐั‡ะตะฝะธะต | ะžะฟะธัะฐะฝะธะต | +|---------------|----------|-----------------------------------------------------------------------------------------| +| `data` | `None` | ะฟัƒั‚ัŒ ะบ ั„ะฐะนะปัƒ ะดะฐะฝะฝั‹ั…, ะฝะฐะฟั€ะธะผะตั€, coco128.yaml | +| `imgsz` | `640` | ั€ะฐะทะผะตั€ ะฒั…ะพะดะฝั‹ั… ะธะทะพะฑั€ะฐะถะตะฝะธะน ะบะฐะบ ั†ะตะปะพะต ั‡ะธัะปะพ | +| `batch` | `16` | ะบะพะปะธั‡ะตัั‚ะฒะพ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฒ ะฟะฐะบะตั‚ะต (-1 ะดะปั AutoBatch) | +| `save_json` | `False` | ัะพั…ั€ะฐะฝะธั‚ัŒ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะฒ ั„ะฐะนะป JSON | +| `save_hybrid` | `False` | ัะพั…ั€ะฐะฝะธั‚ัŒ ะณะธะฑั€ะธะดะฝัƒัŽ ะฒะตั€ัะธัŽ ะผะตั‚ะพะบ (ะผะตั‚ะบะธ + ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝั‹ะต ะฟั€ะตะดัะบะฐะทะฐะฝะธั) | +| `conf` | `0.001` | ะฟะพั€ะพะณ ัƒะฒะตั€ะตะฝะฝะพัั‚ะธ ะพะฑัŠะตะบั‚ะฐ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั | +| `iou` | `0.6` | ะฟะพั€ะพะณ ะฟะตั€ะตัะตั‡ะตะฝะธั ะฟะพ ะพะฑัŠะตะดะธะฝะตะฝะธัŽ (IoU) ะดะปั NMS (ะฝะตั‡ะตั‚ะบะพะต ัั€ะฐะฒะฝะตะฝะธะต) | +| `max_det` | `300` | ะผะฐะบัะธะผะฐะปัŒะฝะพะต ะบะพะปะธั‡ะตัั‚ะฒะพ ะพะฑะฝะฐั€ัƒะถะตะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะต | +| `half` | `True` | ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะฟะพะปัƒะฟั€ะตั†ะธะทะธะพะฝะฝะพัั‚ัŒ (FP16) | +| `device` | `None` | ัƒัั‚ั€ะพะนัั‚ะฒะพ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั, ะฝะฐะฟั€ะธะผะตั€, cuda device=0/1/2/3 ะธะปะธ device=cpu | +| `dnn` | `False` | ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ OpenCV DNN ะดะปั ONNX ะธะฝั„ะตั€ะตะฝั†ะธะธ | +| `plots` | `False` | ะฟะพะบะฐะทั‹ะฒะฐั‚ัŒ ะณั€ะฐั„ะธะบะธ ะฒะพ ะฒั€ะตะผั ะพะฑัƒั‡ะตะฝะธั | +| `rect` | `False` | ะฟั€ัะผะพัƒะณะพะปัŒะฝะฐั ะฒะฐะปะธะดะฐั†ะธั ั ะบะพะปะปะตะบั†ะธะตะน ะบะฐะถะดะพะณะพ ะฟะฐะบะตั‚ะฐ ะดะปั ะผะธะฝะธะผะฐะปัŒะฝะพะน ะฟะฐะดะดะธะฝะณะฐ | +| `split` | `val` | ั€ะฐะทะดะตะป ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะดะปั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฒ ะฒะฐะปะธะดะฐั†ะธะธ, ะฝะฐะฟั€ะธะผะตั€, 'val', 'test' ะธะปะธ 'train' | +| diff --git a/ultralytics/docs/ru/modes/val.md:Zone.Identifier b/ultralytics/docs/ru/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/quickstart.md b/ultralytics/docs/ru/quickstart.md new file mode 100755 index 0000000..e47dd6d --- /dev/null +++ b/ultralytics/docs/ru/quickstart.md @@ -0,0 +1,198 @@ +--- +comments: true +description: ะ˜ะทัƒั‡ะตะฝะธะต ั€ะฐะทะปะธั‡ะฝั‹ั… ะผะตั‚ะพะดะพะฒ ัƒัั‚ะฐะฝะพะฒะบะธ Ultralytics ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ pip, conda, git ะธ Docker. ะžัะฒะพะตะฝะธะต ั€ะฐะฑะพั‚ั‹ ั Ultralytics ั‡ะตั€ะตะท ะธะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ ะธะปะธ ะฒ ั€ะฐะผะบะฐั… ะฒะฐัˆะธั… ะฟั€ะพะตะบั‚ะพะฒ ะฝะฐ Python. +keywords: ัƒัั‚ะฐะฝะพะฒะบะฐ Ultralytics, ัƒัั‚ะฐะฝะพะฒะบะฐ pip Ultralytics, ัƒัั‚ะฐะฝะพะฒะบะฐ Docker Ultralytics, ะธะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ Ultralytics, Python ะธะฝั‚ะตั€ั„ะตะนั Ultralytics +--- + +## ะฃัั‚ะฐะฝะพะฒะบะฐ Ultralytics + +Ultralytics ะฟั€ะตะดะปะฐะณะฐะตั‚ ั€ะฐะทะปะธั‡ะฝั‹ะต ะผะตั‚ะพะดั‹ ัƒัั‚ะฐะฝะพะฒะบะธ, ะฒะบะปัŽั‡ะฐั pip, conda ะธ Docker. ะฃัั‚ะฐะฝะพะฒะธั‚ะต YOLOv8 ั‡ะตั€ะตะท ะฟะฐะบะตั‚ `ultralytics` pip ะดะปั ะฟะพัะปะตะดะฝะตะณะพ ัั‚ะฐะฑะธะปัŒะฝะพะณะพ ะฒั‹ะฟัƒัะบะฐ ะธะปะธ ะฟัƒั‚ะตะผ ะบะปะพะฝะธั€ะพะฒะฐะฝะธั [ั€ะตะฟะพะทะธั‚ะพั€ะธั Ultralytics ะฝะฐ GitHub](https://github.com/ultralytics/ultralytics) ะดะปั ะฟะพะปัƒั‡ะตะฝะธั ัะฐะผะพะน ะฐะบั‚ัƒะฐะปัŒะฝะพะน ะฒะตั€ัะธะธ. 
Docker ะผะพะถะฝะพ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟะฐะบะตั‚ะฐ ะฒ ะธะทะพะปะธั€ะพะฒะฐะฝะฝะพะผ ะบะพะฝั‚ะตะนะฝะตั€ะต, ะธะทะฑะตะณะฐั ะปะพะบะฐะปัŒะฝะพะน ัƒัั‚ะฐะฝะพะฒะบะธ. + +!!! Example "ะฃัั‚ะฐะฝะพะฒะบะฐ" + + === "ะฃัั‚ะฐะฝะพะฒะบะฐ ั‡ะตั€ะตะท Pip (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั)" + ะฃัั‚ะฐะฝะพะฒะธั‚ะต ะฟะฐะบะตั‚ `ultralytics` ั ะฟะพะผะพั‰ัŒัŽ pip ะธะปะธ ะพะฑะฝะพะฒะธั‚ะต ััƒั‰ะตัั‚ะฒัƒัŽั‰ัƒัŽ ัƒัั‚ะฐะฝะพะฒะบัƒ, ะทะฐะฟัƒัั‚ะธะฒ `pip install -U ultralytics`. ะŸะพัะตั‚ะธั‚ะต ะธะฝะดะตะบั ะฟะฐะบะตั‚ะพะฒ Python (PyPI) ะดะปั ะฟะพะปัƒั‡ะตะฝะธั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพะน ะธะฝั„ะพั€ะผะฐั†ะธะธ ะพ ะฟะฐะบะตั‚ะต `ultralytics`: [https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/). + + [![ะ’ะตั€ัะธั PyPI](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![ะ—ะฐะณั€ัƒะทะบะธ](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะฟะฐะบะตั‚ะฐ ultralytics ะธะท PyPI + pip install ultralytics + ``` + + ะ’ั‹ ั‚ะฐะบะถะต ะผะพะถะตั‚ะต ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ ะฟะฐะบะตั‚ `ultralytics` ะฝะฐะฟั€ัะผัƒัŽ ะธะท [ั€ะตะฟะพะทะธั‚ะพั€ะธั ะฝะฐ GitHub](https://github.com/ultralytics/ultralytics). ะญั‚ะพ ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะฟะพะปะตะทะฝะพ, ะตัะปะธ ะฒั‹ ั…ะพั‚ะธั‚ะต ะฟะพะปัƒั‡ะธั‚ัŒ ะฟะพัะปะตะดะฝัŽัŽ ะฒะตั€ัะธัŽ ะดะปั ั€ะฐะทั€ะฐะฑะพั‚ะบะธ. ะฃะฑะตะดะธั‚ะตััŒ, ั‡ั‚ะพ ะฒ ะฒะฐัˆะตะน ัะธัั‚ะตะผะต ัƒัั‚ะฐะฝะพะฒะปะตะฝ ะธะฝัั‚ั€ัƒะผะตะฝั‚ ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ Git. ะšะพะผะฐะฝะดะฐ `@main` ัƒัั‚ะฐะฝะฐะฒะปะธะฒะฐะตั‚ ะฒะตั‚ะบัƒ `main`, ะบะพั‚ะพั€ัƒัŽ ะผะพะถะฝะพ ะธะทะผะตะฝะธั‚ัŒ ะฝะฐ ะดั€ัƒะณัƒัŽ, ะบ ะฟั€ะธะผะตั€ัƒ, `@my-branch`, ะธะปะธ ัƒะดะฐะปะธั‚ัŒ ะฟะพะปะฝะพัั‚ัŒัŽ, ั‡ั‚ะพะฑั‹ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ ะธัะฟะพะปัŒะทะพะฒะฐะปะฐััŒ ะฒะตั‚ะบะฐ `main`. + + ```bash + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะฟะฐะบะตั‚ะฐ ultralytics ะธะท GitHub + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + === "ะฃัั‚ะฐะฝะพะฒะบะฐ ั‡ะตั€ะตะท Conda" + Conda - ัั‚ะพ ะฐะปัŒั‚ะตั€ะฝะฐั‚ะธะฒะฝั‹ะน ะผะตะฝะตะดะถะตั€ ะฟะฐะบะตั‚ะพะฒ ะดะปั pip, ะบะพั‚ะพั€ั‹ะน ั‚ะฐะบะถะต ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดะปั ัƒัั‚ะฐะฝะพะฒะบะธ. ะŸะพัะตั‚ะธั‚ะต Anaconda ะดะปั ะฟะพะปัƒั‡ะตะฝะธั ะดะพะฟะพะปะฝะธั‚ะตะปัŒะฝะพะน ะธะฝั„ะพั€ะผะฐั†ะธะธ: [https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics). ะ ะตะฟะพะทะธั‚ะพั€ะธะน ะดะปั ะพะฑะฝะพะฒะปะตะฝะธั conda ะฟะฐะบะตั‚ะฐ Ultralytics ะฝะฐั…ะพะดะธั‚ัั ะทะดะตััŒ: [https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/). + + [![Conda Recipe](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ะ—ะฐะณั€ัƒะทะบะธ](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ะ’ะตั€ัะธั](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda ะŸะปะฐั‚ั„ะพั€ะผั‹](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะฟะฐะบะตั‚ะฐ ultralytics ั ะฟะพะผะพั‰ัŒัŽ conda + conda install -c conda-forge ultralytics + ``` + + !!! 
Note "ะ—ะฐะผะตั‚ะบะฐ" + + ะ•ัะปะธ ะฒั‹ ัƒัั‚ะฐะฝะฐะฒะปะธะฒะฐะตั‚ะต ะฟะฐะบะตั‚ ะฒ ัั€ะตะดะต CUDA, ะปัƒั‡ัˆะตะน ะฟั€ะฐะบั‚ะธะบะพะน ะฑัƒะดะตั‚ ัƒัั‚ะฐะฝะพะฒะบะฐ `ultralytics`, `pytorch` ะธ `pytorch-cuda` ะพะดะฝะพะน ะบะพะผะฐะฝะดะพะน, ั‡ั‚ะพะฑั‹ ะผะตะฝะตะดะถะตั€ ะฟะฐะบะตั‚ะพะฒ conda ะผะพะณ ั€ะฐะทั€ะตัˆะธั‚ัŒ ะปัŽะฑั‹ะต ะบะพะฝั„ะปะธะบั‚ั‹ ะธะปะธ ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ `pytorch-cuda` ะฟะพัะปะตะดะฝะธะผ, ั‡ั‚ะพะฑั‹ ะฟั€ะธ ะฝะตะพะฑั…ะพะดะธะผะพัั‚ะธ ะพะฝ ะผะพะณ ะทะฐะผะตะฝะธั‚ัŒ ะฟะฐะบะตั‚ `pytorch`, ะฟั€ะตะดะฝะฐะทะฝะฐั‡ะตะฝะฝั‹ะน ะดะปั ะฆะŸ. + + ```bash + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะฒัะตั… ะฟะฐะบะตั‚ะพะฒ ะฒะผะตัั‚ะต ั ะฟะพะผะพั‰ัŒัŽ conda + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### ะžะฑั€ะฐะท Conda ะดะปั Docker + + ะžะฑั€ะฐะทั‹ Conda Ultralytics ั‚ะฐะบะถะต ะดะพัั‚ัƒะฟะฝั‹ ะฝะฐ [DockerHub](https://hub.docker.com/r/ultralytics/ultralytics). ะญั‚ะธ ะพะฑั€ะฐะทั‹ ะพัะฝะพะฒะฐะฝั‹ ะฝะฐ [Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/) ะธ ัะฒะปััŽั‚ัั ะฟั€ะพัั‚ั‹ะผ ัะฟะพัะพะฑะพะผ ะฝะฐั‡ะฐั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ `ultralytics` ะฒ ัั€ะตะดะต Conda. + + ```bash + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะธะผะตะฝะธ ะพะฑั€ะฐะทะฐ ะฒ ะฟะตั€ะตะผะตะฝะฝัƒัŽ + t=ultralytics/ultralytics:latest-conda + + # ะกะบะฐั‡ะธะฒะฐะฝะธะต ะฟะพัะปะตะดะฝะตะณะพ ะพะฑั€ะฐะทะฐ ultralytics ั Docker Hub + sudo docker pull $t + + # ะ—ะฐะฟัƒัะบ ะพะฑั€ะฐะทะฐ ultralytics ะฒ ะบะพะฝั‚ะตะนะฝะตั€ะต ั ะฟะพะดะดะตั€ะถะบะพะน GPU + sudo docker run -it --ipc=host --gpus all $t # ะฒัะต GPU + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ะฒั‹ะฑะพั€ GPU + ``` + + === "ะšะปะพะฝะธั€ะพะฒะฐะฝะธะต Git" + ะšะปะพะฝะธั€ัƒะนั‚ะต ั€ะตะฟะพะทะธั‚ะพั€ะธะน `ultralytics`, ะตัะปะธ ะฒั‹ ะทะฐะธะฝั‚ะตั€ะตัะพะฒะฐะฝั‹ ะฒ ัƒั‡ะฐัั‚ะธะธ ะฒ ั€ะฐะทั€ะฐะฑะพั‚ะบะต ะธะปะธ ั…ะพั‚ะธั‚ะต ัะบัะฟะตั€ะธะผะตะฝั‚ะธั€ะพะฒะฐั‚ัŒ ั ะฟะพัะปะตะดะฝะธะผ ะธัั…ะพะดะฝั‹ะผ ะบะพะดะพะผ. ะŸะพัะปะต ะบะปะพะฝะธั€ะพะฒะฐะฝะธั ะฟะตั€ะตะนะดะธั‚ะต ะฒ ะบะฐั‚ะฐะปะพะณ ะธ ัƒัั‚ะฐะฝะพะฒะธั‚ะต ะฟะฐะบะตั‚ ะฒ ั€ะตะถะธะผะต ั€ะตะดะฐะบั‚ะธั€ะพะฒะฐะฝะธั `-e` ั ะฟะพะผะพั‰ัŒัŽ pip. + + ```bash + # ะšะปะพะฝะธั€ะพะฒะฐะฝะธะต ั€ะตะฟะพะทะธั‚ะพั€ะธั ultralytics + git clone https://github.com/ultralytics/ultralytics + + # ะŸะตั€ะตั…ะพะด ะฒ ะบะปะพะฝะธั€ะพะฒะฐะฝะฝั‹ะน ะบะฐั‚ะฐะปะพะณ + cd ultralytics + + # ะฃัั‚ะฐะฝะพะฒะบะฐ ะฟะฐะบะตั‚ะฐ ะฒ ั€ะตะถะธะผะต ั€ะตะดะฐะบั‚ะธั€ะพะฒะฐะฝะธั ะดะปั ั€ะฐะทั€ะฐะฑะพั‚ะบะธ + pip install -e . + ``` + +ะกะผะพั‚ั€ะธั‚ะต ั„ะฐะนะป [requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) `ultralytics` ะดะปั ัะฟะธัะบะฐ ะทะฐะฒะธัะธะผะพัั‚ะตะน. ะžะฑั€ะฐั‚ะธั‚ะต ะฒะฝะธะผะฐะฝะธะต, ั‡ั‚ะพ ะฒัะต ะฟั€ะธะฒะตะดะตะฝะฝั‹ะต ะฒั‹ัˆะต ะฟั€ะธะผะตั€ั‹ ัƒัั‚ะฐะฝะฐะฒะปะธะฒะฐัŽั‚ ะฒัะต ะฝะตะพะฑั…ะพะดะธะผั‹ะต ะทะฐะฒะธัะธะผะพัั‚ะธ. + +

+
+ +
+ Watch: Ultralytics YOLO Quick Start Guide +

+ +!!! Tip "ะกะพะฒะตั‚" + + ะขั€ะตะฑะพะฒะฐะฝะธั PyTorch ะทะฐะฒะธััั‚ ะพั‚ ะพะฟะตั€ะฐั†ะธะพะฝะฝะพะน ัะธัั‚ะตะผั‹ ะธ ั‚ั€ะตะฑะพะฒะฐะฝะธะน CUDA, ะฟะพัั‚ะพะผัƒ ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ัะฝะฐั‡ะฐะปะฐ ัƒัั‚ะฐะฝะพะฒะธั‚ัŒ PyTorch, ัะปะตะดัƒั ะธะฝัั‚ั€ัƒะบั†ะธัะผ ะฝะฐ [https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally). + + + ะ˜ะฝัั‚ั€ัƒะบั†ะธะธ ะฟะพ ัƒัั‚ะฐะฝะพะฒะบะต PyTorch + + +## ะ˜ัะฟะพะปัŒะทะพะฒะฐะฝะธะต Ultralytics ั CLI + +ะ˜ะฝั‚ะตั€ั„ะตะนั ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ (CLI) Ultralytics ะฟะพะทะฒะพะปัะตั‚ ะฒั‹ะฟะพะปะฝัั‚ัŒ ะฟั€ะพัั‚ั‹ะต ะบะพะผะฐะฝะดั‹ ะพะดะฝะพะน ัั‚ั€ะพะบะพะน ะฑะตะท ะฝะตะพะฑั…ะพะดะธะผะพัั‚ะธ ะฝะฐัั‚ั€ะพะนะบะธ Python ัั€ะตะดั‹. CLI ะฝะต ั‚ั€ะตะฑัƒะตั‚ ะฝะฐัั‚ั€ะพะนะบะธ ะธะปะธ ะบะพะดะฐ ะฝะฐ Python. ะ’ัะต ะทะฐะดะฐั‡ะธ ะผะพะถะฝะพ ะปะตะณะบะพ ะฒั‹ะฟะพะปะฝะธั‚ัŒ ะธะท ั‚ะตั€ะผะธะฝะฐะปะฐ ั ะฟะพะผะพั‰ัŒัŽ ะบะพะผะฐะฝะดั‹ `yolo`. ะŸั€ะพั‡ั‚ะธั‚ะต [ะ ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ CLI](/../usage/cli.md), ั‡ั‚ะพะฑั‹ ัƒะทะฝะฐั‚ัŒ ะฑะพะปัŒัˆะต ะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ YOLOv8 ะธะท ะบะพะผะฐะฝะดะฝะพะน ัั‚ั€ะพะบะธ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Cะธะฝั‚ะฐะบัะธั" + + ะšะพะผะฐะฝะดั‹ Ultralytics `yolo` ะธัะฟะพะปัŒะทัƒัŽั‚ ัะปะตะดัƒัŽั‰ะธะน ัะธะฝั‚ะฐะบัะธั: + ```bash + yolo ะ—ะะ”ะะงะ ะ ะ•ะ–ะ˜ะœ ะะ ะ“ะฃะœะ•ะะขะซ + + ะ“ะดะต ะ—ะะ”ะะงะ (ะฝะตะพะฑัะทะฐั‚ะตะปัŒะฝะพ) ะพะดะฝะฐ ะธะท [detect, segment, classify] + ะ ะ•ะ–ะ˜ะœ (ะพะฑัะทะฐั‚ะตะปัŒะฝะพ) ะพะดะธะฝ ะธะท [train, val, predict, export, track] + ะะ ะ“ะฃะœะ•ะะขะซ (ะฝะตะพะฑัะทะฐั‚ะตะปัŒะฝะพ) ะปัŽะฑะพะต ะบะพะปะธั‡ะตัั‚ะฒะพ ะฟะฐั€ 'arg=value', ะบะพั‚ะพั€ั‹ะต ะฟะตั€ะตะพะฟั€ะตะดะตะปััŽั‚ ะฝะฐัั‚ั€ะพะนะบะธ ะฟะพ ัƒะผะพะปั‡ะฐะฝะธัŽ. + ``` + ะกะผะพั‚ั€ะธั‚ะต ะฒัะต ะะ ะ“ะฃะœะ•ะะขะซ ะฒ ะฟะพะปะฝะพะผ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ ะบะพะฝั„ะธะณัƒั€ะฐั†ะธะธ](/../usage/cfg.md) ะธะปะธ ั ะฟะพะผะพั‰ัŒัŽ `yolo cfg` + + === "Train" + + ะžะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะธ ะดะปั ะดะตั‚ะตะบั†ะธะธ ะฝะฐ 10 ัะฟะพั…ะฐั… ั ะฝะฐั‡ะฐะปัŒะฝะพะน ัะบะพั€ะพัั‚ัŒัŽ ะพะฑัƒั‡ะตะฝะธั 0.01 + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "Predict" + + ะŸั€ะพะณะฝะพะทะธั€ะพะฒะฐะฝะธะต ะฒะธะดะตะพ ั YouTube ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะฟั€ะธ ั€ะฐะทะผะตั€ะต ะธะทะพะฑั€ะฐะถะตะฝะธั 320: + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "Val" + + ะ’ะฐะปะธะดะฐั†ะธั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ ะดะตั‚ะตะบั†ะธะธ ั ั€ะฐะทะผะตั€ะพะผ ะฟะฐั€ั‚ะธะธ 1 ะธ ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั 640: + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "Export" + + ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ YOLOv8n ะฒ ั„ะพั€ะผะฐั‚ ONNX ั ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั 224 ะฝะฐ 128 (TASK ะฝะต ั‚ั€ะตะฑัƒะตั‚ัั) + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "Special" + + ะ’ั‹ะฟะพะปะฝะตะฝะธะต ัะฟะตั†ะธะฐะปัŒะฝั‹ั… ะบะพะผะฐะฝะด ะดะปั ะฟั€ะพัะผะพั‚ั€ะฐ ะฒะตั€ัะธะธ, ะฝะฐัั‚ั€ะพะตะบ, ะทะฐะฟัƒัะบะฐ ะฟั€ะพะฒะตั€ะพะบ ะธ ะดั€ัƒะณะพะณะพ: + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "ะŸั€ะตะดัƒะฟั€ะตะถะดะตะฝะธะต" + + ะั€ะณัƒะผะตะฝั‚ั‹ ะดะพะปะถะฝั‹ ะฟะตั€ะตะดะฐะฒะฐั‚ัŒัั ะฒ ะฒะธะดะต ะฟะฐั€ `arg=val`, ั€ะฐะทะดะตะปะตะฝะฝั‹ั… ะทะฝะฐะบะพะผ ั€ะฐะฒะตะฝัั‚ะฒะฐ `=`, ะธ ั€ะฐะทะดะตะปะตะฝั‹ ะฟั€ะพะฑะตะปะฐะผะธ ` ` ะผะตะถะดัƒ ะฟะฐั€ะฐะผะธ. 
ะะต ะธัะฟะพะปัŒะทัƒะนั‚ะต ะฟั€ะตั„ะธะบัั‹ ะฐั€ะณัƒะผะตะฝั‚ะพะฒ `--` ะธะปะธ ะทะฐะฟัั‚ั‹ะต `,` ะผะตะถะดัƒ ะฐั€ะณัƒะผะตะฝั‚ะฐะผะธ. + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`   โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`   โŒ + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`   โŒ + +[ะ ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ CLI](/../usage/cli.md){ .md-button } + +## ะ˜ัะฟะพะปัŒะทะพะฒะฐะฝะธะต Ultralytics ั Python + +Python ะธะฝั‚ะตั€ั„ะตะนั YOLOv8 ะฟะพะทะฒะพะปัะตั‚ ะปะตะณะบะพ ะธะฝั‚ะตะณั€ะธั€ะพะฒะฐั‚ัŒ ะตะณะพ ะฒ ะฒะฐัˆะธ Python ะฟั€ะพะตะบั‚ั‹, ัƒะฟั€ะพั‰ะฐั ะทะฐะณั€ัƒะทะบัƒ, ะฒั‹ะฟะพะปะฝะตะฝะธะต ะธ ะพะฑั€ะฐะฑะพั‚ะบัƒ ั€ะตะทัƒะปัŒั‚ะฐั‚ะพะฒ ั€ะฐะฑะพั‚ั‹ ะผะพะดะตะปะธ. ะ˜ะฝั‚ะตั€ั„ะตะนั Python ั€ะฐะทั€ะฐะฑะพั‚ะฐะฝ ั ะฐะบั†ะตะฝั‚ะพะผ ะฝะฐ ะฟั€ะพัั‚ะพั‚ัƒ ะธ ัƒะดะพะฑัั‚ะฒะพ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั, ะฟะพะทะฒะพะปัั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัะผ ะฑั‹ัั‚ั€ะพ ะฒะฝะตะดั€ัั‚ัŒ ั„ัƒะฝะบั†ะธะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ, ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะฒ ะธั… ะฟั€ะพะตะบั‚ะฐั…. ะญั‚ะพ ะดะตะปะฐะตั‚ ะธะฝั‚ะตั€ั„ะตะนั Python YOLOv8 ะฝะตะทะฐะผะตะฝะธะผั‹ะผ ะธะฝัั‚ั€ัƒะผะตะฝั‚ะพะผ ะดะปั ั‚ะตั…, ะบั‚ะพ ั…ะพั‡ะตั‚ ะฒะบะปัŽั‡ะธั‚ัŒ ัั‚ะธ ั„ัƒะฝะบั†ะธะธ ะฒ ัะฒะพะธ Python ะฟั€ะพะตะบั‚ั‹. + +ะะฐะฟั€ะธะผะตั€, ะฟะพะปัŒะทะพะฒะฐั‚ะตะปะธ ะผะพะณัƒั‚ ะทะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ, ะพะฑัƒั‡ะธั‚ัŒ ะตะต, ะพั†ะตะฝะธั‚ัŒ ะตะต ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ัŒ ะฝะฐ ะฒะฐะปะธะดะฐั†ะธะพะฝะฝะพะผ ะฝะฐะฑะพั€ะต, ะธ ะดะฐะถะต ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะตะต ะฒ ั„ะพั€ะผะฐั‚ ONNX ะฒัะตะณะพ ะทะฐ ะฝะตัะบะพะปัŒะบะพ ัั‚ั€ะพะบ ะบะพะดะฐ. ะŸะพะดั€ะพะฑะฝะตะต ะพ ั‚ะพะผ, ะบะฐะบ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ YOLOv8 ะฒ ะฒะฐัˆะธั… Python ะฟั€ะพะตะบั‚ะฐั…, ั‡ะธั‚ะฐะนั‚ะต ะฒ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ Python](/../usage/python.md). + +!!! Example "ะŸั€ะธะผะตั€" + + ```python + from ultralytics import YOLO + + # ะกะพะทะดะฐะฝะธะต ะฝะพะฒะพะน YOLO ะผะพะดะตะปะธ ั ะฝัƒะปั + model = YOLO('yolov8n.yaml') + + # ะ—ะฐะณั€ัƒะทะบะฐ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน YOLO ะผะพะดะตะปะธ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n.pt') + + # ะžะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะธ ั ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะตะผ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… 'coco128.yaml' ะฝะฐ 3 ัะฟะพั…ะธ + results = model.train(data='coco128.yaml', epochs=3) + + # ะžั†ะตะฝะบะฐ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ ะผะพะดะตะปะธ ะฝะฐ ะฒะฐะปะธะดะฐั†ะธะพะฝะฝะพะผ ะฝะฐะฑะพั€ะต + results = model.val() + + # ะ’ั‹ะฟะพะปะฝะตะฝะธะต ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ + results = model('https://ultralytics.com/images/bus.jpg') + + # ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะธ ะฒ ั„ะพั€ะผะฐั‚ ONNX + success = model.export(format='onnx') + ``` + +[ะ ัƒะบะพะฒะพะดัั‚ะฒะพ ะฟะพ Python](/../usage/python.md){.md-button .md-button--primary} diff --git a/ultralytics/docs/ru/quickstart.md:Zone.Identifier b/ultralytics/docs/ru/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/tasks/classify.md b/ultralytics/docs/ru/tasks/classify.md new file mode 100755 index 0000000..f255949 --- /dev/null +++ b/ultralytics/docs/ru/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต ะพ ะผะพะดะตะปัั… ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน YOLOv8 Classify. 
ะŸะพะปัƒั‡ะธั‚ะต ะฟะพะดั€ะพะฑะฝัƒัŽ ะธะฝั„ะพั€ะผะฐั†ะธัŽ ะพ ัะฟะธัะบะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ั… ะผะพะดะตะปะตะน ะธ ะบะฐะบ ะฟั€ะพะฒะตัั‚ะธ ะžะฑัƒั‡ะตะฝะธะต, ะ’ะฐะปะธะดะฐั†ะธัŽ, ะŸั€ะตะดัะบะฐะทะฐะฝะธะต ะธ ะญะบัะฟะพั€ั‚ ะผะพะดะตะปะตะน. +keywords: Ultralytics, YOLOv8, ะบะปะฐััะธั„ะธะบะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ, YOLOv8n-cls, ะพะฑัƒั‡ะตะฝะธะต, ะฒะฐะปะธะดะฐั†ะธั, ะฟั€ะตะดัะบะฐะทะฐะฝะธะต, ัะบัะฟะพั€ั‚ ะผะพะดะตะปะธ +--- + +# ะšะปะฐััะธั„ะธะบะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน + +ะŸั€ะธะผะตั€ั‹ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน + +ะšะปะฐััะธั„ะธะบะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน - ัั‚ะพ ัะฐะผะฐั ะฟั€ะพัั‚ะฐั ะธะท ั‚ั€ะตั… ะทะฐะดะฐั‡ ะธ ะทะฐะบะปัŽั‡ะฐะตั‚ัั ะฒ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะฒัะตะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั ะฟะพ ะพะดะฝะพะผัƒ ะธะท ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฟั€ะตะดะตะปะตะฝะฝั‹ั… ะบะปะฐััะพะฒ. + +ะ’ั‹ั…ะพะด ะบะปะฐััะธั„ะธะบะฐั‚ะพั€ะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะน - ัั‚ะพ ะพะดะธะฝ ะบะปะฐััะพะฒั‹ะน ัั€ะปั‹ะบ ะธ ัƒั€ะพะฒะตะฝัŒ ะดะพะฒะตั€ะธั. ะšะปะฐััะธั„ะธะบะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฟะพะปะตะทะฝะฐ, ะบะพะณะดะฐ ะฒะฐะผ ะฝัƒะถะฝะพ ะทะฝะฐั‚ัŒ ั‚ะพะปัŒะบะพ ะบ ะบะฐะบะพะผัƒ ะบะปะฐัััƒ ะพั‚ะฝะพัะธั‚ัั ะธะทะพะฑั€ะฐะถะตะฝะธะต, ะธ ะฝะต ะฝัƒะถะฝะพ ะทะฝะฐั‚ัŒ, ะณะดะต ะฝะฐั…ะพะดัั‚ัั ะพะฑัŠะตะบั‚ั‹ ะดะฐะฝะฝะพะณะพ ะบะปะฐััะฐ ะธะปะธ ะบะฐะบะพะฒะฐ ะธั… ั‚ะพั‡ะฝะฐั ั„ะพั€ะผะฐ. + +!!! Tip "ะกะพะฒะตั‚" + + ะœะพะดะตะปะธ YOLOv8 Classify ะธัะฟะพะปัŒะทัƒัŽั‚ ััƒั„ั„ะธะบั `-cls`, ะฝะฐะฟั€ะธะผะตั€ `yolov8n-cls.pt`, ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +## [ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ะ—ะดะตััŒ ะฟะพะบะฐะทะฐะฝั‹ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ YOLOv8. ะœะพะดะตะปะธ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั, ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะธ ะฟะพะทั‹ ะพะฑัƒั‡ะฐัŽั‚ัั ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), ะฒ ั‚ะพ ะฒั€ะตะผั ะบะฐะบ ะผะพะดะตะปะธ ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะพะฑัƒั‡ะฐัŽั‚ัั ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +[ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะณั€ัƒะถะฐัŽั‚ัั ะธะท ะฟะพัะปะตะดะฝะตะณะพ ั€ะตะปะธะทะฐ Ultralytics [release](https://github.com/ultralytics/assets/releases) ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ. + +| ะœะพะดะตะปัŒ | ะ ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | ะขะพั‡ะฝะพัั‚ัŒ
top1 | ะขะพั‡ะฝะพัั‚ัŒ
top5 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะŸะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(ะ‘) ะฝะฐ 640 | +|----------------------------------------------------------------------------------------------|--------------------------|-----------------------|-----------------------|-----------------------------------|----------------------------------------|-----------------------|--------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- ะ—ะฝะฐั‡ะตะฝะธั **ั‚ะพั‡ะฝะพัั‚ัŒ** ัƒะบะฐะทั‹ะฒะฐัŽั‚ ะฝะฐ ั‚ะพั‡ะฝะพัั‚ัŒ ะผะพะดะตะปะธ ะฝะฐ ะฒะฐะปะธะดะฐั†ะธะพะฝะฝะพะผ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [ImageNet](https://www.image-net.org/). +
ะŸะพะฒั‚ะพั€ะธั‚ัŒ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะผะพะถะฝะพ ั ะฟะพะผะพั‰ัŒัŽ `yolo val classify data=path/to/ImageNet device=0`. +- **ะกะบะพั€ะพัั‚ัŒ** ัƒัั€ะตะดะฝะตะฝะฐ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัะผ ะดะปั ะฒะฐะปะธะดะฐั†ะธะธ ImageNet, ะธัะฟะพะปัŒะทัƒั ะธะฝัั‚ะฐะฝั [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
ะŸะพะฒั‚ะพั€ะธั‚ัŒ ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ ะผะพะถะฝะพ ั ะฟะพะผะพั‰ัŒัŽ `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`. + +## ะžะฑัƒั‡ะตะฝะธะต + +ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ YOLOv8n-cls ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… MNIST160 ะฝะฐ ะฟั€ะพั‚ัะถะตะฝะธะธ 100 ัะฟะพั… ั ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั 64. ะŸะพะปะฝั‹ะน ัะฟะธัะพะบ ะดะพัั‚ัƒะฟะฝั‹ั… ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ะฟั€ะธะฒะตะดะตะฝ ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะšะพะฝั„ะธะณัƒั€ะฐั†ะธั](/../usage/cfg.md). + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n-cls.yaml') # ัะพะทะดะฐะนั‚ะต ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML + model = YOLO('yolov8n-cls.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # ัะพะทะดะฐะนั‚ะต ะธะท YAML ะธ ะฟะตั€ะตะฝะตัะธั‚ะต ะฒะตัะฐ + + # ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ + ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # ะกะพะทะดะฐะนั‚ะต ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML ะธ ะฝะฐั‡ะฝะธั‚ะต ะพะฑัƒั‡ะตะฝะธะต ั ะฝัƒะปั + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # ะะฐั‡ะฝะธั‚ะต ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน *.pt ะผะพะดะตะปะธ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # ะกะพะทะดะฐะนั‚ะต ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML, ะฟะตั€ะตะฝะตัะธั‚ะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ ะธ ะฝะฐั‡ะฝะธั‚ะต ะพะฑัƒั‡ะตะฝะธะต + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… + +ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะดะปั ะบะปะฐััะธั„ะธะบะฐั†ะธะธ YOLO ะผะพะถะฝะพ ะฟะพะดั€ะพะฑะฝะพ ะธะทัƒั‡ะธั‚ัŒ ะฒ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ ะฝะฐะฑะพั€ะฐะผ ะดะฐะฝะฝั‹ั…](../../../datasets/classify/index.md). + +## ะ’ะฐะปะธะดะฐั†ะธั + +ะŸั€ะพะฒะตั€ัŒั‚ะต ั‚ะพั‡ะฝะพัั‚ัŒ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n-cls ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… MNIST160. ะะต ะฝัƒะถะฝะพ ะฟะตั€ะตะดะฐะฒะฐั‚ัŒ ะบะฐะบะธะต-ะปะธะฑะพ ะฐั€ะณัƒะผะตะฝั‚ั‹, ั‚ะฐะบ ะบะฐะบ `model` ัะพั…ั€ะฐะฝัะตั‚ ัะฒะพะธ `data` ะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฒ ะบะฐั‡ะตัั‚ะฒะต ะฐั‚ั€ะธะฑัƒั‚ะพะฒ ะผะพะดะตะปะธ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n-cls.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะŸั€ะพะฒะตะดะธั‚ะต ะฒะฐะปะธะดะฐั†ะธัŽ ะผะพะดะตะปะธ + ะผะตั‚ั€ะธะบะธ = model.val() # ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะทะฐะฟะพะผะฝะตะฝั‹ + ะผะตั‚ั€ะธะบะธ.top1 # ั‚ะพั‡ะฝะพัั‚ัŒ top1 + ะผะตั‚ั€ะธะบะธ.top5 # ั‚ะพั‡ะฝะพัั‚ัŒ top5 + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # ะฒะฐะปะธะดะฐั†ะธั ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + yolo classify val model=path/to/best.pt # ะฒะฐะปะธะดะฐั†ะธั ัะพะฑัั‚ะฒะตะฝะฝะพะน ะผะพะดะตะปะธ + ``` + +## ะŸั€ะตะดัะบะฐะทะฐะฝะธะต + +ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n-cls ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธัั…. + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n-cls.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะกะดะตะปะฐะนั‚ะต ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ + ั€ะตะทัƒะปัŒั‚ะฐั‚ั‹ = model('https://ultralytics.com/images/bus.jpg') # ัะดะตะปะฐะนั‚ะต ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปัŒัŽ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ัะพะฑัั‚ะฒะตะฝะฝะพะน ะผะพะดะตะปัŒัŽ + ``` + +ะŸะพะดั€ะพะฑะฝะฐั ะธะฝั„ะพั€ะผะฐั†ะธั ะพ ั€ะตะถะธะผะต `predict` ะฟั€ะธะฒะตะดะตะฝะฐ ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะŸั€ะตะดัะบะฐะทะฐะฝะธะต](https://docs.ultralytics.com/modes/predict/). + +## ะญะบัะฟะพั€ั‚ + +ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ YOLOv8n-cls ะฒ ะดั€ัƒะณะพะน ั„ะพั€ะผะฐั‚, ะฝะฐะฟั€ะธะผะตั€, ONNX, CoreML ะธ ั‚. ะด. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n-cls.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ะต ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # ัะบัะฟะพั€ั‚ ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + yolo export model=path/to/best.pt format=onnx # ัะบัะฟะพั€ั‚ ัะพะฑัั‚ะฒะตะฝะฝะพะน ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ + ``` + +ะ”ะพัั‚ัƒะฟะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ YOLOv8-cls ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. ะ’ั‹ ะผะพะถะตั‚ะต ะฒั‹ะฟะพะปะฝัั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะธะปะธ ะฒะฐะปะธะดะฐั†ะธัŽ ะฟั€ัะผะพ ะฝะฐ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝั‹ั… ะผะพะดะตะปัั…, ะฝะฐะฟั€ะธะผะตั€, `yolo predict model=yolov8n-cls.onnx`. ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฟะพะบะฐะทะฐะฝั‹ ะดะปั ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฟะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัะบัะฟะพั€ั‚ะฐ. 
+ +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|-------------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +ะŸะพะดั€ะพะฑะฝะฐั ะธะฝั„ะพั€ะผะฐั†ะธั ะพะฑ ัะบัะฟะพั€ั‚ะต ะฟั€ะธะฒะตะดะตะฝะฐ ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะญะบัะฟะพั€ั‚](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/ru/tasks/classify.md:Zone.Identifier b/ultralytics/docs/ru/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/tasks/detect.md b/ultralytics/docs/ru/tasks/detect.md new file mode 100755 index 0000000..8110dd5 --- /dev/null +++ b/ultralytics/docs/ru/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: ะžั„ะธั†ะธะฐะปัŒะฝะฐั ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั YOLOv8 ะพั‚ Ultralytics. ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะฟั€ะพะฒะพะดะธั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต, ะฟั€ะพะฒะตั€ะบัƒ, ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ะธ ัะบัะฟะพั€ั‚ ะผะพะดะตะปะตะน ะฒ ั€ะฐะทะปะธั‡ะฝั‹ั… ั„ะพั€ะผะฐั‚ะฐั…. ะ’ะบะปัŽั‡ะฐั ะฟะพะดั€ะพะฑะฝั‹ะต ัั‚ะฐั‚ะธัั‚ะธั‡ะตัะบะธะต ะดะฐะฝะฝั‹ะต ะพ ะฟั€ะพะธะทะฒะพะดะธั‚ะตะปัŒะฝะพัั‚ะธ. +keywords: YOLOv8, Ultralytics, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ, ะพะฑัƒั‡ะตะฝะธะต, ะฒะฐะปะธะดะฐั†ะธั, ะฟั€ะตะดัะบะฐะทะฐะฝะธะต, ัะบัะฟะพั€ั‚ ะผะพะดะตะปะตะน, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ + +ะŸั€ะธะผะตั€ั‹ ะพะฑะฝะฐั€ัƒะถะตะฝะธั ะพะฑัŠะตะบั‚ะพะฒ + +ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ โ€“ ัั‚ะพ ะทะฐะดะฐั‡ะฐ, ะบะพั‚ะพั€ะฐั ะฒะบะปัŽั‡ะฐะตั‚ ะธะดะตะฝั‚ะธั„ะธะบะฐั†ะธัŽ ะผะตัั‚ะพะฟะพะปะพะถะตะฝะธั ะธ ะบะปะฐััะฐ ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธะปะธ ะฒะธะดะตะพ. 
+
+ะ ะตะทัƒะปัŒั‚ะฐั‚ ั€ะฐะฑะพั‚ั‹ ะดะตั‚ะตะบั‚ะพั€ะฐ ะพะฑัŠะตะบั‚ะพะฒ โ€“ ัั‚ะพ ะฝะฐะฑะพั€ ะพะณั€ะฐะฝะธั‡ะธะฒะฐัŽั‰ะธั… ั€ะฐะผะพะบ, ะบะพั‚ะพั€ั‹ะต ะทะฐะบะปัŽั‡ะฐัŽั‚ ะฒ ัะตะฑะต ะพะฑัŠะตะบั‚ั‹ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะฒะผะตัั‚ะต ั ะผะตั‚ะบะฐะผะธ ะบะปะฐััะพะฒ ะธ ัƒั€ะพะฒะฝัะผะธ ะดะพัั‚ะพะฒะตั€ะฝะพัั‚ะธ ะดะปั ะบะฐะถะดะพะน ั€ะฐะผะบะธ. ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ัะฒะปัะตั‚ัั ั…ะพั€ะพัˆะธะผ ะฒั‹ะฑะพั€ะพะผ, ะบะพะณะดะฐ ะฝะตะพะฑั…ะพะดะธะผะพ ะพะฟั€ะตะดะตะปะธั‚ัŒ ะพะฑัŠะตะบั‚ั‹ ะธะฝั‚ะตั€ะตัะฐ ะฒ ัั†ะตะฝะต, ะฝะพ ะฝะต ั‚ั€ะตะฑัƒะตั‚ัั ะทะฝะฐั‚ัŒ ะธั… ั‚ะพั‡ะฝัƒัŽ ั„ะพั€ะผัƒ.
+
+**ะกะผะพั‚ั€ะธั‚ะต:** ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ ั ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปัŒัŽ Ultralytics YOLOv8.
+ +!!! Tip "ะกะพะฒะตั‚" + + YOLOv8 Detect ะผะพะดะตะปะธ ัะฒะปััŽั‚ัั ัั‚ะฐะฝะดะฐั€ั‚ะฝั‹ะผะธ ะผะพะดะตะปัะผะธ YOLOv8, ั‚ะพ ะตัั‚ัŒ `yolov8n.pt`, ะธ ะฟั€ะตะดะพะฑัƒั‡ะตะฝั‹ ะฝะฐ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ะ—ะดะตััŒ ะฟะพะบะฐะทะฐะฝั‹ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ YOLOv8 Detect. ะœะพะดะตะปะธ Detect, Segment ะธ Pose ะฟั€ะตะดะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะดะฐั‚ะฐัะตั‚ะต [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), ะฒ ั‚ะพ ะฒั€ะตะผั ะบะฐะบ ะผะพะดะตะปะธ Classify ะฟั€ะตะดะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะดะฐั‚ะฐัะตั‚ะต [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +[ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะณั€ัƒะถะฐัŽั‚ัั ั ะฟะพัะปะตะดะฝะตะณะพ ั€ะตะปะธะทะฐ Ultralytics [release](https://github.com/ultralytics/assets/releases) ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ. + +| ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPval
50-95 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(ะ‘) | +|--------------------------------------------------------------------------------------|--------------------------|----------------------|-----------------------------------|----------------------------------------|-----------------------|-------------------| +| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | +| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | +| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | +| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | +| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + +- **mAPval** ะทะฝะฐั‡ะตะฝะธั ะดะปั ะพะดะธะฝะพั‡ะฝะพะน ะผะพะดะตะปะธ ะพะดะธะฝะพั‡ะฝะพะณะพ ะผะฐััˆั‚ะฐะฑะฐ ะฝะฐ ะดะฐั‚ะฐัะตั‚ะต [COCO val2017](http://cocodataset.org). +
ะ”ะปั ะฒะพัะฟั€ะพะธะทะฒะตะดะตะฝะธั ะธัะฟะพะปัŒะทัƒะนั‚ะต `yolo val detect data=coco.yaml device=0` +- **ะกะบะพั€ะพัั‚ัŒ** ัƒัั€ะตะดะฝะตะฝะฐ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัะผ COCO val ะฝะฐ ัะบะทะตะผะฟะปัั€ะต [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/). +
ะ”ะปั ะฒะพัะฟั€ะพะธะทะฒะตะดะตะฝะธั ะธัะฟะพะปัŒะทัƒะนั‚ะต `yolo val detect data=coco128.yaml batch=1 device=0|cpu` + +## ะžะฑัƒั‡ะตะฝะธะต + +ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ YOLOv8n ะฝะฐ ะดะฐั‚ะฐัะตั‚ะต COCO128 ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… ั ั€ะฐะทะผะตั€ะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธั 640. ะŸะพะปะฝั‹ะน ัะฟะธัะพะบ ะดะพัั‚ัƒะฟะฝั‹ั… ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ัะผ. ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะšะพะฝั„ะธะณัƒั€ะฐั†ะธั](/../usage/cfg.md). + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n.yaml') # ัะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # ัะพะทะดะฐั‚ัŒ ะธะท YAML ะธ ะฟะตั€ะตะฝะตัั‚ะธ ะฒะตัะฐ + + # ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฝัƒะปั + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML, ะฟะตั€ะตะฝะตัั‚ะธ ะฒ ะฝะตะต ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ะคะพั€ะผะฐั‚ ะดะฐั‚ะฐัะตั‚ะฐ + +ะคะพั€ะผะฐั‚ ะดะฐั‚ะฐัะตั‚ะฐ ะดะปั ะพะฑะฝะฐั€ัƒะถะตะฝะธั YOLO ะผะพะถะฝะพ ะฝะฐะนั‚ะธ ะฑะพะปะตะต ะฟะพะดั€ะพะฑะฝะพ ะฒ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ ะดะฐั‚ะฐัะตั‚ะฐะผ](../../../datasets/detect/index.md). ะงั‚ะพะฑั‹ ะบะพะฝะฒะตั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะฒะฐัˆ ััƒั‰ะตัั‚ะฒัƒัŽั‰ะธะน ะดะฐั‚ะฐัะตั‚ ะธะท ะดั€ัƒะณะธั… ั„ะพั€ะผะฐั‚ะพะฒ (ะฝะฐะฟั€ะธะผะตั€, COCO ะธ ั‚.ะด.) ะฒ ั„ะพั€ะผะฐั‚ YOLO, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะธัะฟะพะปัŒะทัƒะนั‚ะต ะธะฝัั‚ั€ัƒะผะตะฝั‚ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ะพั‚ Ultralytics. + +## ะ’ะฐะปะธะดะฐั†ะธั + +ะŸั€ะพะฒะตั€ัŒั‚ะต ั‚ะพั‡ะฝะพัั‚ัŒ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n ะฝะฐ ะดะฐั‚ะฐัะตั‚ะต COCO128. ะะตะพะฑั…ะพะดะธะผะพ ะฟะตั€ะตะดะฐั‚ัŒ ะฐั€ะณัƒะผะตะฝั‚ั‹, ะฟะพัะบะพะปัŒะบัƒ `model` ัะพั…ั€ะฐะฝัะตั‚ ัะฒะพะธ `data` ะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ ะพะฑัƒั‡ะตะฝะธั ะบะฐะบ ะฐั‚ั€ะธะฑัƒั‚ั‹ ะผะพะดะตะปะธ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะŸั€ะพะฒะตั€ัŒั‚ะต ะผะพะดะตะปัŒ + metrics = model.val() # ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะทะฐะฟะพะผะธะฝะฐัŽั‚ัั + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ัะฟะธัะพะบ ัะพะดะตั€ะถะธั‚ map50-95 ะดะปั ะบะฐะถะดะพะน ะบะฐั‚ะตะณะพั€ะธะธ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # val ะพั„ะธั†ะธะฐะปัŒะฝะฐั ะผะพะดะตะปัŒ + yolo detect val model=path/to/best.pt # val ัะพะฑัั‚ะฒะตะฝะฝะฐั ะผะพะดะตะปัŒ + ``` + +## ะŸั€ะตะดัะบะฐะทะฐะฝะธะต + +ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธัั…. 
+ +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะกะดะตะปะฐะนั‚ะต ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ + results = model('https://ultralytics.com/images/bus.jpg') # ัะดะตะปะฐั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปัŒัŽ + yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ัะพะฑัั‚ะฒะตะฝะฝะพะน ะผะพะดะตะปัŒัŽ + ``` + +ะŸะพะปะฝั‹ะต ะดะตั‚ะฐะปะธ ั€ะตะถะธะผะฐ `predict` ัะผะพั‚ั€ะธั‚ะต ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะŸั€ะตะดัะบะฐะทะฐะฝะธะต](https://docs.ultralytics.com/modes/predict/). + +## ะญะบัะฟะพั€ั‚ + +ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ YOLOv8n ะฒ ะดั€ัƒะณะพะน ั„ะพั€ะผะฐั‚, ั‚ะฐะบะพะน ะบะฐะบ ONNX, CoreML ะธ ะดั€. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ะต ะผะพะดะตะปัŒ + model = YOLO('yolov8n.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะพะฑัั‚ะฒะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ ะฟะพัะปะต ะพะฑัƒั‡ะตะฝะธั + + # ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ัะบัะฟะพั€ั‚ ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + yolo export model=path/to/best.pt format=onnx # ัะบัะฟะพั€ั‚ ัะพะฑัั‚ะฒะตะฝะฝะพะน ะผะพะดะตะปะธ ะฟะพัะปะต ะพะฑัƒั‡ะตะฝะธั + ``` + +ะ”ะพัั‚ัƒะฟะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ YOLOv8 ะฟั€ะธะฒะตะดะตะฝั‹ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. ะ’ั‹ ะผะพะถะตั‚ะต ะฒั‹ะฟะพะปะฝัั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะธะปะธ ะฟั€ะพะฒะตั€ะบัƒ ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ะฝะฐ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝั‹ั… ะผะพะดะตะปัั…, ะฝะฐะฟั€ะธะผะตั€ `yolo predict model=yolov8n.onnx`. ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะดะปั ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฟะพะบะฐะทะฐะฝั‹ ะฟะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัะบัะฟะพั€ั‚ะฐ. 
+ +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|---------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +ะŸะพะปะฝั‹ะต ะดะตั‚ะฐะปะธ ั€ะตะถะธะผะฐ `export` ัะผะพั‚ั€ะธั‚ะต ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะญะบัะฟะพั€ั‚](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/ru/tasks/detect.md:Zone.Identifier b/ultralytics/docs/ru/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/tasks/index.md b/ultralytics/docs/ru/tasks/index.md new file mode 100755 index 0000000..cc88209 --- /dev/null +++ b/ultralytics/docs/ru/tasks/index.md @@ -0,0 +1,55 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต ะพ ะบะปัŽั‡ะตะฒั‹ั… ะทะฐะดะฐั‡ะฐั… ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั, ะบะพั‚ะพั€ั‹ะต ะผะพะถะตั‚ ะฒั‹ะฟะพะปะฝัั‚ัŒ YOLOv8, ะฒะบะปัŽั‡ะฐั ะพะฑะฝะฐั€ัƒะถะตะฝะธะต, ัะตะณะผะตะฝั‚ะฐั†ะธัŽ, ะบะปะฐััะธั„ะธะบะฐั†ะธัŽ ะธ ะพั†ะตะฝะบัƒ ะฟะพะทั‹. ะŸะพะนะผะธั‚ะต, ะบะฐะบ ะพะฝะธ ะผะพะณัƒั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝั‹ ะฒ ะฒะฐัˆะธั… AI ะฟั€ะพะตะบั‚ะฐั…. +keywords: Ultralytics, YOLOv8, ะžะฑะฝะฐั€ัƒะถะตะฝะธะต, ะกะตะณะผะตะฝั‚ะฐั†ะธั, ะšะปะฐััะธั„ะธะบะฐั†ะธั, ะžั†ะตะฝะบะฐ ะŸะพะทั‹, AI ะคั€ะตะนะผะฒะพั€ะบ, ะ—ะฐะดะฐั‡ะธ ะšะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะ—ั€ะตะฝะธั +--- + +# ะ—ะฐะดะฐั‡ะธ Ultralytics YOLOv8 + +
+ะŸะพะดะดะตั€ะถะธะฒะฐะตะผั‹ะต ะทะฐะดะฐั‡ะธ Ultralytics YOLO
+
+YOLOv8 โ€” ัั‚ะพ AI ั„ั€ะตะนะผะฒะพั€ะบ, ะฟะพะดะดะตั€ะถะธะฒะฐัŽั‰ะธะน ะผะฝะพะถะตัั‚ะฒะพ **ะทะฐะดะฐั‡** ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั. ะคั€ะตะนะผะฒะพั€ะบ ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั [ะพะฑะฝะฐั€ัƒะถะตะฝะธั](detect.md), [ัะตะณะผะตะฝั‚ะฐั†ะธะธ](segment.md), [ะบะปะฐััะธั„ะธะบะฐั†ะธะธ](classify.md) ะธ ะพั†ะตะฝะบะธ [ะฟะพะทั‹](pose.md). ะšะฐะถะดะฐั ะธะท ัั‚ะธั… ะทะฐะดะฐั‡ ะธะผะตะตั‚ ั€ะฐะทะปะธั‡ะฝั‹ะต ั†ะตะปะธ ะธ ะพะฑะปะฐัั‚ะธ ะฟั€ะธะผะตะฝะตะฝะธั.
+
+!!! Note "ะ—ะฐะผะตั‚ะบะฐ"
+
+    ๐Ÿšง ะะฐัˆะฐ ะผะฝะพะณะพัะทั‹ั‡ะฝะฐั ะดะพะบัƒะผะตะฝั‚ะฐั†ะธั ะฒ ะฝะฐัั‚ะพัั‰ะตะต ะฒั€ะตะผั ะฝะฐั…ะพะดะธั‚ัั ะฒ ัั‚ะฐะดะธะธ ั€ะฐะทั€ะฐะฑะพั‚ะบะธ, ะธ ะผั‹ ัƒัะตั€ะดะฝะพ ั€ะฐะฑะพั‚ะฐะตะผ ะฝะฐะด ะตะต ัƒะปัƒั‡ัˆะตะฝะธะตะผ. ะกะฟะฐัะธะฑะพ ะทะฐ ะฒะฐัˆะต ั‚ะตั€ะฟะตะฝะธะต! ๐Ÿ™

+
+ +
+ ะกะผะพั‚ั€ะธั‚ะต: ะ˜ะทัƒั‡ะธั‚ะต ะทะฐะดะฐั‡ะธ Ultralytics YOLO: ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะกะตะณะผะตะฝั‚ะฐั†ะธั, ะžั‚ัะปะตะถะธะฒะฐะฝะธะต ะธ ะžั†ะตะฝะบะฐ ะฟะพะทั‹. +

+ +## [ะžะฑะฝะฐั€ัƒะถะตะฝะธะต](detect.md) + +ะžะฑะฝะฐั€ัƒะถะตะฝะธะต โ€” ัั‚ะพ ะพัะฝะพะฒะฝะฐั ะทะฐะดะฐั‡ะฐ, ะฟะพะดะดะตั€ะถะธะฒะฐะตะผะฐั YOLOv8. ะžะฝะฐ ะทะฐะบะปัŽั‡ะฐะตั‚ัั ะฒ ะพะฑะฝะฐั€ัƒะถะตะฝะธะธ ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธะปะธ ะบะฐะดั€ะต ะฒะธะดะตะพ ะธ ั€ะธัะพะฒะฐะฝะธะธ ะฒะพะบั€ัƒะณ ะฝะธั… ะพะณั€ะฐะฝะธั‡ะธะฒะฐัŽั‰ะธั… ั€ะฐะผะพะบ. ะžะฑะฝะฐั€ัƒะถะตะฝะฝั‹ะต ะพะฑัŠะตะบั‚ั‹ ะบะปะฐััะธั„ะธั†ะธั€ัƒัŽั‚ัั ะฝะฐ ั€ะฐะทะฝั‹ะต ะบะฐั‚ะตะณะพั€ะธะธ ะฝะฐ ะพัะฝะพะฒะต ะธั… ั…ะฐั€ะฐะบั‚ะตั€ะธัั‚ะธะบ. YOLOv8 ะผะพะถะตั‚ ะพะฑะฝะฐั€ัƒะถะธะฒะฐั‚ัŒ ะฝะตัะบะพะปัŒะบะพ ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะพะดะฝะพะผ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธะปะธ ะฒะธะดะตะพะบะฐะดั€ะต ั ะฒั‹ัะพะบะพะน ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ัะบะพั€ะพัั‚ัŒัŽ. + +[ะŸั€ะธะผะตั€ั‹ ะžะฑะฝะฐั€ัƒะถะตะฝะธั](detect.md){ .md-button } + +## [ะกะตะณะผะตะฝั‚ะฐั†ะธั](segment.md) + +ะกะตะณะผะตะฝั‚ะฐั†ะธั โ€” ัั‚ะพ ะทะฐะดะฐั‡ะฐ, ะบะพั‚ะพั€ะฐั ะฒะบะปัŽั‡ะฐะตั‚ ั€ะฐะทะฑะธะตะฝะธะต ะธะทะพะฑั€ะฐะถะตะฝะธั ะฝะฐ ั€ะฐะทะฝั‹ะต ั€ะตะณะธะพะฝั‹ ะฝะฐ ะพัะฝะพะฒะต ัะพะดะตั€ะถะธะผะพะณะพ ะธะทะพะฑั€ะฐะถะตะฝะธั. ะšะฐะถะดะพะผัƒ ั€ะตะณะธะพะฝัƒ ะฟั€ะธัะฒะฐะธะฒะฐะตั‚ัั ะผะตั‚ะบะฐ ะฝะฐ ะพัะฝะพะฒะต ะตะณะพ ัะพะดะตั€ะถะธะผะพะณะพ. ะญั‚ะฐ ะทะฐะดะฐั‡ะฐ ะฟะพะปะตะทะฝะฐ ะฒ ั‚ะฐะบะธั… ะฟั€ะธะปะพะถะตะฝะธัั…, ะบะฐะบ ัะตะณะผะตะฝั‚ะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธ ะผะตะดะธั†ะธะฝัะบะฐั ะฒะธะทัƒะฐะปะธะทะฐั†ะธั. YOLOv8 ะธัะฟะพะปัŒะทัƒะตั‚ ะฒะฐั€ะธะฐั†ะธัŽ ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ U-Net ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ัะตะณะผะตะฝั‚ะฐั†ะธะธ. + +[ะŸั€ะธะผะตั€ั‹ ะกะตะณะผะตะฝั‚ะฐั†ะธะธ](segment.md){ .md-button } + +## [ะšะปะฐััะธั„ะธะบะฐั†ะธั](classify.md) + +ะšะปะฐััะธั„ะธะบะฐั†ะธั โ€” ัั‚ะพ ะทะฐะดะฐั‡ะฐ, ะฒะบะปัŽั‡ะฐัŽั‰ะฐั ะบะปะฐััะธั„ะธะบะฐั†ะธัŽ ะธะทะพะฑั€ะฐะถะตะฝะธั ะฝะฐ ั€ะฐะทะฝั‹ะต ะบะฐั‚ะตะณะพั€ะธะธ. YOLOv8 ะผะพะถะตั‚ ะฑั‹ั‚ัŒ ะธัะฟะพะปัŒะทะพะฒะฐะฝ ะดะปั ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะฝะฐ ะพัะฝะพะฒะต ะธั… ัะพะดะตั€ะถะธะผะพะณะพ. ะ”ะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะบะปะฐััะธั„ะธะบะฐั†ะธะธ ะธัะฟะพะปัŒะทัƒะตั‚ัั ะฒะฐั€ะธะฐั†ะธั ะฐั€ั…ะธั‚ะตะบั‚ัƒั€ั‹ EfficientNet. + +[ะŸั€ะธะผะตั€ั‹ ะšะปะฐััะธั„ะธะบะฐั†ะธะธ](classify.md){ .md-button } + +## [ะŸะพะทะฐ](pose.md) + +ะžะฑะฝะฐั€ัƒะถะตะฝะธะต ั‚ะพั‡ะตะบ ะฟะพะทั‹ ะธะปะธ ะบะปัŽั‡ะตะฒั‹ั… ั‚ะพั‡ะตะบ โ€” ัั‚ะพ ะทะฐะดะฐั‡ะฐ, ะบะพั‚ะพั€ะฐั ะฒะบะปัŽั‡ะฐะตั‚ ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะบะพะฝะบั€ะตั‚ะฝั‹ั… ั‚ะพั‡ะตะบ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธะปะธ ะฒะธะดะตะพะบะฐะดั€ะต. ะญั‚ะธ ั‚ะพั‡ะบะธ ะฝะฐะทั‹ะฒะฐัŽั‚ัั ะบะปัŽั‡ะตะฒั‹ะผะธ ะธ ะธัะฟะพะปัŒะทัƒัŽั‚ัั ะดะปั ะพั‚ัะปะตะถะธะฒะฐะฝะธั ะดะฒะธะถะตะฝะธั ะธะปะธ ะพั†ะตะฝะบะธ ะฟะพะทั‹. YOLOv8 ะผะพะถะตั‚ ะพะฑะฝะฐั€ัƒะถะธะฒะฐั‚ัŒ ะบะปัŽั‡ะตะฒั‹ะต ั‚ะพั‡ะบะธ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธะปะธ ะฒะธะดะตะพะบะฐะดั€ะต ั ะฒั‹ัะพะบะพะน ั‚ะพั‡ะฝะพัั‚ัŒัŽ ะธ ัะบะพั€ะพัั‚ัŒัŽ. + +[ะŸั€ะธะผะตั€ั‹ ะŸะพะท](pose.md){ .md-button } + +## ะ—ะฐะบะปัŽั‡ะตะฝะธะต + +YOLOv8 ะฟะพะดะดะตั€ะถะธะฒะฐะตั‚ ะผะฝะพะถะตัั‚ะฒะพ ะทะฐะดะฐั‡, ะฒะบะปัŽั‡ะฐั ะพะฑะฝะฐั€ัƒะถะตะฝะธะต, ัะตะณะผะตะฝั‚ะฐั†ะธัŽ, ะบะปะฐััะธั„ะธะบะฐั†ะธัŽ ะธ ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะบะปัŽั‡ะตะฒั‹ั… ั‚ะพั‡ะตะบ. ะšะฐะถะดะฐั ะธะท ัั‚ะธั… ะทะฐะดะฐั‡ ะธะผะตะตั‚ ั€ะฐะทะฝั‹ะต ั†ะตะปะธ ะธ ะพะฑะปะฐัั‚ะธ ะฟั€ะธะผะตะฝะตะฝะธั. ะŸะพะฝะธะผะฐั ั€ะฐะทะปะธั‡ะธั ะผะตะถะดัƒ ัั‚ะธะผะธ ะทะฐะดะฐั‡ะฐะผะธ, ะฒั‹ ะผะพะถะตั‚ะต ะฒั‹ะฑั€ะฐั‚ัŒ ะฟะพะดั…ะพะดัั‰ัƒัŽ ะทะฐะดะฐั‡ัƒ ะดะปั ะฒะฐัˆะตะณะพ ะฟั€ะธะปะพะถะตะฝะธั ะบะพะผะฟัŒัŽั‚ะตั€ะฝะพะณะพ ะทั€ะตะฝะธั. 
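+
+ะะฐ ะฟั€ะฐะบั‚ะธะบะต ะฟะตั€ะตะบะปัŽั‡ะตะฝะธะต ะผะตะถะดัƒ ะทะฐะดะฐั‡ะฐะผะธ ัะฒะพะดะธั‚ัั ะบ ะฒั‹ะฑะพั€ัƒ ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒัŽั‰ะธั… ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ั… ะฒะตัะพะฒ; ะบั€ะฐั‚ะบะธะน ะฝะฐะฑั€ะพัะพะบ ะฝะฐ Python:
+
+```python
+from ultralytics import YOLO
+
+# ะšะฐะถะดะพะน ะทะฐะดะฐั‡ะต ัะพะพั‚ะฒะตั‚ัั‚ะฒัƒะตั‚ ัะฒะพะน ะฝะฐะฑะพั€ ะฟั€ะตะดะพะฑัƒั‡ะตะฝะฝั‹ั… ะฒะตัะพะฒ
+detector = YOLO('yolov8n.pt')         # ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ
+segmenter = YOLO('yolov8n-seg.pt')    # ัะตะณะผะตะฝั‚ะฐั†ะธั
+classifier = YOLO('yolov8n-cls.pt')   # ะบะปะฐััะธั„ะธะบะฐั†ะธั
+pose_model = YOLO('yolov8n-pose.pt')  # ะพั†ะตะฝะบะฐ ะฟะพะทั‹
+
+# ะ˜ะฝั‚ะตั€ั„ะตะนั ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะพะดะธะฝะฐะบะพะฒ ะดะปั ะฒัะตั… ะทะฐะดะฐั‡
+results = detector('https://ultralytics.com/images/bus.jpg')
+```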
diff --git a/ultralytics/docs/ru/tasks/index.md:Zone.Identifier b/ultralytics/docs/ru/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/tasks/pose.md b/ultralytics/docs/ru/tasks/pose.md new file mode 100755 index 0000000..03d0d8d --- /dev/null +++ b/ultralytics/docs/ru/tasks/pose.md @@ -0,0 +1,176 @@ +--- +comments: true +description: ะฃะทะฝะฐะนั‚ะต, ะบะฐะบ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ Ultralytics YOLOv8 ะดะปั ะทะฐะดะฐั‡ ะพั†ะตะฝะบะธ ะฟะพะทั‹. ะะฐะนะดะธั‚ะต ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ, ัƒะทะฝะฐะนั‚ะต, ะบะฐะบ ะพะฑัƒั‡ะฐั‚ัŒ, ะฟั€ะพะฒะตั€ัั‚ัŒ, ะฟั€ะตะดัะบะฐะทั‹ะฒะฐั‚ัŒ ะธ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ัะฒะพะธ ัะพะฑัั‚ะฒะตะฝะฝั‹ะต. +--- + +# ะžั†ะตะฝะบะฐ ะฟะพะทั‹ + +![ะŸั€ะธะผะตั€ั‹ ะพั†ะตะฝะบะธ ะฟะพะทั‹](https://user-images.githubusercontent.com/26833433/243418616-9811ac0b-a4a7-452a-8aba-484ba32bb4a8.png) + +ะžั†ะตะฝะบะฐ ะฟะพะทั‹ โ€” ัั‚ะพ ะทะฐะดะฐั‡ะฐ, ะทะฐะบะปัŽั‡ะฐัŽั‰ะฐััั ะฒ ะพะฟั€ะตะดะตะปะตะฝะธะธ ะผะตัั‚ะพะฟะพะปะพะถะตะฝะธั ะพะฟั€ะตะดะตะปั‘ะฝะฝั‹ั… ั‚ะพั‡ะตะบ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะพะฑั‹ั‡ะฝะพ ะฝะฐะทั‹ะฒะฐะตะผั‹ั… ะบะพะฝั‚ั€ะพะปัŒะฝั‹ะผะธ ั‚ะพั‡ะบะฐะผะธ. ะšะพะฝั‚ั€ะพะปัŒะฝั‹ะต ั‚ะพั‡ะบะธ ะผะพะณัƒั‚ ะฟั€ะตะดัั‚ะฐะฒะปัั‚ัŒ ั€ะฐะทะปะธั‡ะฝั‹ะต ั‡ะฐัั‚ะธ ะพะฑัŠะตะบั‚ะฐ, ั‚ะฐะบะธะต ะบะฐะบ ััƒัั‚ะฐะฒั‹, ะพั€ะธะตะฝั‚ะธั€ั‹ ะธะปะธ ะดั€ัƒะณะธะต ั…ะฐั€ะฐะบั‚ะตั€ะฝั‹ะต ะพัะพะฑะตะฝะฝะพัั‚ะธ. ะ ะฐัะฟะพะปะพะถะตะฝะธะต ะบะพะฝั‚ั€ะพะปัŒะฝั‹ั… ั‚ะพั‡ะตะบ ะพะฑั‹ั‡ะฝะพ ะฟั€ะตะดัั‚ะฐะฒะปะตะฝะพ ะฒ ะฒะธะดะต ะฝะฐะฑะพั€ะฐ 2D `[x, y]` ะธะปะธ 3D `[x, y, visible]` ะบะพะพั€ะดะธะฝะฐั‚. + +ะ ะตะทัƒะปัŒั‚ะฐั‚ ั€ะฐะฑะพั‚ั‹ ะผะพะดะตะปะธ ะพั†ะตะฝะบะธ ะฟะพะทั‹ โ€” ัั‚ะพ ะฝะฐะฑะพั€ ั‚ะพั‡ะตะบ, ะฟั€ะตะดัั‚ะฐะฒะปััŽั‰ะธั… ะบะพะฝั‚ั€ะพะปัŒะฝั‹ะต ั‚ะพั‡ะบะธ ะฝะฐ ะพะฑัŠะตะบั‚ะต ะฒ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะพะฑั‹ั‡ะฝะพ ะฒะผะตัั‚ะต ั ะพั†ะตะฝะบะฐะผะธ ัƒะฒะตั€ะตะฝะฝะพัั‚ะธ ะดะปั ะบะฐะถะดะพะน ั‚ะพั‡ะบะธ. ะžั†ะตะฝะบะฐ ะฟะพะทั‹ ัะฒะปัะตั‚ัั ั…ะพั€ะพัˆะธะผ ะฒั‹ะฑะพั€ะพะผ, ะบะพะณะดะฐ ะฒะฐะผ ะฝัƒะถะฝะพ ะธะดะตะฝั‚ะธั„ะธั†ะธั€ะพะฒะฐั‚ัŒ ะบะพะฝะบั€ะตั‚ะฝั‹ะต ั‡ะฐัั‚ะธ ะพะฑัŠะตะบั‚ะฐ ะฒ ัั†ะตะฝะต ะธ ะธั… ั€ะฐัะฟะพะปะพะถะตะฝะธะต ะพั‚ะฝะพัะธั‚ะตะปัŒะฝะพ ะดั€ัƒะณ ะดั€ัƒะณะฐ. + +[ะกะผะพั‚ั€ะธั‚ะต: ะžั†ะตะฝะบะฐ ะฟะพะทั‹ ั Ultralytics YOLOv8.](https://www.youtube.com/embed/Y28xXQmju64?si=pCY4ZwejZFu6Z4kZ) + +!!! Tip "ะกะพะฒะตั‚" + + ะœะพะดะตะปะธ _pose_ YOLOv8 ะธัะฟะพะปัŒะทัƒัŽั‚ ััƒั„ั„ะธะบั `-pose`, ั‚.ะต. `yolov8n-pose.pt`. ะญั‚ะธ ะผะพะดะตะปะธ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO keypoints](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) ะธ ะฟะพะดั…ะพะดัั‚ ะดะปั ั€ะฐะทะปะธั‡ะฝั‹ั… ะทะฐะดะฐั‡ ะพั†ะตะฝะบะธ ะฟะพะทั‹. + +## [ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ะ—ะดะตััŒ ะฟั€ะตะดัั‚ะฐะฒะปะตะฝั‹ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ YOLOv8 Pose. ะœะพะดะตะปะธ Detect, Segment ะธ Pose ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), ะฐ ะผะพะดะตะปะธ Classify โ€” ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). 
+ +[ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ัะบะฐั‡ะธะฒะฐัŽั‚ัั ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะธะท ะฟะพัะปะตะดะฝะตะณะพ [ั€ะตะปะธะทะฐ](https://github.com/ultralytics/assets/releases) Ultralytics ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ. + +| ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPpose
50-95 | mAPpose
50 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(ะ‘) | +|------------------------------------------------------------------------------------------------------|--------------------------|-----------------------|--------------------|-----------------------------------|----------------------------------------|-----------------------|-------------------| +| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | +| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | +| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | +| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | +| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | +| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +- **mAPval** ะทะฝะฐั‡ะตะฝะธั ะดะปั ะพะดะฝะพะน ะผะพะดะตะปะธ ะพะดะธะฝะพั‡ะฝะพะณะพ ะผะฐััˆั‚ะฐะฑะฐ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO Keypoints val2017](http://cocodataset.org). +
ะ’ะพัะฟั€ะพะธะทะฒะพะดะธั‚ัั ั ะฟะพะผะพั‰ัŒัŽ: `yolo val pose data=coco-pose.yaml device=0` +- **ะกะบะพั€ะพัั‚ัŒ** ัƒัั€ะตะดะฝะตะฝะฐ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัะผ COCO val ะฝะฐ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) ะธะฝัั‚ะฐะฝัะต. +
ะ’ะพัะฟั€ะพะธะทะฒะพะดะธั‚ัั ั ะฟะพะผะพั‰ัŒัŽ: `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` + +## ะžะฑัƒั‡ะตะฝะธะต + +ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ YOLOv8-pose ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128-pose. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-pose.yaml') # ัะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML + model = YOLO('yolov8n-pose.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # ัะพะทะดะฐั‚ัŒ ะธะท YAML ะธ ะฟะตั€ะตะฝะตัั‚ะธ ะฒะตัะฐ + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฝัƒะปั + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML, ะฟะตั€ะตะฝะตัั‚ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… + +ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… YOLO pose ะผะพะถะฝะพ ะฝะฐะนั‚ะธ ะฒ ะฟะพะดั€ะพะฑะฝะพัั‚ัั… ะฒ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ ะฝะฐะฑะพั€ะฐะผ ะดะฐะฝะฝั‹ั…](../../../datasets/pose/index.md). ะ”ะปั ะฟั€ะตะพะฑั€ะฐะทะพะฒะฐะฝะธั ััƒั‰ะตัั‚ะฒัƒัŽั‰ะตะณะพ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะธะท ะดั€ัƒะณะธั… ั„ะพั€ะผะฐั‚ะพะฒ (ะฝะฐะฟั€ะธะผะตั€, COCO ะธ ั‚.ะด.) ะฒ ั„ะพั€ะผะฐั‚ YOLO, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะธัะฟะพะปัŒะทัƒะนั‚ะต ะธะฝัั‚ั€ัƒะผะตะฝั‚ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ะพั‚ Ultralytics. + +## ะŸั€ะพะฒะตั€ะบะฐ + +ะŸั€ะพะฒะตั€ัŒั‚ะต ั‚ะพั‡ะฝะพัั‚ัŒ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n-pose ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128-pose. ะั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ั‚ะฐะบ ะบะฐะบ `model` +ะทะฐะฟะพะผะธะฝะฐะตั‚ ัะฒะพะธ `data` ะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ ะบะฐะบ ะฐั‚ั€ะธะฑัƒั‚ั‹ ะผะพะดะตะปะธ. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-pose.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะฒะพัŽ ะผะพะดะตะปัŒ + + # ะŸั€ะพะฒะตั€ะธั‚ัŒ ะผะพะดะตะปัŒ + metrics = model.val() # ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะทะฐะฟะพะผะฝะตะฝั‹ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ัะฟะธัะพะบ ัะพะดะตั€ะถะธั‚ map50-95 ะดะปั ะบะฐะถะดะพะน ะบะฐั‚ะตะณะพั€ะธะธ + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # ะฟั€ะพะฒะตั€ะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + yolo pose val model=path/to/best.pt # ะฟั€ะพะฒะตั€ะธั‚ัŒ ัะฒะพัŽ ะผะพะดะตะปัŒ + ``` + +## ะŸั€ะตะดัะบะฐะทะฐะฝะธะต + +ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n-pose ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธัั…. + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-pose.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะฒะพัŽ ะผะพะดะตะปัŒ + + # ะกะดะตะปะฐั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ะผะพะดะตะปัŒัŽ + results = model('https://ultralytics.com/images/bus.jpg') # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัŽ + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปัŒัŽ + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ัะฒะพะตะน ะผะพะดะตะปัŒัŽ + ``` + +ะŸะพะปะฝั‹ะต ะดะตั‚ะฐะปะธ ั€ะฐะฑะพั‚ั‹ ะฒ ั€ะตะถะธะผะต `predict` ัะผะพั‚ั€ะธั‚ะต ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [Predict](https://docs.ultralytics.com/modes/predict/). + +## ะญะบัะฟะพั€ั‚ + +ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ YOLOv8n Pose ะฒ ะดั€ัƒะณะพะน ั„ะพั€ะผะฐั‚, ั‚ะฐะบะพะน ะบะฐะบ ONNX, CoreML ะธ ั‚.ะด. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-pose.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ัะฒะพัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะญะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + yolo export model=path/to/best.pt format=onnx # ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ัะฒะพัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + ``` + +ะ”ะพัั‚ัƒะฟะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ ะผะพะดะตะปะธ YOLOv8-pose ะฟั€ะธะฒะตะดะตะฝั‹ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. ะ’ั‹ ะผะพะถะตั‚ะต ะดะตะปะฐั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะธะปะธ ะฟั€ะพะฒะตั€ะบะธ ะฝะตะฟะพัั€ะตะดัั‚ะฒะตะฝะฝะพ ั ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝั‹ั… ะผะพะดะตะปะตะน, ะฝะฐะฟั€ะธะผะตั€, `yolo predict model=yolov8n-pose.onnx`. ะŸั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั ะฟะพะบะฐะทะฐะฝั‹ ะดะปั ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฟะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัะบัะฟะพั€ั‚ะฐ. 
+ +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|--------------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +ะŸะพะปะฝั‹ะต ะดะตั‚ะฐะปะธ ัะบัะฟะพั€ั‚ะฐ ัะผะพั‚ั€ะธั‚ะต ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/ru/tasks/pose.md:Zone.Identifier b/ultralytics/docs/ru/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/ru/tasks/segment.md b/ultralytics/docs/ru/tasks/segment.md new file mode 100755 index 0000000..94d67b3 --- /dev/null +++ b/ultralytics/docs/ru/tasks/segment.md @@ -0,0 +1,189 @@ +--- +comments: true +description: ะะฐัƒั‡ะธั‚ะตััŒ ะธัะฟะพะปัŒะทะพะฒะฐั‚ัŒ ะผะพะดะตะปะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ะพะฑัŠะตะบั‚ะพะฒ ั ะฟะพะผะพั‰ัŒัŽ Ultralytics YOLO. ะ˜ะฝัั‚ั€ัƒะบั†ะธะธ ะฟะพ ะพะฑัƒั‡ะตะฝะธัŽ, ะฒะฐะปะธะดะฐั†ะธะธ, ะฟั€ะตะดัะบะฐะทะฐะฝะธัŽ ะธะทะพะฑั€ะฐะถะตะฝะธะน ะธ ัะบัะฟะพั€ั‚ัƒ ะผะพะดะตะปะตะน. 
+keywords: yolov8, ัะตะณะผะตะฝั‚ะฐั†ะธั ะพะฑัŠะตะบั‚ะพะฒ, Ultralytics, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… COCO, ัะตะณะผะตะฝั‚ะฐั†ะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน, ะพะฑะฝะฐั€ัƒะถะตะฝะธะต ะพะฑัŠะตะบั‚ะพะฒ, ะพะฑัƒั‡ะตะฝะธะต ะผะพะดะตะปะตะน, ะฒะฐะปะธะดะฐั†ะธั ะผะพะดะตะปะตะน, ะฟั€ะตะดัะบะฐะทะฐะฝะธั ะธะทะพะฑั€ะฐะถะตะฝะธะน, ัะบัะฟะพั€ั‚ ะผะพะดะตะปะตะน +--- + +# ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ + +ะŸั€ะธะผะตั€ั‹ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ัะบะทะตะผะฟะปัั€ะพะฒ + +ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ ะธะดั‘ั‚ ะฝะฐ ัˆะฐะณ ะดะฐะปัŒัˆะต ะฟะพ ัั€ะฐะฒะฝะตะฝะธัŽ ั ะพะฑะฝะฐั€ัƒะถะตะฝะธะตะผ ะพะฑัŠะตะบั‚ะพะฒ ะธ ะฒะบะปัŽั‡ะฐะตั‚ ะธะดะตะฝั‚ะธั„ะธะบะฐั†ะธัŽ ะพั‚ะดะตะปัŒะฝั‹ั… ะพะฑัŠะตะบั‚ะพะฒ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ ะธ ะธั… ัะตะณะผะตะฝั‚ะฐั†ะธัŽ ะพั‚ ะพัั‚ะฐะปัŒะฝะพะน ั‡ะฐัั‚ะธ ะธะทะพะฑั€ะฐะถะตะฝะธั. + +ะ ะตะทัƒะปัŒั‚ะฐั‚ะพะผ ะผะพะดะตะปะธ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ัะบะทะตะผะฟะปัั€ะพะฒ ัะฒะปัะตั‚ัั ะฝะฐะฑะพั€ ะผะฐัะพะบ ะธะปะธ ะบะพะฝั‚ัƒั€ะพะฒ, ะพั‡ะตั€ั‡ะธะฒะฐัŽั‰ะธั… ะบะฐะถะดั‹ะน ะพะฑัŠะตะบั‚ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะฒะผะตัั‚ะต ั ะบะปะฐััะพะฒั‹ะผะธ ะผะตั‚ะบะฐะผะธ ะธ ะบะพัั„ั„ะธั†ะธะตะฝั‚ะฐะผะธ ัƒะฒะตั€ะตะฝะฝะพัั‚ะธ ะดะปั ะบะฐะถะดะพะณะพ ะพะฑัŠะตะบั‚ะฐ. ะกะตะณะผะตะฝั‚ะฐั†ะธั ัะบะทะตะผะฟะปัั€ะพะฒ ะฟะพะปะตะทะฝะฐ, ะบะพะณะดะฐ ะฒะฐะผ ะฝัƒะถะฝะพ ะทะฝะฐั‚ัŒ ะฝะต ั‚ะพะปัŒะบะพ, ะณะดะต ะฝะฐั…ะพะดัั‚ัั ะพะฑัŠะตะบั‚ั‹ ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธะธ, ะฝะพ ะธ ะธั… ั‚ะพั‡ะฝัƒัŽ ั„ะพั€ะผัƒ. + +

+
+ +
+ ะกะผะพั‚ั€ะธั‚ะต: ะ—ะฐะฟัƒัะบ ัะตะณะผะตะฝั‚ะฐั†ะธะธ ั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปัŒัŽ Ultralytics YOLOv8 ะฝะฐ Python. +

+ +!!! Tip "ะกะพะฒะตั‚" + + ะœะพะดะตะปะธ YOLOv8 Segment ะธัะฟะพะปัŒะทัƒัŽั‚ ััƒั„ั„ะธะบั `-seg`, ะฝะฐะฟั€ะธะผะตั€ `yolov8n-seg.pt` ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml). + +## [ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ะ—ะดะตััŒ ะฟะพะบะฐะทะฐะฝั‹ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะผะพะดะตะปะธ Segment YOLOv8. ะœะพะดะตะปะธ Detect, Segment ะธ Pose ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml), ะฒ ั‚ะพ ะฒั€ะตะผั ะบะฐะบ ะผะพะดะตะปะธ Classify ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝั‹ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml). + +[ะœะพะดะตะปะธ](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ะฐะฒั‚ะพะผะฐั‚ะธั‡ะตัะบะธ ะทะฐะณั€ัƒะถะฐัŽั‚ัั ะธะท ะฟะพัะปะตะดะฝะตะณะพ [ั€ะตะปะธะทะฐ](https://github.com/ultralytics/assets/releases) Ultralytics ะฟั€ะธ ะฟะตั€ะฒะพะผ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธะธ. + +| ะœะพะดะตะปัŒ | ั€ะฐะทะผะตั€
(ะฟะธะบัะตะปะธ) | mAPbox
50-95 | mAPmask
50-95 | ะกะบะพั€ะพัั‚ัŒ
CPU ONNX
(ะผั) | ะกะบะพั€ะพัั‚ัŒ
A100 TensorRT
(ะผั) | ะฟะฐั€ะฐะผะตั‚ั€ั‹
(ะœ) | FLOPs
(B) | +|----------------------------------------------------------------------------------------------|--------------------------|----------------------|-----------------------|-----------------------------------|----------------------------------------|-----------------------|-------------------| +| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | +| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | +| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | +| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | +| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + +- ะ—ะฝะฐั‡ะตะฝะธั **mAPval** ะดะปั ะพะดะธะฝะพั‡ะฝะพะน ะผะพะดะตะปะธ ะพะดะธะฝะพั‡ะฝะพะณะพ ะผะฐััˆั‚ะฐะฑะฐ ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… [COCO val2017](http://cocodataset.org). +
ะ’ะพัะฟั€ะพะธะทะฒะตะดะธั‚ะต ั ะฟะพะผะพั‰ัŒัŽ `yolo val segment data=coco.yaml device=0` +- **ะกะบะพั€ะพัั‚ัŒ** ัƒัั€ะตะดะฝะตะฝะฐ ะดะปั ะธะทะพะฑั€ะฐะถะตะฝะธะน COCO val ะฝะฐ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) + ะธะฝัั‚ะฐะฝัะต. +
ะ’ะพัะฟั€ะพะธะทะฒะตะดะธั‚ะต ั ะฟะพะผะพั‰ัŒัŽ `yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu` + +## ะžะฑัƒั‡ะตะฝะธะต + +ะžะฑัƒั‡ะธั‚ะต ะผะพะดะตะปัŒ YOLOv8n-seg ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128-seg ะฒ ั‚ะตั‡ะตะฝะธะต 100 ัะฟะพั… ะฟั€ะธ ั€ะฐะทะผะตั€ะต ะธะทะพะฑั€ะฐะถะตะฝะธั 640. ะŸะพะปะฝั‹ะน ัะฟะธัะพะบ ะดะพัั‚ัƒะฟะฝั‹ั… ะฐั€ะณัƒะผะตะฝั‚ะพะฒ ัะผ. ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [ะšะพะฝั„ะธะณัƒั€ะฐั†ะธั](/../usage/cfg.md). + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-seg.yaml') # ัะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML + model = YOLO('yolov8n-seg.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ (ั€ะตะบะพะผะตะฝะดัƒะตั‚ัั ะดะปั ะพะฑัƒั‡ะตะฝะธั) + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # ัะพะทะดะฐั‚ัŒ ะธะท YAML ะธ ะฟะตั€ะตะฝะตัั‚ะธ ะฒะตัะฐ + + # ะžะฑัƒั‡ะธั‚ัŒ ะผะพะดะตะปัŒ + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฝัƒะปั + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # ะะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต ั ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ *.pt + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # ะกะพะทะดะฐั‚ัŒ ะฝะพะฒัƒัŽ ะผะพะดะตะปัŒ ะธะท YAML, ะฟะตั€ะตะฝะตัั‚ะธ ะฟั€ะตะดะฒะฐั€ะธั‚ะตะปัŒะฝะพ ะพะฑัƒั‡ะตะฝะฝั‹ะต ะฒะตัะฐ ะธ ะฝะฐั‡ะฐั‚ัŒ ะพะฑัƒั‡ะตะฝะธะต + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… + +ะคะพั€ะผะฐั‚ ะฝะฐะฑะพั€ะฐ ะดะฐะฝะฝั‹ั… ะดะปั ัะตะณะผะตะฝั‚ะฐั†ะธะธ YOLO ะผะพะถะฝะพ ะฝะฐะนั‚ะธ ะดะตั‚ะฐะปัŒะฝะพ ะฒ [ะ ัƒะบะพะฒะพะดัั‚ะฒะต ะฟะพ ะฝะฐะฑะพั€ะฐะผ ะดะฐะฝะฝั‹ั…](../../../datasets/segment/index.md). ะงั‚ะพะฑั‹ ะบะพะฝะฒะตั€ั‚ะธั€ะพะฒะฐั‚ัŒ ัะฒะพะน ััƒั‰ะตัั‚ะฒัƒัŽั‰ะธะน ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธะท ะดั€ัƒะณะธั… ั„ะพั€ะผะฐั‚ะพะฒ (ะฝะฐะฟั€ะธะผะตั€, COCO ะธ ั‚.ะด.) ะฒ ั„ะพั€ะผะฐั‚ YOLO, ะฟะพะถะฐะปัƒะนัั‚ะฐ, ะธัะฟะพะปัŒะทัƒะนั‚ะต ะธะฝัั‚ั€ัƒะผะตะฝั‚ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ะพั‚ Ultralytics. + +## ะ’ะฐะปะธะดะฐั†ะธั + +ะŸั€ะพะฒะตั€ัŒั‚ะต ั‚ะพั‡ะฝะพัั‚ัŒ ะพะฑัƒั‡ะตะฝะฝะพะน ะผะพะดะตะปะธ YOLOv8n-seg ะฝะฐ ะฝะฐะฑะพั€ะต ะดะฐะฝะฝั‹ั… COCO128-seg. ะั€ะณัƒะผะตะฝั‚ั‹ ะฟะตั€ะตะดะฐะฒะฐั‚ัŒ ะฝะต ะฝัƒะถะฝะพ, ั‚ะฐะบ ะบะฐะบ `model` ัะพั…ั€ะฐะฝัะตั‚ `data` ะธ ะฐั€ะณัƒะผะตะฝั‚ั‹ ะพะฑัƒั‡ะตะฝะธั ะฒ ะบะฐั‡ะตัั‚ะฒะต ะฐั‚ั€ะธะฑัƒั‚ะพะฒ ะผะพะดะตะปะธ. + +!!! 
Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-seg.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ + + # ะŸั€ะพะฒะฐะปะธะดะธั€ะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ + metrics = model.val() # ะฐั€ะณัƒะผะตะฝั‚ั‹ ะฝะต ะฝัƒะถะฝั‹, ะฝะฐะฑะพั€ ะดะฐะฝะฝั‹ั… ะธ ะฝะฐัั‚ั€ะพะนะบะธ ะทะฐะฟะพะผะฝะตะฝั‹ + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # ัะฟะธัะพะบ ัะพะดะตั€ะถะธั‚ map50-95(B) ะบะฐะถะดะพะน ะบะฐั‚ะตะณะพั€ะธะธ + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # ัะฟะธัะพะบ ัะพะดะตั€ะถะธั‚ map50-95(M) ะบะฐะถะดะพะน ะบะฐั‚ะตะณะพั€ะธะธ + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # ะฒะฐะปะธะดะฐั†ะธั ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปะธ + yolo segment val model=path/to/best.pt # ะฒะฐะปะธะดะฐั†ะธั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะน ะผะพะดะตะปะธ + ``` + +## ะŸั€ะตะดัะบะฐะทะฐะฝะธะต + +ะ˜ัะฟะพะปัŒะทัƒะนั‚ะต ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ YOLOv8n-seg ะดะปั ะฒั‹ะฟะพะปะฝะตะฝะธั ะฟั€ะตะดัะบะฐะทะฐะฝะธะน ะฝะฐ ะธะทะพะฑั€ะฐะถะตะฝะธัั…. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-seg.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะผะพะดะตะปัŒ + + # ะกะดะตะปะฐั‚ัŒ ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ั ะฟะพะผะพั‰ัŒัŽ ะผะพะดะตะปะธ + results = model('https://ultralytics.com/images/bus.jpg') # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ะฟะพ ะธะทะพะฑั€ะฐะถะตะฝะธัŽ + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ั ะพั„ะธั†ะธะฐะปัŒะฝะพะน ะผะพะดะตะปัŒัŽ + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ะฟั€ะตะดัะบะฐะทะฐั‚ัŒ ั ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบะพะน ะผะพะดะตะปัŒัŽ + ``` + +ะŸะพะปะฝะฐั ะธะฝั„ะพั€ะผะฐั†ะธั ะพ ั€ะตะถะธะผะต `predict` ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [Predict](https://docs.ultralytics.com/modes/predict/). + +## ะญะบัะฟะพั€ั‚ + +ะญะบัะฟะพั€ั‚ะธั€ัƒะนั‚ะต ะผะพะดะตะปัŒ YOLOv8n-seg ะฒ ะดั€ัƒะณะพะน ั„ะพั€ะผะฐั‚, ะฝะฐะฟั€ะธะผะตั€ ONNX, CoreML ะธ ั‚.ะด. + +!!! Example "ะŸั€ะธะผะตั€" + + === "Python" + + ```python + from ultralytics import YOLO + + # ะ—ะฐะณั€ัƒะทะธั‚ัŒ ะผะพะดะตะปัŒ + model = YOLO('yolov8n-seg.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + model = YOLO('path/to/best.pt') # ะทะฐะณั€ัƒะทะธั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + + # ะญะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะผะพะดะตะปัŒ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-seg.pt format=onnx # ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะพั„ะธั†ะธะฐะปัŒะฝัƒัŽ ะผะพะดะตะปัŒ + yolo export model=path/to/best.pt format=onnx # ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐั‚ัŒ ะฟะพะปัŒะทะพะฒะฐั‚ะตะปัŒัะบัƒัŽ ะพะฑัƒั‡ะตะฝะฝัƒัŽ ะผะพะดะตะปัŒ + ``` + +ะ”ะพัั‚ัƒะฟะฝั‹ะต ั„ะพั€ะผะฐั‚ั‹ ัะบัะฟะพั€ั‚ะฐ YOLOv8-seg ะฟั€ะธะฒะตะดะตะฝั‹ ะฒ ั‚ะฐะฑะปะธั†ะต ะฝะธะถะต. 
ะŸะพัะปะต ะทะฐะฒะตั€ัˆะตะฝะธั ัะบัะฟะพั€ั‚ะฐ ะดะปั ะฒะฐัˆะตะน ะผะพะดะตะปะธ ะฟะพะบะฐะทะฐะฝั‹ ะฟั€ะธะผะตั€ั‹ ะธัะฟะพะปัŒะทะพะฒะฐะฝะธั, ะฒะบะปัŽั‡ะฐั ะฟั€ัะผะพะต ะฟั€ะตะดัะบะฐะทะฐะฝะธะต ะธะปะธ ะฒะฐะปะธะดะฐั†ะธัŽ ะฝะฐ ัะบัะฟะพั€ั‚ะธั€ะพะฒะฐะฝะฝั‹ั… ะผะพะดะตะปัั…, ะฝะฐะฟั€ะธะผะตั€ `yolo predict model=yolov8n-seg.onnx`. + +| ะคะพั€ะผะฐั‚ | ะั€ะณัƒะผะตะฝั‚ `format` | ะœะพะดะตะปัŒ | ะœะตั‚ะฐะดะฐะฝะฝั‹ะต | ะั€ะณัƒะผะตะฝั‚ั‹ | +|--------------------------------------------------------------------|-------------------|-------------------------------|------------|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | โœ… | `imgsz`, `half` | + +ะŸะพะดั€ะพะฑะฝะพัั‚ะธ ะพ ั€ะตะถะธะผะต `export` ัะผะพั‚ั€ะธั‚ะต ะฝะฐ ัั‚ั€ะฐะฝะธั†ะต [Export](https://docs.ultralytics.com/modes/export/). diff --git a/ultralytics/docs/ru/tasks/segment.md:Zone.Identifier b/ultralytics/docs/ru/tasks/segment.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/ru/tasks/segment.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/update_translations.py b/ultralytics/docs/update_translations.py new file mode 100755 index 0000000..9c27c70 --- /dev/null +++ b/ultralytics/docs/update_translations.py @@ -0,0 +1,180 @@ +# Ultralytics YOLO ๐Ÿš€, AGPL-3.0 license +""" +Script to fix broken Markdown links and front matter in language-specific directories zh, ko, ja, ru, de, fr, es, pt. + +This script processes markdown files in language-specific directories (like /zh/). It finds Markdown links and checks +their existence. If a link is broken and does not exist in the language-specific directory but exists in the /en/ +directory, the script updates the link to point to the corresponding file in the /en/ directory. + +It also ensures that front matter keywords like 'comments:', 'description:', and 'keywords:' are not translated and +remain in English. 
+""" + +import re +from pathlib import Path + + +class MarkdownLinkFixer: + """Class to fix Markdown links and front matter in language-specific directories.""" + + def __init__(self, base_dir, update_links=True, update_text=True): + """Initialize the MarkdownLinkFixer with the base directory.""" + self.base_dir = Path(base_dir) + self.update_links = update_links + self.update_text = update_text + self.md_link_regex = re.compile(r'\[([^]]+)]\(([^:)]+)\.md\)') + + @staticmethod + def replace_front_matter(content, lang_dir): + """Ensure front matter keywords remain in English.""" + english = ['comments', 'description', 'keywords'] + translations = { + 'zh': ['่ฏ„่ฎบ', 'ๆ่ฟฐ', 'ๅ…ณ้”ฎ่ฏ'], # Mandarin Chinese (Simplified) warning, sometimes translates as ๅ…ณ้”ฎๅญ— + 'es': ['comentarios', 'descripciรณn', 'palabras clave'], # Spanish + 'ru': ['ะบะพะผะผะตะฝั‚ะฐั€ะธะธ', 'ะพะฟะธัะฐะฝะธะต', 'ะบะปัŽั‡ะตะฒั‹ะต ัะปะพะฒะฐ'], # Russian + 'pt': ['comentรกrios', 'descriรงรฃo', 'palavras-chave'], # Portuguese + 'fr': ['commentaires', 'description', 'mots-clรฉs'], # French + 'de': ['kommentare', 'beschreibung', 'schlรผsselwรถrter'], # German + 'ja': ['ใ‚ณใƒกใƒณใƒˆ', '่ชฌๆ˜Ž', 'ใ‚ญใƒผใƒฏใƒผใƒ‰'], # Japanese + 'ko': ['๋Œ“๊ธ€', '์„ค๋ช…', 'ํ‚ค์›Œ๋“œ'], # Korean + 'hi': ['เคŸเคฟเคชเฅเคชเคฃเคฟเคฏเคพเค', 'เคตเคฟเคตเคฐเคฃ', 'เค•เฅ€เคตเคฐเฅเคก'], # Hindi + 'ar': ['ุงู„ุชุนู„ูŠู‚ุงุช', 'ุงู„ูˆุตู', 'ุงู„ูƒู„ู…ุงุช ุงู„ุฑุฆูŠุณูŠุฉ'] # Arabic + } # front matter translations for comments, description, keyword + + for term, eng_key in zip(translations.get(lang_dir.stem, []), english): + content = re.sub(rf'{term} *[๏ผš:].*', f'{eng_key}: true', content, flags=re.IGNORECASE) if \ + eng_key == 'comments' else re.sub(rf'{term} *[๏ผš:] *', f'{eng_key}: ', content, flags=re.IGNORECASE) + return content + + @staticmethod + def replace_admonitions(content, lang_dir): + """Ensure front matter keywords remain in English.""" + english = [ + 'Note', 'Summary', 'Tip', 'Info', 'Success', 'Question', 'Warning', 'Failure', 'Danger', 'Bug', 'Example', + 'Quote', 'Abstract', 'Seealso', 'Admonition'] + translations = { + 'en': + english, + 'zh': ['็ฌ”่ฎฐ', 'ๆ‘˜่ฆ', 'ๆ็คบ', 'ไฟกๆฏ', 'ๆˆๅŠŸ', '้—ฎ้ข˜', '่ญฆๅ‘Š', 'ๅคฑ่ดฅ', 'ๅฑ้™ฉ', 'ๆ•…้šœ', '็คบไพ‹', 'ๅผ•็”จ', 'ๆ‘˜่ฆ', 'ๅฆ่ง', '่ญฆๅ‘Š'], + 'es': [ + 'Nota', 'Resumen', 'Consejo', 'Informaciรณn', 'ร‰xito', 'Pregunta', 'Advertencia', 'Fracaso', 'Peligro', + 'Error', 'Ejemplo', 'Cita', 'Abstracto', 'Vรฉase Tambiรฉn', 'Amonestaciรณn'], + 'ru': [ + 'ะ—ะฐะผะตั‚ะบะฐ', 'ะกะฒะพะดะบะฐ', 'ะกะพะฒะตั‚', 'ะ˜ะฝั„ะพั€ะผะฐั†ะธั', 'ะฃัะฟะตั…', 'ะ’ะพะฟั€ะพั', 'ะŸั€ะตะดัƒะฟั€ะตะถะดะตะฝะธะต', 'ะะตัƒะดะฐั‡ะฐ', 'ะžะฟะฐัะฝะพัั‚ัŒ', + 'ะžัˆะธะฑะบะฐ', 'ะŸั€ะธะผะตั€', 'ะฆะธั‚ะฐั‚ะฐ', 'ะะฑัั‚ั€ะฐะบั‚', 'ะกะผ. 
ะขะฐะบะถะต', 'ะŸั€ะตะดะพัั‚ะตั€ะตะถะตะฝะธะต'], + 'pt': [ + 'Nota', 'Resumo', 'Dica', 'Informaรงรฃo', 'Sucesso', 'Questรฃo', 'Aviso', 'Falha', 'Perigo', 'Bug', + 'Exemplo', 'Citaรงรฃo', 'Abstrato', 'Veja Tambรฉm', 'Advertรชncia'], + 'fr': [ + 'Note', 'Rรฉsumรฉ', 'Conseil', 'Info', 'Succรจs', 'Question', 'Avertissement', 'ร‰chec', 'Danger', 'Bug', + 'Exemple', 'Citation', 'Abstrait', 'Voir Aussi', 'Admonestation'], + 'de': [ + 'Hinweis', 'Zusammenfassung', 'Tipp', 'Info', 'Erfolg', 'Frage', 'Warnung', 'Ausfall', 'Gefahr', + 'Fehler', 'Beispiel', 'Zitat', 'Abstrakt', 'Siehe Auch', 'Ermahnung'], + 'ja': ['ใƒŽใƒผใƒˆ', '่ฆ็ด„', 'ใƒ’ใƒณใƒˆ', 'ๆƒ…ๅ ฑ', 'ๆˆๅŠŸ', '่ณชๅ•', '่ญฆๅ‘Š', 'ๅคฑๆ•—', 'ๅฑ้™บ', 'ใƒใ‚ฐ', 'ไพ‹', 'ๅผ•็”จ', 'ๆŠ„้Œฒ', 'ๅ‚็…ง', '่จ“ๅ‘Š'], + 'ko': ['๋…ธํŠธ', '์š”์•ฝ', 'ํŒ', '์ •๋ณด', '์„ฑ๊ณต', '์งˆ๋ฌธ', '๊ฒฝ๊ณ ', '์‹คํŒจ', '์œ„ํ—˜', '๋ฒ„๊ทธ', '์˜ˆ์ œ', '์ธ์šฉ', '์ถ”์ƒ', '์ฐธ์กฐ', '๊ฒฝ๊ณ '], + 'hi': [ + 'เคจเฅ‹เคŸ', 'เคธเคพเคฐเคพเค‚เคถ', 'เคธเฅเคเคพเคต', 'เคœเคพเคจเค•เคพเคฐเฅ€', 'เคธเคซเคฒเคคเคพ', 'เคชเฅเคฐเคถเฅเคจ', 'เคšเฅ‡เคคเคพเคตเคจเฅ€', 'เคตเคฟเคซเคฒเคคเคพ', 'เค–เคคเคฐเคพ', 'เคฌเค—', 'เค‰เคฆเคพเคนเคฐเคฃ', + 'เค‰เคฆเฅเคงเคฐเคฃ', 'เคธเคพเคฐ', 'เคฆเฅ‡เค–เฅ‡เค‚ เคญเฅ€', 'เค†เค—เคพเคนเฅ€'], + 'ar': [ + 'ู…ู„ุงุญุธุฉ', 'ู…ู„ุฎุต', 'ู†ุตูŠุญุฉ', 'ู…ุนู„ูˆู…ุงุช', 'ู†ุฌุงุญ', 'ุณุคุงู„', 'ุชุญุฐูŠุฑ', 'ูุดู„', 'ุฎุทุฑ', 'ุนุทู„', 'ู…ุซุงู„', 'ุงู‚ุชุจุงุณ', + 'ู…ู„ุฎุต', 'ุงู†ุธุฑ ุฃูŠุถุงู‹', 'ุชุญุฐูŠุฑ']} + + for term, eng_key in zip(translations.get(lang_dir.stem, []), english): + if lang_dir.stem != 'en': + content = re.sub(rf'!!! *{eng_key} *\n', f'!!! {eng_key} "{term}"\n', content, flags=re.IGNORECASE) + content = re.sub(rf'!!! *{term} *\n', f'!!! {eng_key} "{term}"\n', content, flags=re.IGNORECASE) + content = re.sub(rf'!!! *{term}', f'!!! {eng_key}', content, flags=re.IGNORECASE) + content = re.sub(r'!!! *"', '!!! 
Example "', content, flags=re.IGNORECASE) + + return content + + @staticmethod + def update_iframe(content): + """Update the 'allow' attribute of iframe if it does not contain the specific English permissions.""" + english = 'accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share' + pattern = re.compile(f'allow="(?!{re.escape(english)}).+?"') + return pattern.sub(f'allow="{english}"', content) + + def link_replacer(self, match, parent_dir, lang_dir, use_abs_link=False): + """Replace broken links with corresponding links in the /en/ directory.""" + text, path = match.groups() + linked_path = (parent_dir / path).resolve().with_suffix('.md') + + if not linked_path.exists(): + en_linked_path = Path(str(linked_path).replace(str(lang_dir), str(lang_dir.parent / 'en'))) + if en_linked_path.exists(): + if use_abs_link: + # Use absolute links WARNING: BUGS, DO NOT USE + docs_root_relative_path = en_linked_path.relative_to(lang_dir.parent) + updated_path = str(docs_root_relative_path).replace('en/', '/../') + else: + # Use relative links + steps_up = len(parent_dir.relative_to(self.base_dir).parts) + updated_path = Path('../' * steps_up) / en_linked_path.relative_to(self.base_dir) + updated_path = str(updated_path).replace('/en/', '/') + + print(f"Redirecting link '[{text}]({path})' from {parent_dir} to {updated_path}") + return f'[{text}]({updated_path})' + else: + print(f"Warning: Broken link '[{text}]({path})' found in {parent_dir} does not exist in /docs/en/.") + + return match.group(0) + + @staticmethod + def update_html_tags(content): + """Updates HTML tags in docs.""" + alt_tag = 'MISSING' + + # Remove closing slashes from self-closing HTML tags + pattern = re.compile(r'<([^>]+?)\s*/>') + content = re.sub(pattern, r'<\1>', content) + + # Find all images without alt tags and add placeholder alt text + pattern = re.compile(r'!\[(.*?)\]\((.*?)\)') + content, num_replacements = re.subn(pattern, lambda match: f'![{match.group(1) or alt_tag}]({match.group(2)})', + content) + + # Add missing alt tags to HTML images + pattern = re.compile(r'<img [^>]*src=["\'](.*?)["\'][^>]*>') + content, num_replacements = re.subn(pattern, lambda match: match.group(0).replace('>', f' alt="{alt_tag}">', 1), + content) + + return content + + def process_markdown_file(self, md_file_path, lang_dir): + """Process each markdown file in the language directory.""" + print(f'Processing file: {md_file_path}') + with open(md_file_path, encoding='utf-8') as file: + content = file.read() + + if self.update_links: + content = self.md_link_regex.sub(lambda m: self.link_replacer(m, md_file_path.parent, lang_dir), content) + + if self.update_text: + content = self.replace_front_matter(content, lang_dir) + content = self.replace_admonitions(content, lang_dir) + content = self.update_iframe(content) + content = self.update_html_tags(content) + + with open(md_file_path, 'w', encoding='utf-8') as file: + file.write(content) + + def process_language_directory(self, lang_dir): + """Process each language-specific directory.""" + print(f'Processing language directory: {lang_dir}') + for md_file in lang_dir.rglob('*.md'): + self.process_markdown_file(md_file, lang_dir) + + def run(self): + """Run the link fixing and front matter updating process for each language-specific directory.""" + for subdir in self.base_dir.iterdir(): + if subdir.is_dir() and re.match(r'^\w\w$', subdir.name): + self.process_language_directory(subdir) + + +if __name__ == '__main__': + # Set the path to your MkDocs 'docs' directory
here + docs_dir = str(Path(__file__).parent.resolve()) + fixer = MarkdownLinkFixer(docs_dir, update_links=True, update_text=True) + fixer.run() diff --git a/ultralytics/docs/update_translations.py:Zone.Identifier b/ultralytics/docs/update_translations.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/update_translations.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/index.md b/ultralytics/docs/zh/index.md new file mode 100755 index 0000000..d7d58de --- /dev/null +++ b/ultralytics/docs/zh/index.md @@ -0,0 +1,85 @@ +--- +comments: true +description: ๆŽข็ดขUltralytics YOLOv8็š„ๅฎŒๆ•ดๆŒ‡ๅ—๏ผŒ่ฟ™ๆ˜ฏไธ€ไธช้ซ˜้€Ÿใ€้ซ˜็ฒพๅบฆ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅ’Œๅ›พๅƒๅˆ†ๅ‰ฒๆจกๅž‹ใ€‚ๅŒ…ๆ‹ฌๅฎ‰่ฃ…ใ€้ข„ๆต‹ใ€่ฎญ็ปƒๆ•™็จ‹็ญ‰ใ€‚ +keywords: Ultralytics, YOLOv8, ็›ฎๆ ‡ๆฃ€ๆต‹, ๅ›พๅƒๅˆ†ๅ‰ฒ, ๆœบๅ™จๅญฆไน , ๆทฑๅบฆๅญฆไน , ่ฎก็ฎ—ๆœบ่ง†่ง‰, YOLOv8ๅฎ‰่ฃ…, YOLOv8้ข„ๆต‹, YOLOv8่ฎญ็ปƒ, YOLOๅކๅฒ, YOLO่ฎธๅฏ +--- + +# Ultralytics ไธญๆ–‡ๆ–‡ๆกฃ + +
+<!-- ้กต้ข้กถ้ƒจ HTML ๅŒบๅ—๏ผˆๅ ไฝ๏ผ‰๏ผšUltralytics YOLO banner๏ผ›็คพไบค้“พๆŽฅ๏ผšUltralytics GitHubใ€LinkedInใ€Twitterใ€YouTubeใ€TikTokใ€Instagramใ€Discord๏ผ›ๅพฝ็ซ ๏ผšUltralytics CIใ€Ultralytics Code Coverageใ€YOLOv8 Citationใ€Docker Pullsใ€Discordใ€Run on Gradientใ€Open In Colabใ€Open In Kaggle -->
+ +ไป‹็ป [Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics)๏ผŒ่ฟ™ๆ˜ฏๅค‡ๅ—ๅฅฝ่ฏ„็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ’Œๅ›พๅƒๅˆ†ๅ‰ฒๆจกๅž‹็š„ๆœ€ๆ–ฐ็‰ˆๆœฌใ€‚YOLOv8ๅŸบไบŽๆทฑๅบฆๅญฆไน ๅ’Œ่ฎก็ฎ—ๆœบ่ง†่ง‰็š„ๅ‰ๆฒฟ่ฟ›ๅฑ•๏ผŒๆไพ›ไบ†ๆ— ไธŽไผฆๆฏ”็š„้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง่กจ็Žฐใ€‚ๅฎƒ็š„็ฒพ็ฎ€่ฎพ่ฎกไฝฟๅ…ถ้€‚็”จไบŽๅ„็งๅบ”็”จ๏ผŒๅนถไธ”ๅฏไปฅ่ฝปๆพ้€‚ๅบ”ไธๅŒ็š„็กฌไปถๅนณๅฐ๏ผŒไปŽ่พน็ผ˜่ฎพๅค‡ๅˆฐไบ‘APIใ€‚ + +ๆŽข็ดขYOLOv8ๆ–‡ๆกฃ๏ผŒ่ฟ™ๆ˜ฏไธ€ไธชๅ…จ้ข็š„่ต„ๆบ๏ผŒๆ—จๅœจๅธฎๅŠฉๆ‚จ็†่งฃๅนถๅˆฉ็”จๅ…ถๅŠŸ่ƒฝๅ’Œ่ƒฝๅŠ›ใ€‚ๆ— ่ฎบๆ‚จๆ˜ฏ็ป้ชŒไธฐๅฏŒ็š„ๆœบๅ™จๅญฆไน ไปŽไธš่€…่ฟ˜ๆ˜ฏๆ–ฐๅ…ฅ่กŒ่€…๏ผŒ่ฏฅไธญๅฟƒๆ—จๅœจๆœ€ๅคงๅŒ–YOLOv8ๅœจๆ‚จ็š„้กน็›ฎไธญ็š„ๆฝœๅŠ›ใ€‚ + +## ไปŽๅ“ช้‡Œๅผ€ๅง‹ + +- **ๅฎ‰่ฃ…** `ultralytics` ๅนถ้€š่ฟ‡ pip ๅœจๅ‡ ๅˆ†้’Ÿๅ†…ๅผ€ๅง‹่ฟ่กŒ   [:material-clock-fast: ๅผ€ๅง‹ไฝฟ็”จ](quickstart.md){ .md-button } +- **้ข„ๆต‹** ไฝฟ็”จYOLOv8้ข„ๆต‹ๆ–ฐ็š„ๅ›พๅƒๅ’Œ่ง†้ข‘   [:octicons-image-16: ๅœจๅ›พๅƒไธŠ้ข„ๆต‹](modes/predict.md){ .md-button } +- **่ฎญ็ปƒ** ๅœจๆ‚จ่‡ชๅทฑ็š„่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒๆ–ฐ็š„YOLOv8ๆจกๅž‹   [:fontawesome-solid-brain: ่ฎญ็ปƒๆจกๅž‹](modes/train.md){ .md-button } +- **ๆŽข็ดข** YOLOv8็š„ไปปๅŠก๏ผŒๅฆ‚ๅˆ†ๅ‰ฒใ€ๅˆ†็ฑปใ€ๅงฟๆ€ๅ’Œ่ทŸ่ธช   [:material-magnify-expand: ๆŽข็ดขไปปๅŠก](tasks/index.md){ .md-button } + +
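+ไธ‹้ขๆ˜ฏไธ€ไธชๆœ€ๅฐ็š„ๅ…ฅ้—จ็คบไพ‹๏ผˆไป…ไพ›ๅ‚่€ƒ็š„็คบๆ„ๅ†™ๆณ•๏ผŒๅ‡่ฎพๆ‚จๅทฒๅ…ทๅค‡ Python>=3.8 ็Žฏๅขƒ๏ผ‰๏ผŒไธฒ่”ไบ†ไธŠ่ฟฐโ€œๅฎ‰่ฃ…โ€ไธŽโ€œ้ข„ๆต‹โ€ไธคๆญฅ๏ผš
+
+!!! Example "ๅฟซ้€Ÿๅ…ฅ้—จ็คบไพ‹"
+
+    === "CLI"
+
+        ```bash
+        # ้€š่ฟ‡ pip ๅฎ‰่ฃ… ultralytics ๅŒ…
+        pip install ultralytics
+
+        # ไฝฟ็”จ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ๅฏน็คบไพ‹ๅ›พๅƒ่ฟ›่กŒ้ข„ๆต‹
+        yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
+        ```
+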

+<!-- ๅตŒๅ…ฅ็š„ YouTube ่ง†้ข‘๏ผˆๅ ไฝ๏ผ‰ -->
+
+**่ง‚็œ‹๏ผš** ๅฆ‚ไฝ•ๅœจ Google Colab ไธญๅœจๆ‚จ็š„่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒ YOLOv8 ๆจกๅž‹ใ€‚
+ +## YOLO๏ผš็ฎ€ๅฒ + +[YOLO](https://arxiv.org/abs/1506.02640) (You Only Look Once)๏ผŒ็”ฑๅŽ็››้กฟๅคงๅญฆ็š„Joseph Redmonๅ’ŒAli Farhadiๅผ€ๅ‘็š„ๆต่กŒ็›ฎๆ ‡ๆฃ€ๆต‹ๅ’Œๅ›พๅƒๅˆ†ๅ‰ฒๆจกๅž‹๏ผŒไบŽ2015ๅนดๆŽจๅ‡บ๏ผŒ็”ฑไบŽๅ…ถ้ซ˜้€Ÿๅ’Œๅ‡†็กฎๆ€ง่€Œ่ฟ…้€Ÿๆต่กŒใ€‚ + +- [YOLOv2](https://arxiv.org/abs/1612.08242) ๅœจ2016ๅนดๅ‘ๅธƒ๏ผŒ้€š่ฟ‡ๅผ•ๅ…ฅๆ‰น้‡ๅฝ’ไธ€ๅŒ–ใ€้”šๆก†ๅ’Œ็ปดๅบฆ่š็ฑปๆฅๆ”น่ฟ›ไบ†ๅŽŸๅง‹ๆจกๅž‹ใ€‚ +- [YOLOv3](https://pjreddie.com/media/files/papers/YOLOv3.pdf) ๅœจ2018ๅนดๆŽจๅ‡บ๏ผŒ่ฟ›ไธ€ๆญฅๅขžๅผบไบ†ๆจกๅž‹็š„ๆ€ง่ƒฝ๏ผŒไฝฟ็”จไบ†ๆ›ด้ซ˜ๆ•ˆ็š„ไธปๅนฒ็ฝ‘็ปœใ€ๅคšไธช้”š็‚นๅ’Œ็ฉบ้—ด้‡‘ๅญ—ๅก”ๆฑ ๅŒ–ใ€‚ +- [YOLOv4](https://arxiv.org/abs/2004.10934) ๅœจ2020ๅนดๅ‘ๅธƒ๏ผŒๅผ•ๅ…ฅไบ†Mosaicๆ•ฐๆฎๅขžๅผบใ€ๆ–ฐ็š„ๆ— ้”šๆฃ€ๆต‹ๅคดๅ’Œๆ–ฐ็š„ๆŸๅคฑๅ‡ฝๆ•ฐ็ญ‰ๅˆ›ๆ–ฐๅŠŸ่ƒฝใ€‚ +- [YOLOv5](https://github.com/ultralytics/yolov5) ่ฟ›ไธ€ๆญฅๆ”น่ฟ›ไบ†ๆจกๅž‹็š„ๆ€ง่ƒฝ๏ผŒๅนถๅขžๅŠ ไบ†ๆ–ฐๅŠŸ่ƒฝ๏ผŒๅฆ‚่ถ…ๅ‚ๆ•ฐไผ˜ๅŒ–ใ€้›†ๆˆๅฎž้ชŒ่ทŸ่ธชๅ’Œ่‡ชๅŠจๅฏผๅ‡บๅˆฐๅธธ็”จ็š„ๅฏผๅ‡บๆ ผๅผใ€‚ +- [YOLOv6](https://github.com/meituan/YOLOv6) ๅœจ2022ๅนด็”ฑ[็พŽๅ›ข](https://about.meituan.com/)ๅผ€ๆบ๏ผŒ็Žฐๅœจๆญฃๅœจ่ฏฅๅ…ฌๅธ็š„่ฎธๅคš่‡ชๅŠจ้€่ดงๆœบๅ™จไบบไธญไฝฟ็”จใ€‚ +- [YOLOv7](https://github.com/WongKinYiu/yolov7) ๅœจCOCOๅ…ณ้”ฎ็‚นๆ•ฐๆฎ้›†ไธŠๆทปๅŠ ไบ†้ขๅค–็š„ไปปๅŠก๏ผŒๅฆ‚ๅงฟๆ€ไผฐ่ฎกใ€‚ +- [YOLOv8](https://github.com/ultralytics/ultralytics) ๆ˜ฏUltralytics็š„YOLO็š„ๆœ€ๆ–ฐ็‰ˆๆœฌใ€‚ไฝœไธบไธ€็งๅ‰ๆฒฟใ€ๆœ€ๅ…ˆ่ฟ›(SOTA)็š„ๆจกๅž‹๏ผŒYOLOv8ๅœจไน‹ๅ‰็‰ˆๆœฌ็š„ๆˆๅŠŸๅŸบ็ก€ไธŠๅผ•ๅ…ฅไบ†ๆ–ฐๅŠŸ่ƒฝๅ’Œๆ”น่ฟ›๏ผŒไปฅๆ้ซ˜ๆ€ง่ƒฝใ€็ตๆดปๆ€งๅ’Œๆ•ˆ็އใ€‚YOLOv8ๆ”ฏๆŒๅ…จ่Œƒๅ›ด็š„่ง†่ง‰AIไปปๅŠก๏ผŒๅŒ…ๆ‹ฌ[ๆฃ€ๆต‹](https://docs.ultralytics.com/tasks/detect/), [ๅˆ†ๅ‰ฒ](https://docs.ultralytics.com/tasks/segment/), [ๅงฟๆ€ไผฐ่ฎก](https://docs.ultralytics.com/tasks/pose/), [่ทŸ่ธช](https://docs.ultralytics.com/modes/track/), ๅ’Œ[ๅˆ†็ฑป](https://docs.ultralytics.com/tasks/classify/)ใ€‚่ฟ™็งๅคšๅŠŸ่ƒฝๆ€งไฝฟ็”จๆˆท่ƒฝๅคŸๅˆฉ็”จYOLOv8็š„ๅŠŸ่ƒฝๅบ”ๅฏนๅคš็งๅบ”็”จๅ’Œ้ข†ๅŸŸ็š„้œ€ๆฑ‚ใ€‚ + +## YOLO่ฎธๅฏ่ฏ๏ผšUltralytics YOLOๆ˜ฏๅฆ‚ไฝ•ๆŽˆๆƒ็š„๏ผŸ + +Ultralyticsๆไพ›ไธค็ง่ฎธๅฏ้€‰้กนไปฅ้€‚ๅบ”ไธๅŒ็š„ไฝฟ็”จๅœบๆ™ฏ๏ผš + +- **AGPL-3.0่ฎธๅฏ่ฏ**๏ผš่ฟ™็ง[OSI-approved](https://opensource.org/licenses/)ๅผ€ๆบ่ฎธๅฏ่ฏ้žๅธธ้€‚ๅˆๅญฆ็”Ÿๅ’Œ็ˆฑๅฅฝ่€…๏ผŒไฟƒ่ฟ›ไบ†ๅผ€ๆ”พ็š„ๅˆไฝœๅ’Œ็Ÿฅ่ฏ†ๅ…ฑไบซใ€‚ๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ่ฏทๅ‚้˜…[LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)ๆ–‡ไปถใ€‚ +- **ไผไธš่ฎธๅฏ่ฏ**๏ผš่ฟ™็ง่ฎธๅฏ่ฏ่ฎพ่ฎก็”จไบŽๅ•†ไธš็”จ้€”๏ผŒๅ…่ฎธๅฐ†Ultralytics่ฝฏไปถๅ’ŒAIๆจกๅž‹ๆ— ็ผ้›†ๆˆๅˆฐๅ•†ไธšๅ•†ๅ“ๅ’ŒๆœๅŠกไธญ๏ผŒ็ป•่ฟ‡AGPL-3.0็š„ๅผ€ๆบ่ฆๆฑ‚ใ€‚ๅฆ‚ๆžœๆ‚จ็š„ๅœบๆ™ฏๆถ‰ๅŠๅฐ†ๆˆ‘ไปฌ็š„่งฃๅ†ณๆ–นๆกˆๅตŒๅ…ฅๅˆฐๅ•†ไธšไบงๅ“ไธญ๏ผŒ่ฏท้€š่ฟ‡[Ultralytics Licensing](https://ultralytics.com/license)่”็ณปๆˆ‘ไปฌใ€‚ + +ๆˆ‘ไปฌ็š„ๆŽˆๆƒ็ญ–็•ฅๆ—จๅœจ็กฎไฟๆˆ‘ไปฌ็š„ๅผ€ๆบ้กน็›ฎ็š„ไปปไฝ•ๆ”น่ฟ›้ƒฝ่ƒฝๅ›ž้ฆˆๅˆฐ็คพๅŒบใ€‚ๆˆ‘ไปฌๅๅˆ†็่ง†ๅผ€ๆบๅŽŸๅˆ™โค๏ธ๏ผŒๆˆ‘ไปฌ็š„ไฝฟๅ‘ฝๆ˜ฏ็กฎไฟๆˆ‘ไปฌ็š„่ดก็Œฎ่ƒฝๅคŸไปฅๅฏนๆ‰€ๆœ‰ไบบๆœ‰็›Š็š„ๆ–นๅผ่ขซๅˆฉ็”จๅ’Œๆ‹“ๅฑ•ใ€‚ + +--- + +**ๆณจๆ„**๏ผšๆˆ‘ไปฌๆญฃๅœจๅŠชๅŠ›ไธบๆˆ‘ไปฌ็š„ๆ–‡ๆกฃ้กต้ขๆไพ›ไธญๆ–‡ๆ–‡ๆกฃ๏ผŒๅนถๅธŒๆœ›ๅœจๆŽฅไธ‹ๆฅ็š„ๅ‡ ไธชๆœˆๅ†…ๅ‘ๅธƒใ€‚่ฏทๅฏ†ๅˆ‡ๅ…ณๆณจๆˆ‘ไปฌ็š„ๆ›ดๆ–ฐ๏ผŒๅนถๆ„Ÿ่ฐขๆ‚จ็š„่€ๅฟƒ็ญ‰ๅพ…๐Ÿ™ใ€‚ diff --git a/ultralytics/docs/zh/index.md:Zone.Identifier b/ultralytics/docs/zh/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/fast-sam.md b/ultralytics/docs/zh/models/fast-sam.md new file mode 100755 index 0000000..ab7f269 --- /dev/null +++ b/ultralytics/docs/zh/models/fast-sam.md @@ -0,0 +1,193 @@ 
+--- +comments: true +description: FastSAMๆ˜ฏไธ€็งๅŸบไบŽๅท็งฏ็ฅž็ป็ฝ‘็ปœ็š„ๅฎžๆ—ถๅ›พๅƒๅฏน่ฑกๅˆ†ๅ‰ฒ่งฃๅ†ณๆ–นๆกˆใ€‚ๅฎƒๆไพ›ไบ†ๅ“่ถŠ็š„็”จๆˆทไบคไบ’ๅŠŸ่ƒฝใ€่ฎก็ฎ—ๆ•ˆ็އไปฅๅŠ้€‚็”จไบŽๅคš็ง่ง†่ง‰ไปปๅŠก็š„็‰นๆ€งใ€‚ +keywords: FastSAM, ๆœบๅ™จๅญฆไน , ๅŸบไบŽๅท็งฏ็ฅž็ป็ฝ‘็ปœ็š„่งฃๅ†ณๆ–นๆกˆ, ๅ›พๅƒๅฏน่ฑกๅˆ†ๅ‰ฒ, ๅฎžๆ—ถ่งฃๅ†ณๆ–นๆกˆ, Ultralytics, ่ง†่ง‰ไปปๅŠก, ๅ›พๅƒๅค„็†, ๅทฅไธšๅบ”็”จ, ็”จๆˆทไบคไบ’ +--- + +# Fast Segment Anything Model๏ผˆFastSAM๏ผ‰ + +Fast Segment Anything Model๏ผˆFastSAM๏ผ‰ๆ˜ฏไธ€็งๅˆ›ๆ–ฐ็š„ๅฎžๆ—ถๅท็งฏ็ฅž็ป็ฝ‘็ปœ๏ผˆCNN๏ผ‰ๆจกๅž‹๏ผŒ็”จไบŽๅ›พๅƒไธญ็š„ไปปๆ„ๅฏน่ฑกๅˆ†ๅ‰ฒไปปๅŠกใ€‚่ฏฅไปปๅŠกๆ—จๅœจๆ นๆฎๅ„็งๅฏ่ƒฝ็š„็”จๆˆทไบคไบ’ๆ็คบ๏ผŒๅฏนๅ›พๅƒไธญ็š„ไปปๆ„ๅฏน่ฑก่ฟ›่กŒๅˆ†ๅ‰ฒใ€‚FastSAMๅœจไฟๆŒๅ…ทๅค‡็ซžไบ‰ๆ€ง่ƒฝ็š„ๅŒๆ—ถ๏ผŒๆ˜พ่‘—้™ไฝŽไบ†่ฎก็ฎ—้œ€ๆฑ‚๏ผŒไฝฟๅ…ถๆˆไธบๅ„็ง่ง†่ง‰ไปปๅŠก็š„ๅฎž็”จ้€‰ๆ‹ฉใ€‚ + +![Fast Segment Anything Model๏ผˆFastSAM๏ผ‰ๆžถๆž„ๆฆ‚่ฟฐ](https://user-images.githubusercontent.com/26833433/248551984-d98f0f6d-7535-45d0-b380-2e1440b52ad7.jpg) + +## ๆฆ‚่ฟฐ + +FastSAMๆ—จๅœจ่งฃๅ†ณ[Segment Anything Model๏ผˆSAM๏ผ‰](sam.md)็š„ๅฑ€้™ๆ€ง๏ผŒSAMๆ˜ฏไธ€็ง่ฎก็ฎ—่ต„ๆบ้œ€ๆฑ‚ๅพˆ้ซ˜็š„Transformerๆจกๅž‹ใ€‚FastSAMๅฐ†ไปปๆ„ๅฏน่ฑกๅˆ†ๅ‰ฒไปปๅŠกๆ‹†ๅˆ†ไธบไธคไธช้กบๅบ้˜ถๆฎต๏ผšๆ‰€ๆœ‰ๅฎžไพ‹ๅˆ†ๅ‰ฒๅ’Œๆ็คบๅผ•ๅฏผ้€‰ๆ‹ฉใ€‚็ฌฌไธ€้˜ถๆฎตไฝฟ็”จ[YOLOv8-seg](../tasks/segment.md)็”Ÿๆˆๅ›พๅƒไธญๆ‰€ๆœ‰ๅฎžไพ‹็š„ๅˆ†ๅ‰ฒๆŽฉ็ ใ€‚ๅœจ็ฌฌไบŒ้˜ถๆฎต๏ผŒ่พ“ๅ‡บไธŽๆ็คบๅฏนๅบ”็š„ๆ„Ÿๅ…ด่ถฃๅŒบๅŸŸใ€‚ + +## ไธป่ฆ็‰น็‚น + +1. **ๅฎžๆ—ถ่งฃๅ†ณๆ–นๆกˆ๏ผš** FastSAMๅˆฉ็”จCNN็š„่ฎก็ฎ—ๆ•ˆ็އๆไพ›ไบ†ๅ›พๅƒไธญไปปๆ„ๅฏน่ฑกๅˆ†ๅ‰ฒไปปๅŠก็š„ๅฎžๆ—ถ่งฃๅ†ณๆ–นๆกˆ๏ผŒ้€‚็”จไบŽ้œ€่ฆๅฟซ้€Ÿ็ป“ๆžœ็š„ๅทฅไธšๅบ”็”จใ€‚ + +2. **้ซ˜ๆ•ˆๅ’Œ้ซ˜ๆ€ง่ƒฝ๏ผš** FastSAMๅœจๆ˜พ่‘—้™ไฝŽ่ฎก็ฎ—ๅ’Œ่ต„ๆบ้œ€ๆฑ‚็š„ๅŒๆ—ถ๏ผŒไธไผš้™ไฝŽๆ€ง่ƒฝ่ดจ้‡ใ€‚ๅฎƒไธŽSAMๅ…ทๆœ‰็›ธๅฝ“็š„ๆ€ง่ƒฝ๏ผŒไฝ†่ฎก็ฎ—่ต„ๆบๅคงๅน…ๅ‡ๅฐ‘๏ผŒ่ƒฝๅคŸๅฎž็Žฐๅฎžๆ—ถๅบ”็”จใ€‚ + +3. **ๆ็คบๅผ•ๅฏผๅˆ†ๅ‰ฒ๏ผš** FastSAMๅฏไปฅ้€š่ฟ‡ๅ„็งๅฏ่ƒฝ็š„็”จๆˆทไบคไบ’ๆ็คบๆฅๅˆ†ๅ‰ฒๅ›พๅƒไธญ็š„ไปปๆ„ๅฏน่ฑก๏ผŒๆไพ›ไบ†ไธๅŒๅœบๆ™ฏไธ‹็š„็ตๆดปๆ€งๅ’Œ้€‚ๅบ”ๆ€งใ€‚ + +4. **ๅŸบไบŽYOLOv8-seg๏ผš** FastSAMๅŸบไบŽ[YOLOv8-seg](../tasks/segment.md)๏ผŒๆ˜ฏไธ€็ง้…ๅค‡ๅฎžไพ‹ๅˆ†ๅ‰ฒๅˆ†ๆ”ฏ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จใ€‚่ฟ™ไฝฟๅพ—ๅฎƒ่ƒฝๅคŸๆœ‰ๆ•ˆๅœฐ็”Ÿๆˆๅ›พๅƒไธญๆ‰€ๆœ‰ๅฎžไพ‹็š„ๅˆ†ๅ‰ฒๆŽฉ็ ใ€‚ + +5. **ๅŸบๅ‡†ๆต‹่ฏ•ไธญๅ…ทๆœ‰็ซžไบ‰ๅŠ›็š„็ป“ๆžœ๏ผš** ๅœจMS COCO็š„ๅฏน่ฑกๆ่ฎฎไปปๅŠกไธญ๏ผŒFastSAMๅœจๅ•ไธชNVIDIA RTX 3090ไธŠไปฅๆ˜พ่‘—ๆ›ดๅฟซ็š„้€Ÿๅบฆ่Žทๅพ—้ซ˜ๅˆ†๏ผŒไธŽ[SAM](sam.md)็›ธๆฏ”๏ผŒๆ˜พ็คบๅ‡บๅ…ถๆ•ˆ็އๅ’Œ่ƒฝๅŠ›ใ€‚ + +6. **ๅฎž้™…ๅบ”็”จ๏ผš** ๆๅ‡บ็š„ๆ–นๆณ•ไปฅ้žๅธธ้ซ˜็š„้€Ÿๅบฆไธบๅคง้‡่ง†่ง‰ไปปๅŠกๆไพ›ไบ†ไธ€็งๆ–ฐ็š„ๅฎž็”จ่งฃๅ†ณๆ–นๆกˆ๏ผŒๆฏ”ๅฝ“ๅ‰ๆ–นๆณ•ๅฟซๅๅ‡ ๅ€ไนƒ่‡ณๆ•ฐ็™พๅ€ใ€‚ + +7. 
**ๆจกๅž‹ๅŽ‹็ผฉ็š„ๅฏ่กŒๆ€ง๏ผš** FastSAM้€š่ฟ‡ๅผ•ๅ…ฅไบบๅทฅๅ…ˆ้ชŒๅˆฐ็ป“ๆž„ไธญ๏ผŒๅฑ•็คบไบ†้€š่ฟ‡่ทฏๅพ„ๆ˜พ่‘—ๅ‡ๅฐ‘่ฎก็ฎ—ๅทฅไฝœ้‡็š„ๅฏ่กŒๆ€ง๏ผŒไธบ้€š็”จ่ง†่ง‰ไปปๅŠก็š„ๅคงๅž‹ๆจกๅž‹ๆžถๆž„ๅผ€่พŸไบ†ๆ–ฐ็š„ๅฏ่ƒฝๆ€งใ€‚ + +## ๅฏ็”จๆจกๅž‹ใ€ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆ“ไฝœๆจกๅผ + +่ฏฅ่กจๆ ผๅˆ—ๅ‡บไบ†ๅฏ็”จ็š„ๆจกๅž‹ๅŠๅ…ถ็‰นๅฎš็š„้ข„่ฎญ็ปƒๆƒ้‡๏ผŒๅฎƒไปฌๆ”ฏๆŒ็š„ไปปๅŠกไปฅๅŠๅฎƒไปฌไธŽไธๅŒๆ“ไฝœๆจกๅผ๏ผˆๅฆ‚[ๆŽจๆ–ญ](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)๏ผ‰็š„ๅ…ผๅฎนๆ€ง๏ผŒ็”ฑๆ”ฏๆŒ็š„ๆจกๅผ็”จโœ…่กจ็คบ๏ผŒไธๆ”ฏๆŒ็š„ๆจกๅผ็”จโŒ่กจ็คบใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจๆ–ญ | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|-----------|----------------|-----------------------------|----|----|----|----| +| FastSAM-s | `FastSAM-s.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| FastSAM-x | `FastSAM-x.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ็”จๆณ•็คบไพ‹ + +FastSAMๆจกๅž‹ๅพˆๅฎนๆ˜“้›†ๆˆๅˆฐPythonๅบ”็”จ็จ‹ๅบไธญใ€‚Ultralyticsๆไพ›ไบ†็”จๆˆทๅ‹ๅฅฝ็š„Python APIๅ’ŒCLIๅ‘ฝไปคไปฅ็ฎ€ๅŒ–ๅผ€ๅ‘ใ€‚ + +### ้ข„ๆต‹็”จๆณ• + +่ฆๅฏนๅ›พๅƒ่ฟ›่กŒๅฏน่ฑกๆฃ€ๆต‹๏ผŒๅฏไปฅไฝฟ็”จไธ‹้ข็š„`predict`ๆ–นๆณ•๏ผš + +!!! Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics import FastSAM + from ultralytics.models.fastsam import FastSAMPrompt + + # ๅฎšไน‰ๆŽจๆ–ญๆบ + source = 'path/to/bus.jpg' + + # ๅˆ›ๅปบFastSAMๆจกๅž‹ + model = FastSAM('FastSAM-s.pt') # ๆˆ– FastSAM-x.pt + + # ๅœจๅ›พๅƒไธŠ่ฟ่กŒๆŽจๆ–ญ + everything_results = model(source, device='cpu', retina_masks=True, imgsz=1024, conf=0.4, iou=0.9) + + # ๅ‡†ๅค‡Prompt Processๅฏน่ฑก + prompt_process = FastSAMPrompt(source, everything_results, device='cpu') + + # Everythingๆ็คบ + ann = prompt_process.everything_prompt() + + # Bbox้ป˜่ฎคๅฝข็Šถ[0,0,0,0] -> [x1,y1,x2,y2] + ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300]) + + # ๆ–‡ๆœฌๆ็คบ + ann = prompt_process.text_prompt(text='a photo of a dog') + + # ็‚นๆ็คบ + # ้ป˜่ฎค็‚น[[0,0]] [[x1,y1],[x2,y2]] + # ้ป˜่ฎคpoint_label [0] [1,0] 0๏ผš่ƒŒๆ™ฏ๏ผŒ1๏ผšๅ‰ๆ™ฏ + ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1]) + prompt_process.plot(annotations=ann, output='./') + ``` + + === "CLI" + ```bash + # ๅŠ ่ฝฝFastSAMๆจกๅž‹ๅนถไฝฟ็”จ่ฏฅๆจกๅž‹ๅˆ†ๅ‰ฒๅ›พๅƒไธญ็š„ๆ‰€ๆœ‰ๅฏน่ฑก + yolo segment predict model=FastSAM-s.pt source=path/to/bus.jpg imgsz=640 + ``` + +ๆญค็‰‡ๆฎตๆผ”็คบไบ†ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹ๅนถๅœจๅ›พๅƒไธŠ่ฟ›่กŒ้ข„ๆต‹็š„็ฎ€ๅ•ๆ€งใ€‚ + +### ้ชŒ่ฏ็”จๆณ• + +ๅฏไปฅ้‡‡็”จไปฅไธ‹ๆ–นๅผๅฏนๆ•ฐๆฎ้›†ไธŠ็š„ๆจกๅž‹่ฟ›่กŒ้ชŒ่ฏ๏ผš + +!!! Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics import FastSAM + + # ๅˆ›ๅปบFastSAMๆจกๅž‹ + model = FastSAM('FastSAM-s.pt') # ๆˆ– FastSAM-x.pt + + # ้ชŒ่ฏๆจกๅž‹ + results = model.val(data='coco8-seg.yaml') + ``` + + === "CLI" + ```bash + # ๅŠ ่ฝฝFastSAMๆจกๅž‹๏ผŒๅนถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ้ชŒ่ฏ๏ผŒๅ›พๅƒๅคงๅฐไธบ640 + yolo segment val model=FastSAM-s.pt data=coco8.yaml imgsz=640 + ``` + +่ฏทๆณจๆ„๏ผŒFastSAMไป…ๆ”ฏๆŒๆฃ€ๆต‹ๅ’Œๅˆ†ๅ‰ฒๅ•ไธช็ฑปๅˆซ็š„ๅฏน่ฑกใ€‚่ฟ™ๆ„ๅ‘ณ็€ๅฎƒๅฐ†่ฏ†ๅˆซๅ’Œๅˆ†ๅ‰ฒๆ‰€ๆœ‰ๅฏน่ฑกไธบ็›ธๅŒ็š„็ฑปๅˆซใ€‚ๅ› ๆญค๏ผŒๅœจๅ‡†ๅค‡ๆ•ฐๆฎ้›†ๆ—ถ๏ผŒ้œ€่ฆๅฐ†ๆ‰€ๆœ‰ๅฏน่ฑก็š„็ฑปๅˆซID่ฝฌๆขไธบ0ใ€‚ + +## FastSAMๅฎ˜ๆ–น็”จๆณ• + +FastSAMไนŸๅฏไปฅ็›ดๆŽฅไปŽ[https://github.com/CASIA-IVA-Lab/FastSAM](https://github.com/CASIA-IVA-Lab/FastSAM)ๅญ˜ๅ‚จๅบ“ไธญ่Žทๅ–ใ€‚ไปฅไธ‹ๆ˜ฏๆ‚จๅฏ่ƒฝ้‡‡ๅ–็š„ไฝฟ็”จFastSAM็š„ๅ…ธๅž‹ๆญฅ้ชค็š„็ฎ€่ฆๆฆ‚่ฟฐ๏ผš + +### ๅฎ‰่ฃ… + +1. 
ๅ…‹้š†FastSAMๅญ˜ๅ‚จๅบ“๏ผš + ```shell + git clone https://github.com/CASIA-IVA-Lab/FastSAM.git + ``` + +2. ๅˆ›ๅปบๅนถๆฟ€ๆดปไธ€ไธชๅธฆๆœ‰Python 3.9็š„Conda็Žฏๅขƒ๏ผš + ```shell + conda create -n FastSAM python=3.9 + conda activate FastSAM + ``` + +3. ่ฟ›ๅ…ฅๅ…‹้š†็š„ๅญ˜ๅ‚จๅบ“ๅนถๅฎ‰่ฃ…ๆ‰€้œ€็š„่ฝฏไปถๅŒ…๏ผš + ```shell + cd FastSAM + pip install -r requirements.txt + ``` + +4. ๅฎ‰่ฃ…CLIPๆจกๅž‹๏ผš + ```shell + pip install git+https://github.com/openai/CLIP.git + ``` + +### ็คบไพ‹็”จๆณ• + +1. ไธ‹่ฝฝ[ๆจกๅž‹ๆฃ€ๆŸฅ็‚น](https://drive.google.com/file/d/1m1sjY4ihXBU1fZXdQ-Xdj-mDltW-2Rqv/view?usp=sharing)ใ€‚ + +2. ไฝฟ็”จFastSAM่ฟ›่กŒๆŽจๆ–ญใ€‚็คบไพ‹ๅ‘ฝไปค๏ผš + + - ๅœจๅ›พๅƒไธญๅˆ†ๅ‰ฒๆ‰€ๆœ‰ๅ†…ๅฎน๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg + ``` + + - ไฝฟ็”จๆ–‡ๆœฌๆ็คบๅˆ†ๅ‰ฒ็‰นๅฎšๅฏน่ฑก๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --text_prompt "the yellow dog" + ``` + + - ๅœจ่พน็•Œๆก†ไธญๅˆ†ๅ‰ฒๅฏน่ฑก๏ผˆไปฅxywhๆ ผๅผๆไพ›่พน็•Œๆก†ๅๆ ‡๏ผ‰๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --box_prompt "[570,200,230,400]" + ``` + + - ๅœจ็‰นๅฎš็‚น้™„่ฟ‘ๅˆ†ๅ‰ฒๅฏน่ฑก๏ผš + ```shell + python Inference.py --model_path ./weights/FastSAM.pt --img_path ./images/dogs.jpg --point_prompt "[[520,360],[620,300]]" --point_label "[1,0]" + ``` + +ๆญคๅค–๏ผŒๆ‚จๅฏไปฅๅœจ[Colabๆผ”็คบ](https://colab.research.google.com/drive/1oX14f6IneGGw612WgVlAiy91UHwFAvr9?usp=sharing)ไธŠๅฐ่ฏ•FastSAM๏ผŒๆˆ–ๅœจ[HuggingFace Webๆผ”็คบ](https://huggingface.co/spaces/An-619/FastSAM)ไธŠ่ฟ›่กŒๅฏ่ง†ๅŒ–ไฝ“้ชŒใ€‚ + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๆˆ‘ไปฌ่ฆๆ„Ÿ่ฐขFastSAMไฝœ่€…ๅœจๅฎžๆ—ถๅฎžไพ‹ๅˆ†ๅ‰ฒ้ข†ๅŸŸไฝœๅ‡บ็š„้‡่ฆ่ดก็Œฎ๏ผš + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{zhao2023fast, + title={Fast Segment Anything}, + author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, + year={2023}, + eprint={2306.12156}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ๅฏๅœจ[arXiv](https://arxiv.org/abs/2306.12156)ไธŠๆ‰พๅˆฐๅŽŸๅง‹็š„FastSAM่ฎบๆ–‡ใ€‚ไฝœ่€…ๅทฒ็ปๅ…ฌๅผ€ไบ†ไป–ไปฌ็š„ๅทฅไฝœ๏ผŒไปฃ็ ๅบ“ๅฏไปฅๅœจ[GitHub](https://github.com/CASIA-IVA-Lab/FastSAM)ไธŠ่Žทๅ–ใ€‚ๆˆ‘ไปฌๆ„Ÿ่ฐขไป–ไปฌๅœจๆŽจๅŠจ่ฏฅ้ข†ๅŸŸไปฅๅŠไฝฟไป–ไปฌ็š„ๅทฅไฝœๅฏนๆ›ดๅนฟๆณ›็š„็คพๅŒบๅฏ่ฎฟ้—ฎๆ–น้ขๆ‰€ๅš็š„ๅŠชๅŠ›ใ€‚ diff --git a/ultralytics/docs/zh/models/fast-sam.md:Zone.Identifier b/ultralytics/docs/zh/models/fast-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/fast-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/index.md b/ultralytics/docs/zh/models/index.md new file mode 100755 index 0000000..a9a29db --- /dev/null +++ b/ultralytics/docs/zh/models/index.md @@ -0,0 +1,98 @@ +--- +comments: true +description: ๆŽข็ดข Ultralytics ๆ”ฏๆŒ็š„ๅคšๆ ทๅŒ– YOLO ็ณปๅˆ—ใ€SAMใ€MobileSAMใ€FastSAMใ€YOLO-NAS ๅ’Œ RT-DETR ๆจกๅž‹ใ€‚ๅผ€ๅฏๆ‚จ็š„ CLI ๅ’Œ Python ไฝฟ็”จ็คบไพ‹ไน‹ๆ—…ใ€‚ +keywords: Ultralytics, ๆ–‡ๆกฃ, YOLO, SAM, MobileSAM, FastSAM, YOLO-NAS, RT-DETR, ๆจกๅž‹, ๆžถๆž„, Python, CLI +--- + +# Ultralytics ๆ”ฏๆŒ็š„ๆจกๅž‹ + +ๆฌข่ฟŽๆฅๅˆฐ Ultralytics 
็š„ๆจกๅž‹ๆ–‡ๆกฃ๏ผๆˆ‘ไปฌๆไพ›ๅคš็งๆจกๅž‹็š„ๆ”ฏๆŒ๏ผŒๆฏ็งๆจกๅž‹้ƒฝ้’ˆๅฏน็‰นๅฎšไปปๅŠก้‡่บซๅฎšๅš๏ผŒๅฆ‚[ๅฏน่ฑกๆฃ€ๆต‹](../tasks/detect.md)ใ€[ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md)ใ€[ๅ›พๅƒๅˆ†็ฑป](../tasks/classify.md)ใ€[ๅงฟๆ€ไผฐ่ฎก](../tasks/pose.md)ไปฅๅŠ[ๅคšๅฏน่ฑก่ทŸ่ธช](../modes/track.md)ใ€‚ๅฆ‚ๆžœๆ‚จๆœ‰ๅ…ด่ถฃๅฐ†ๆ‚จ็š„ๆจกๅž‹ๆžถๆž„่ดก็Œฎ็ป™ Ultralytics๏ผŒ่ฏทๆŸฅ็œ‹ๆˆ‘ไปฌ็š„[่ดก็ŒฎๆŒ‡ๅ—](../../help/contributing.md)ใ€‚ + +!!! Note "ๆณจๆ„" + + ๐Ÿšง ๆˆ‘ไปฌ็š„ๅคš่ฏญ่จ€ๆ–‡ๆกฃ็›ฎๅ‰ๆญฃๅœจๅปบ่ฎพไธญ๏ผŒๆˆ‘ไปฌๆญฃๅœจๅŠชๅŠ›่ฟ›่กŒๅฎŒๅ–„ใ€‚ๆ„Ÿ่ฐขๆ‚จ็š„่€ๅฟƒ็ญ‰ๅพ…๏ผ๐Ÿ™ + +## ็‰น่‰ฒๆจกๅž‹ + +ไปฅไธ‹ๆ˜ฏไธ€ไบ›ๅ…ณ้”ฎๆจกๅž‹็š„ไป‹็ป๏ผš + +1. **[YOLOv3](yolov3.md)**๏ผš็”ฑ Joseph Redmon ๆœ€ๅˆๅผ€ๅ‘็š„ YOLO ๆจกๅž‹ๅฎถๆ—็š„็ฌฌไธ‰็‰ˆ๏ผŒไปฅๅ…ถ้ซ˜ๆ•ˆ็š„ๅฎžๆ—ถๅฏน่ฑกๆฃ€ๆต‹่ƒฝๅŠ›่€Œ้—ปๅใ€‚ +2. **[YOLOv4](yolov4.md)**๏ผš็”ฑ Alexey Bochkovskiy ๅœจ 2020 ๅนดๅ‘ๅธƒ็š„ YOLOv3 ็š„ darknet ๅŽŸ็”Ÿๆ›ดๆ–ฐ็‰ˆๆœฌใ€‚ +3. **[YOLOv5](yolov5.md)**๏ผšUltralytics ๆ”น่ฟ›็š„ YOLO ๆžถๆž„็‰ˆๆœฌ๏ผŒไธŽๅ…ˆๅ‰็‰ˆๆœฌ็›ธๆฏ”๏ผŒๆไพ›ไบ†ๆ›ดๅฅฝ็š„ๆ€ง่ƒฝๅ’Œ้€Ÿๅบฆๆƒ่กกใ€‚ +4. **[YOLOv6](yolov6.md)**๏ผš็”ฑ[็พŽๅ›ข](https://about.meituan.com/)ๅœจ 2022 ๅนดๅ‘ๅธƒ๏ผŒ็”จไบŽๅ…ฌๅธๅคšไธช่‡ชไธป้€่ดงๆœบๅ™จไบบไธญใ€‚ +5. **[YOLOv7](yolov7.md)**๏ผšYOLOv4 ไฝœ่€…ๅœจ 2022 ๅนดๅ‘ๅธƒ็š„ๆ›ดๆ–ฐ็‰ˆ YOLO ๆจกๅž‹ใ€‚ +6. **[YOLOv8](yolov8.md) NEW ๐Ÿš€**๏ผšYOLO ๅฎถๆ—็š„ๆœ€ๆ–ฐ็‰ˆๆœฌ๏ผŒๅ…ทๅค‡ๅฎžไพ‹ๅˆ†ๅ‰ฒใ€ๅงฟๆ€/ๅ…ณ้”ฎ็‚นไผฐ่ฎกๅ’Œๅˆ†็ฑป็ญ‰ๅขžๅผบ่ƒฝๅŠ›ใ€‚ +7. **[Segment Anything Model (SAM)](sam.md)**๏ผšMeta ็š„ Segment Anything Model (SAM)ใ€‚ +8. **[Mobile Segment Anything Model (MobileSAM)](mobile-sam.md)**๏ผš็”ฑๅบ†็†™ๅคงๅญฆๅผ€ๅ‘็š„็งปๅŠจๅบ”็”จ MobileSAMใ€‚ +9. **[Fast Segment Anything Model (FastSAM)](fast-sam.md)**๏ผšไธญๅ›ฝ็ง‘ๅญฆ้™ข่‡ชๅŠจๅŒ–็ ”็ฉถๆ‰€ๅ›พๅƒไธŽ่ง†้ข‘ๅˆ†ๆž็ป„ๅผ€ๅ‘็š„ FastSAMใ€‚ +10. **[YOLO-NAS](yolo-nas.md)**๏ผšYOLO ็ฅž็ป็ฝ‘็ปœ็ป“ๆž„ๆœ็ดข (NAS) ๆจกๅž‹ใ€‚ +11. **[Realtime Detection Transformers (RT-DETR)](rtdetr.md)**๏ผš็™พๅบฆ PaddlePaddle ๅฎžๆ—ถๆฃ€ๆต‹ๅ˜ๆขๅ™จ (RT-DETR) ๆจกๅž‹ใ€‚ + +

+<!-- ๅตŒๅ…ฅ็š„ YouTube ่ง†้ข‘๏ผˆๅ ไฝ๏ผ‰ -->
+
+**่ง‚็œ‹๏ผš** ๅช้œ€ๅ‡ ่กŒไปฃ็ ๅณๅฏ่ฟ่กŒ Ultralytics YOLO ๆจกๅž‹ใ€‚
+ +## ๅ…ฅ้—จ๏ผšไฝฟ็”จ็คบไพ‹ + +ๆญค็คบไพ‹ๆไพ›ไบ†็ฎ€ๅ•็š„ YOLO ่ฎญ็ปƒๅ’ŒๆŽจ็†็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›ๅ’Œๅ…ถไป–[ๆจกๅผ](../modes/index.md)็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๆŸฅ็œ‹[Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md) ๅ’Œ [Export](../modes/export.md) ๆ–‡ๆกฃ้กต้ขใ€‚ + +่ฏทๆณจๆ„๏ผŒไปฅไธ‹็คบไพ‹้€‚็”จไบŽๅฏน่ฑกๆฃ€ๆต‹็š„ YOLOv8 [Detect](../tasks/detect.md) ๆจกๅž‹ใ€‚ๆœ‰ๅ…ณๅ…ถไป–ๆ”ฏๆŒไปปๅŠก็š„่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๆŸฅ็œ‹[Segment](../tasks/segment.md)ใ€[Classify](../tasks/classify.md) ๅ’Œ [Pose](../tasks/pose.md) ๆ–‡ๆกฃใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ๅฏๅฐ† PyTorch ้ข„่ฎญ็ปƒ็š„ `*.pt` ๆจกๅž‹ไปฅๅŠ้…็ฝฎๆ–‡ไปถ `*.yaml` ไผ ๅ…ฅ `YOLO()`ใ€`SAM()`ใ€`NAS()` ๅ’Œ `RTDETR()` ็ฑป๏ผŒไปฅๅœจ Python ไธญๅˆ›ๅปบๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ๅœจ COCO8 ็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒๆจกๅž‹ 100 ไธชๅ‘จๆœŸ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จ YOLOv8n ๆจกๅž‹ๅฏน 'bus.jpg' ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + CLI ๅ‘ฝไปคๅฏ็›ดๆŽฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ๅŠ ่ฝฝ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹๏ผŒๅนถๅœจ COCO8 ็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒ 100 ไธชๅ‘จๆœŸ + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ๅŠ ่ฝฝ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹๏ผŒๅนถๅฏน 'bus.jpg' ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ่ดก็Œฎๆ–ฐๆจกๅž‹ + +ๆœ‰ๅ…ด่ถฃๅฐ†ๆ‚จ็š„ๆจกๅž‹่ดก็Œฎ็ป™ Ultralytics ๅ—๏ผŸๅคชๅฅฝไบ†๏ผๆˆ‘ไปฌๅง‹็ปˆๆฌข่ฟŽๆ‰ฉๅฑ•ๆˆ‘ไปฌ็š„ๆจกๅž‹ๆŠ•่ต„็ป„ๅˆใ€‚ + +1. **Fork ไป“ๅบ“**๏ผšไปŽ Fork [Ultralytics GitHub ไป“ๅบ“](https://github.com/ultralytics/ultralytics) ๅผ€ๅง‹ใ€‚ + +2. **ๅ…‹้š†ๆ‚จ็š„ Fork**๏ผšๅฐ†ๆ‚จ็š„ Fork ๅ…‹้š†ๅˆฐๆ‚จ็š„ๆœฌๅœฐๆœบๅ™จ๏ผŒๅนถๅˆ›ๅปบไธ€ไธชๆ–ฐ็š„ๅˆ†ๆ”ฏ่ฟ›่กŒๅทฅไฝœใ€‚ + +3. **ๅฎž็Žฐๆ‚จ็š„ๆจกๅž‹**๏ผšๆŒ‰็…งๆˆ‘ไปฌๅœจ[่ดก็ŒฎๆŒ‡ๅ—](../../help/contributing.md)ไธญๆไพ›็š„็ผ–็ ๆ ‡ๅ‡†ๅ’ŒๆŒ‡ๅ—ๆทปๅŠ ๆ‚จ็š„ๆจกๅž‹ใ€‚ + +4. **ๅฝปๅบ•ๆต‹่ฏ•**๏ผš็กฎไฟๅฝปๅบ•ๆต‹่ฏ•ๆ‚จ็š„ๆจกๅž‹๏ผŒๆ— ่ฎบๆ˜ฏ็‹ฌ็ซ‹ๆต‹่ฏ•่ฟ˜ๆ˜ฏไฝœไธบๆตๆฐด็บฟ็š„ไธ€้ƒจๅˆ†ใ€‚ + +5. **ๅˆ›ๅปบๆ‹‰ๅ–่ฏทๆฑ‚**๏ผšไธ€ๆ—ฆๆ‚จๅฏนๆ‚จ็š„ๆจกๅž‹ๆปกๆ„๏ผŒๅฐฑๅˆ›ๅปบไธ€ไธชๆ‹‰ๅ–่ฏทๆฑ‚ไปฅไพ›ไธปไป“ๅบ“ๅฎกๆŸฅใ€‚ + +6. 
**ไปฃ็ ๅฎกๆŸฅไธŽๅˆๅนถ**๏ผš็ป่ฟ‡ๅฎกๆŸฅ๏ผŒๅฆ‚ๆžœๆ‚จ็š„ๆจกๅž‹็ฌฆๅˆๆˆ‘ไปฌ็š„ๆ ‡ๅ‡†๏ผŒๅฎƒๅฐ†่ขซๅˆๅนถๅˆฐไธปไป“ๅบ“ไธญใ€‚ + +ๆœ‰ๅ…ณ่ฏฆ็ป†ๆญฅ้ชค๏ผŒ่ฏทๅ‚้˜…ๆˆ‘ไปฌ็š„[่ดก็ŒฎๆŒ‡ๅ—](../../help/contributing.md)ใ€‚ diff --git a/ultralytics/docs/zh/models/index.md:Zone.Identifier b/ultralytics/docs/zh/models/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/mobile-sam.md b/ultralytics/docs/zh/models/mobile-sam.md new file mode 100755 index 0000000..98b191d --- /dev/null +++ b/ultralytics/docs/zh/models/mobile-sam.md @@ -0,0 +1,116 @@ +--- +comments: true +description: ไบ†่งฃๆœ‰ๅ…ณMobileSAM็š„ๆ›ดๅคšไฟกๆฏ๏ผŒๅŒ…ๆ‹ฌๅ…ถๅฎž็Žฐใ€ไธŽๅŽŸๅง‹SAM็š„ๆฏ”่พƒ๏ผŒไปฅๅŠๅœจUltralyticsๆก†ๆžถไธญๅฆ‚ไฝ•ไธ‹่ฝฝๅ’Œๆต‹่ฏ•ๅฎƒใ€‚็ซ‹ๅณๆ”น่ฟ›ๆ‚จ็š„็งปๅŠจๅบ”็”จ็จ‹ๅบใ€‚ +keywords: MobileSAM, Ultralytics, SAM, ็งปๅŠจๅบ”็”จ, Arxiv, GPU, API, ๅ›พๅƒ็ผ–็ ๅ™จ, ่’™็‰ˆ่งฃ็ ๅ™จ, ๆจกๅž‹ไธ‹่ฝฝ, ๆต‹่ฏ•ๆ–นๆณ• +--- + +![MobileSAM Logo](https://github.com/ChaoningZhang/MobileSAM/blob/master/assets/logo2.png?raw=true) + +# ็งปๅŠจ็ซฏ็ป†ๅˆ†ๆจกๅž‹๏ผˆMobileSAM๏ผ‰ + +MobileSAM ่ฎบๆ–‡็Žฐๅœจๅฏไปฅๅœจ [arXiv](https://arxiv.org/pdf/2306.14289.pdf) ไธŠๆ‰พๅˆฐใ€‚ + +ๅฏไปฅ้€š่ฟ‡ๆญค [ๆผ”็คบ้“พๆŽฅ](https://huggingface.co/spaces/dhkim2810/MobileSAM) ่ฎฟ้—ฎๅœจ CPU ไธŠ่ฟ่กŒ็š„ MobileSAM ๆผ”็คบใ€‚ๅœจ Mac i5 CPU ไธŠ๏ผŒๆ€ง่ƒฝๅคง็บฆ้œ€่ฆ 3 ็ง’ใ€‚ๅœจ Hugging Face ็š„ๆผ”็คบไธญ๏ผŒ็•Œ้ขๅ’Œๆ€ง่ƒฝ่พƒไฝŽ็š„ CPU ๅฏผ่‡ดๅ“ๅบ”่พƒๆ…ข๏ผŒไฝ†ๅฎƒไป็„ถ่ƒฝๆœ‰ๆ•ˆๅœฐๅทฅไฝœใ€‚ + +MobileSAM ๅทฒๅœจ Grounding-SAMใ€AnyLabeling ๅ’Œ Segment Anything in 3D ็ญ‰ๅคšไธช้กน็›ฎไธญๅฎžๆ–ฝใ€‚ๆ‚จๅฏไปฅๅœจ [Grounding-SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything)ใ€[AnyLabeling](https://github.com/vietanhdev/anylabeling) ๅ’Œ [Segment Anything in 3D](https://github.com/Jumpat/SegmentAnythingin3D) ไธŠๆ‰พๅˆฐ่ฟ™ไบ›้กน็›ฎใ€‚ + +MobileSAM ไฝฟ็”จๅ•ไธช GPU ๅœจไธๅˆฐไธ€ๅคฉ็š„ๆ—ถ้—ดๅ†…ๅฏน 10 ไธ‡ไธชๆ•ฐๆฎ้›†๏ผˆๅŽŸๅง‹ๅ›พๅƒ็š„ 1%๏ผ‰่ฟ›่กŒ่ฎญ็ปƒใ€‚ๅ…ณไบŽๆญค่ฎญ็ปƒ็š„ไปฃ็ ๅฐ†ๅœจๅฐ†ๆฅๆไพ›ใ€‚ + +## ๅฏ็”จๆจกๅž‹ใ€ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆ“ไฝœๆจกๅผ + +ไปฅไธ‹่กจๆ ผๆ˜พ็คบไบ†ๅฏ็”จๆจกๅž‹ๅŠๅ…ถๅ…ทไฝ“็š„้ข„่ฎญ็ปƒๆƒ้‡๏ผŒๅฎƒไปฌๆ”ฏๆŒ็š„ไปปๅŠกไปฅๅŠไธŽไธๅŒๆ“ไฝœๆจกๅผ๏ผˆ[้ข„ๆต‹](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md) ๅ’Œ [ๅฏผๅ‡บ](../modes/export.md)๏ผ‰็š„ๅ…ผๅฎนๆ€ง๏ผŒๅ…ถไธญๆ”ฏๆŒ็š„ๆจกๅผ็”จ โœ… ่กจ็คบ๏ผŒไธๆ”ฏๆŒ็š„ๆจกๅผ็”จ โŒ ่กจ็คบใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ๆ”ฏๆŒ็š„ไปปๅŠก | ้ข„ๆต‹ | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|-----------|-----------------|-----------------------------|----|----|----|----| +| MobileSAM | `mobile_sam.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ไปŽ SAM ่ฟ็งปๅˆฐ MobileSAM + +็”ฑไบŽ MobileSAM ไฟ็•™ไบ†ไธŽๅŽŸๅง‹ SAM ็›ธๅŒ็š„ๆต็จ‹๏ผŒๆˆ‘ไปฌๅทฒๅฐ†ๅŽŸๅง‹ SAM ็š„้ข„ๅค„็†ใ€ๅŽๅค„็†ๅ’Œๆ‰€ๆœ‰ๅ…ถไป–ๆŽฅๅฃๆ•ดๅˆๅˆฐ MobileSAM ไธญใ€‚ๅ› ๆญค๏ผŒ็›ฎๅ‰ไฝฟ็”จๅŽŸๅง‹ SAM ็š„็”จๆˆทๅฏไปฅไปฅๆœ€ๅฐ็š„ๅŠชๅŠ›่ฟ็งปๅˆฐ MobileSAMใ€‚ + +MobileSAM ๅœจๆ€ง่ƒฝไธŠไธŽๅŽŸๅง‹ SAM ็›ธๅฝ“๏ผŒๅนถไฟ็•™ไบ†็›ธๅŒ็š„ๆต็จ‹๏ผŒๅชๆ˜ฏๆ›ดๆ”นไบ†ๅ›พๅƒ็ผ–็ ๅ™จใ€‚ๅ…ทไฝ“่€Œ่จ€๏ผŒๆˆ‘ไปฌ็”จ่พƒๅฐ็š„ Tiny-ViT๏ผˆ5M๏ผ‰ๆ›ฟๆขไบ†ๅŽŸๅง‹็š„็ฌจ้‡็š„ ViT-H ็ผ–็ ๅ™จ๏ผˆ632M๏ผ‰ใ€‚ๅœจๅ•ไธช GPU ไธŠ๏ผŒMobileSAM ๆฏๅผ ๅ›พ็‰‡็š„่ฟ่กŒๆ—ถ้—ด็บฆไธบ 12 ๆฏซ็ง’๏ผšๅ›พๅƒ็ผ–็ ๅ™จ็บฆ 8 ๆฏซ็ง’๏ผŒ่’™็‰ˆ่งฃ็ ๅ™จ็บฆ 4 ๆฏซ็ง’ใ€‚ + +ไปฅไธ‹่กจๆ ผๆฏ”่พƒไบ†ๅŸบไบŽ ViT ็š„ๅ›พๅƒ็ผ–็ ๅ™จ๏ผš + +| ๅ›พๅƒ็ผ–็ ๅ™จ | ๅŽŸๅง‹ SAM | MobileSAM | +|-------|--------|-----------| +| ๅ‚ๆ•ฐ | 611M | 5M | +| 
้€Ÿๅบฆ | 452ms | 8ms | + +ๅŽŸๅง‹ SAM ๅ’Œ MobileSAM ๅ‡ไฝฟ็”จ็›ธๅŒ็š„ๆ็คบๅผ•ๅฏผ่’™็‰ˆ่งฃ็ ๅ™จ๏ผš + +| ่’™็‰ˆ่งฃ็ ๅ™จ | ๅŽŸๅง‹ SAM | MobileSAM | +|-------|--------|-----------| +| ๅ‚ๆ•ฐ | 3.876M | 3.876M | +| ้€Ÿๅบฆ | 4ms | 4ms | + +ไปฅไธ‹ๆ˜ฏๆ•ดไธชๆต็จ‹็š„ๆฏ”่พƒ๏ผš + +| ๆ•ดไธชๆต็จ‹๏ผˆ็ผ–็ ๅ™จ+่งฃ็ ๅ™จ๏ผ‰ | ๅŽŸๅง‹ SAM | MobileSAM | +|---------------|--------|-----------| +| ๅ‚ๆ•ฐ | 615M | 9.66M | +| ้€Ÿๅบฆ | 456ms | 12ms | + +MobileSAM ๅ’ŒๅŽŸๅง‹ SAM ็š„ๆ€ง่ƒฝ้€š่ฟ‡ไฝฟ็”จ็‚นๅ’Œๆก†ไฝœไธบๆ็คบ่ฟ›่กŒๆผ”็คบใ€‚ + +![็‚นไฝœไธบๆ็คบ็š„ๅ›พๅƒ](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +![ๆก†ไฝœไธบๆ็คบ็š„ๅ›พๅƒ](https://raw.githubusercontent.com/ChaoningZhang/MobileSAM/master/assets/mask_box.jpg?raw=true) + +MobileSAM ็š„ๆ€ง่ƒฝไผ˜ไบŽๅฝ“ๅ‰็š„ FastSAM๏ผŒๅฐบๅฏธๅคง็บฆๅ‡ๅฐไบ† 5 ๅ€๏ผŒ้€Ÿๅบฆๅฟซไบ†็บฆ 7 ๅ€ใ€‚ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏท่ฎฟ้—ฎ [MobileSAM ้กน็›ฎ้กต้ข](https://github.com/ChaoningZhang/MobileSAM)ใ€‚ + +## ๅœจ Ultralytics ไธญๆต‹่ฏ• MobileSAM + +ไธŽๅŽŸๅง‹ SAM ไธ€ๆ ท๏ผŒๆˆ‘ไปฌๅœจ Ultralytics ไธญๆไพ›ไบ†ไธ€็ง็ฎ€ๅ•็š„ๆต‹่ฏ•ๆ–นๆณ•๏ผŒๅŒ…ๆ‹ฌ็‚นๆ็คบๅ’Œๆก†ๆ็คบ็š„ๆจกๅผใ€‚ + +### ๆจกๅž‹ไธ‹่ฝฝ + +ๆ‚จๅฏไปฅๅœจ [่ฟ™้‡Œ](https://github.com/ChaoningZhang/MobileSAM/blob/master/weights/mobile_sam.pt) ไธ‹่ฝฝๆจกๅž‹ใ€‚ + +### ็‚นๆ็คบ + +!!! Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics import SAM + + # ่ฝฝๅ…ฅๆจกๅž‹ + model = SAM('mobile_sam.pt') + + # ๅŸบไบŽ็‚นๆ็คบ้ข„ๆต‹ไธ€ไธชๅˆ†ๆฎต + model.predict('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +### ๆก†ๆ็คบ + +!!! Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics import SAM + + # ่ฝฝๅ…ฅๆจกๅž‹ + model = SAM('mobile_sam.pt') + + # ๅŸบไบŽๆก†ๆ็คบ้ข„ๆต‹ไธ€ไธชๅˆ†ๆฎต + model.predict('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + ``` + +ๆˆ‘ไปฌไฝฟ็”จ็›ธๅŒ็š„ API ๅฎž็Žฐไบ† `MobileSAM` ๅ’Œ `SAM`ใ€‚ๆœ‰ๅ…ณๆ›ดๅคš็”จๆณ•ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜… [SAM ้กต้ข](sam.md)ใ€‚ + +## ๅผ•็”จๅ’Œ้ธฃ่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจ็ ”็ฉถๆˆ–ๅผ€ๅ‘ๅทฅไฝœไธญๅ‘็Žฐ MobileSAM ๅฏนๆ‚จๆœ‰็”จ๏ผŒ่ฏท่€ƒ่™‘ๅผ•็”จๆˆ‘ไปฌ็š„่ฎบๆ–‡๏ผš + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{mobile_sam, + title={Faster Segment Anything: Towards Lightweight SAM for Mobile Applications}, + author={Zhang, Chaoning and Han, Dongshen and Qiao, Yu and Kim, Jung Uk and Bae, Sung Ho and Lee, Seungkyu and Hong, Choong Seon}, + journal={arXiv preprint arXiv:2306.14289}, + year={2023} + } diff --git a/ultralytics/docs/zh/models/mobile-sam.md:Zone.Identifier b/ultralytics/docs/zh/models/mobile-sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/mobile-sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/rtdetr.md b/ultralytics/docs/zh/models/rtdetr.md new file mode 100755 index 0000000..e6940d1 --- /dev/null +++ b/ultralytics/docs/zh/models/rtdetr.md @@ -0,0 +1,93 @@ +--- +comments: true +description: ไบ†่งฃ็™พๅบฆ็š„RT-DETR๏ผŒไธ€็งๅŸบไบŽVision Transformers็š„้ซ˜ๆ•ˆ็ตๆดป็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ๏ผŒๅŒ…ๆ‹ฌ้ข„่ฎญ็ปƒๆจกๅž‹็š„็‰นๆ€งๅ’Œไผ˜ๅŠฟใ€‚ +keywords: RT-DETRใ€Baiduใ€Vision Transformersใ€็›ฎๆ ‡ๆฃ€ๆต‹ใ€ๅฎžๆ—ถ่กจ็Žฐใ€CUDAใ€TensorRTใ€IoUๆ„Ÿ็Ÿฅ็š„ๆŸฅ่ฏข้€‰ๆ‹ฉใ€Ultralyticsใ€Python APIใ€PaddlePaddle +--- + +# ็™พๅบฆ็š„RT-DETR๏ผšๅŸบไบŽVision Transformers็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ + +## ๆฆ‚่งˆ + +็™พๅบฆๅผ€ๅ‘็š„ๅฎžๆ—ถๆฃ€ๆต‹ๅ˜ๆขๅ™จ๏ผˆRT-DETR๏ผ‰ๆ˜ฏไธ€็งๅฐ–็ซฏ็š„็ซฏๅˆฐ็ซฏ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ๏ผŒๅ…ทๆœ‰ๅฎžๆ—ถๆ€ง่ƒฝๅ’Œ้ซ˜ๅ‡†็กฎๆ€งใ€‚ๅฎƒๅˆฉ็”จVision Transformers (ViT) ็š„ๅผบๅคงๅŠŸ่ƒฝ๏ผŒ้€š่ฟ‡่งฃ่€ฆๅ†…้ƒจๅฐบๅบฆไบคไบ’ๅ’Œ่ทจๅฐบๅบฆ่žๅˆ๏ผŒ้ซ˜ๆ•ˆๅค„็†ๅคšๅฐบๅบฆ็‰นๅพใ€‚RT-DETR้žๅธธ็ตๆดป้€‚ๅบ”ๅ„็งๆŽจๆ–ญ้€Ÿๅบฆ็š„่ฐƒๆ•ด๏ผŒๆ”ฏๆŒไฝฟ็”จไธๅŒ็š„่งฃ็ ๅ™จๅฑ‚่€Œๆ— ้œ€้‡ๆ–ฐ่ฎญ็ปƒใ€‚่ฏฅๆจกๅž‹ๅœจCUDAๅ’ŒTensorRT็ญ‰ๅŠ ้€ŸๅŽ็ซฏไธŠ่กจ็Žฐๅ‡บ่‰ฒ๏ผŒ่ถ…่ถŠไบ†่ฎธๅคšๅ…ถไป–ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จใ€‚ + +![ๆจกๅž‹็คบไพ‹ๅ›พๅƒ](https://user-images.githubusercontent.com/26833433/238963168-90e8483f-90aa-4eb6-a5e1-0d408b23dd33.png) +**็™พๅบฆ็š„RT-DETRๆฆ‚่งˆ** ็™พๅบฆ็š„RT-DETRๆจกๅž‹ๆžถๆž„ๅ›พๆ˜พ็คบไบ†้ชจๅนฒ็ฝ‘็š„ๆœ€ๅŽไธ‰ไธช้˜ถๆฎต{S3, S4, S5}ไฝœไธบ็ผ–็ ๅ™จ่พ“ๅ…ฅใ€‚้ซ˜ๆ•ˆ็š„ๆททๅˆ็ผ–็ ๅ™จ้€š่ฟ‡ๅ†…้ƒจๅฐบๅบฆ็‰นๅพไบคไบ’๏ผˆAIFI๏ผ‰ๅ’Œ่ทจๅฐบๅบฆ็‰นๅพ่žๅˆๆจกๅ—๏ผˆCCFM๏ผ‰ๅฐ†ๅคšๅฐบๅบฆ็‰นๅพ่ฝฌๆขไธบๅ›พๅƒ็‰นๅพๅบๅˆ—ใ€‚้‡‡็”จIoUๆ„Ÿ็Ÿฅ็š„ๆŸฅ่ฏข้€‰ๆ‹ฉๆฅ้€‰ๆ‹ฉไธ€ๅฎšๆ•ฐ้‡็š„ๅ›พๅƒ็‰นๅพไฝœไธบ่งฃ็ ๅ™จ็š„ๅˆๅง‹ๅฏน่ฑกๆŸฅ่ฏขใ€‚ๆœ€ๅŽ๏ผŒ่งฃ็ ๅ™จ้€š่ฟ‡่พ…ๅŠฉ้ข„ๆต‹ๅคด่ฟญไปฃไผ˜ๅŒ–ๅฏน่ฑกๆŸฅ่ฏข๏ผŒ็”Ÿๆˆๆก†ๅ’Œ็ฝฎไฟกๅบฆๅพ—ๅˆ†ใ€‚๏ผˆ[ๆ–‡็ซ ๆฅๆบ](https://arxiv.org/pdf/2304.08069.pdf)๏ผ‰ + +### ไธป่ฆ็‰น็‚น + +- **้ซ˜ๆ•ˆ็š„ๆททๅˆ็ผ–็ ๅ™จ๏ผš** ็™พๅบฆ็š„RT-DETRไฝฟ็”จ้ซ˜ๆ•ˆ็š„ๆททๅˆ็ผ–็ ๅ™จ๏ผŒ้€š่ฟ‡่งฃ่€ฆๅ†…้ƒจๅฐบๅบฆไบคไบ’ๅ’Œ่ทจๅฐบๅบฆ่žๅˆๆฅๅค„็†ๅคšๅฐบๅบฆ็‰นๅพใ€‚่ฟ™็ง็‹ฌ็‰น็š„Vision Transformersๆžถๆž„้™ไฝŽไบ†่ฎก็ฎ—ๆˆๆœฌ๏ผŒๅฎž็Žฐๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ใ€‚ +- **IoUๆ„Ÿ็Ÿฅ็š„ๆŸฅ่ฏข้€‰ๆ‹ฉ๏ผš** ็™พๅบฆ็š„RT-DETRๅˆฉ็”จIoUๆ„Ÿ็Ÿฅ็š„ๆŸฅ่ฏข้€‰ๆ‹ฉๆ”น่ฟ›ไบ†ๅฏน่ฑกๆŸฅ่ฏข็š„ๅˆๅง‹ๅŒ–ใ€‚่ฟ™ไฝฟๅพ—ๆจกๅž‹่ƒฝๅคŸ่š็„ฆไบŽๅœบๆ™ฏไธญๆœ€็›ธๅ…ณ็š„ๅฏน่ฑก๏ผŒๆ้ซ˜ไบ†ๆฃ€ๆต‹ๅ‡†็กฎๆ€งใ€‚ +- **็ตๆดป็š„ๆŽจๆ–ญ้€Ÿๅบฆ๏ผš** ็™พๅบฆ็š„RT-DETRๆ”ฏๆŒไฝฟ็”จไธๅŒ็š„่งฃ็ ๅ™จๅฑ‚็ตๆดป่ฐƒๆ•ดๆŽจๆ–ญ้€Ÿๅบฆ๏ผŒๆ— ้œ€้‡ๆ–ฐ่ฎญ็ปƒใ€‚่ฟ™็ง้€‚ๅบ”ๆ€งๆœ‰ๅŠฉไบŽๅœจๅ„็งๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅœบๆ™ฏไธญๅฎž้™…ๅบ”็”จใ€‚ + +## ้ข„่ฎญ็ปƒๆจกๅž‹ + +Ultralytics Python APIๆไพ›ไบ†ไธๅŒๅฐบๅบฆ็š„้ข„่ฎญ็ปƒPaddlePaddle RT-DETRๆจกๅž‹๏ผš + +- RT-DETR-L: ๅœจCOCO val2017ไธŠ่พพๅˆฐ53.0%็š„AP๏ผŒๅœจT4 GPUไธŠ่พพๅˆฐ114 FPS +- RT-DETR-X: ๅœจCOCO val2017ไธŠ่พพๅˆฐ54.8%็š„AP๏ผŒๅœจT4 GPUไธŠ่พพๅˆฐ74 FPS + +## ไฝฟ็”จ็คบไพ‹ + 
+ๆญค็คบไพ‹ๆไพ›ไบ†็ฎ€ๅ•็š„RT-DETR่ฎญ็ปƒๅ’ŒๆŽจๆ–ญ็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›ๅ’Œๅ…ถไป–[ๆจกๅผ](../modes/index.md)็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๅ‚้˜…[้ข„ๆต‹](../modes/predict.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ใ€[้ชŒ่ฏ](../modes/val.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)ๆ–‡ๆกฃ้กต้ขใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import RTDETR + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„COCO RT-DETR-lๆจกๅž‹ + model = RTDETR('rtdetr-l.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ไฝฟ็”จCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ๅฏนๆจกๅž‹่ฟ›่กŒ100ไธชepoch็š„่ฎญ็ปƒ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จRT-DETR-lๆจกๅž‹ๅœจ'bus.jpg'ๅ›พๅƒไธŠ่ฟ่กŒๆŽจๆ–ญ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ```bash + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„COCO RT-DETR-lๆจกๅž‹๏ผŒๅนถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ100ไธชepoch็š„่ฎญ็ปƒ + yolo train model=rtdetr-l.pt data=coco8.yaml epochs=100 imgsz=640 + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„COCO RT-DETR-lๆจกๅž‹๏ผŒๅนถๅœจ'bus.jpg'ๅ›พๅƒไธŠ่ฟ่กŒๆŽจๆ–ญ + yolo predict model=rtdetr-l.pt source=path/to/bus.jpg + ``` + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +่ฏฅ่กจๆ ผๆไพ›ไบ†ๅ„ไธชๆจกๅž‹็ฑปๅž‹ใ€ๅ…ทไฝ“็š„้ข„่ฎญ็ปƒๆƒ้‡ใ€ๅ„ไธชๆจกๅž‹ๆ”ฏๆŒ็š„ไปปๅŠกไปฅๅŠๆ”ฏๆŒ็š„ๅ„็งๆจกๅผ๏ผˆ[่ฎญ็ปƒ](../modes/train.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[้ข„ๆต‹](../modes/predict.md)ใ€[ๅฏผๅ‡บ](../modes/export.md)๏ผ‰๏ผŒๅ…ถไธญโœ…่กจ็คบๆ”ฏๆŒใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจๆ–ญ | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|---------------------|---------------|----------------------------|----|----|----|----| +| RT-DETR-Large | `rtdetr-l.pt` | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| RT-DETR-Extra-Large | `rtdetr-x.pt` | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๅฆ‚ๆžœไฝ ๅœจ็ ”็ฉถๆˆ–ๅผ€ๅ‘ไธญไฝฟ็”จไบ†็™พๅบฆ็š„RT-DETR๏ผŒ่ฏทๅผ•็”จ[ๅŽŸๅง‹่ฎบๆ–‡](https://arxiv.org/abs/2304.08069)๏ผš + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{lv2023detrs, + title={DETRs Beat YOLOs on Real-time Object Detection}, + author={Wenyu Lv and Shangliang Xu and Yian Zhao and Guanzhong Wang and Jinman Wei and Cheng Cui and Yuning Du and Qingqing Dang and Yi Liu}, + year={2023}, + eprint={2304.08069}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ๆˆ‘ไปฌ่ฆๆ„Ÿ่ฐข็™พๅบฆๅ’Œ[PaddlePaddle](https://github.com/PaddlePaddle/PaddleDetection)ๅ›ข้˜Ÿไธบ่ฎก็ฎ—ๆœบ่ง†่ง‰็คพๅŒบๅˆ›ๅปบๅ’Œ็ปดๆŠคไบ†่ฟ™ไธชๅฎ่ดต็š„่ต„ๆบใ€‚้žๅธธๆ„Ÿ่ฐขไป–ไปฌไฝฟ็”จๅŸบไบŽVision Transformers็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จRT-DETRๅœจ่ฏฅ้ข†ๅŸŸๅšๅ‡บ็š„่ดก็Œฎใ€‚ + +*keywords: RT-DETRใ€Transformerใ€ViTใ€Vision Transformersใ€Baidu RT-DETRใ€PaddlePaddleใ€Paddle Paddle RT-DETR๏ผŒๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ใ€ๅŸบไบŽVision Transformers็š„็›ฎๆ ‡ๆฃ€ๆต‹ใ€้ข„่ฎญ็ปƒ็š„PaddlePaddle RT-DETRๆจกๅž‹ใ€็™พๅบฆRT-DETR็š„ไฝฟ็”จใ€Ultralytics Python API* diff --git a/ultralytics/docs/zh/models/rtdetr.md:Zone.Identifier b/ultralytics/docs/zh/models/rtdetr.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/rtdetr.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/sam.md b/ultralytics/docs/zh/models/sam.md new file mode 100755 index 0000000..dc4b3f8 --- /dev/null +++ b/ultralytics/docs/zh/models/sam.md @@ -0,0 +1,226 @@ +--- +comments: true +description: ๆŽข็ดขๆฅ่‡ชUltralytics็š„ๆœ€ๅ‰ๆฒฟ็š„Segment Anything Model (SAM)๏ผŒๅฎƒๅฏไปฅ่ฟ›่กŒๅฎžๆ—ถๅ›พๅƒๅˆ†ๅ‰ฒใ€‚ไบ†่งฃๅ…ถๅฏๆ็คบๅˆ†ๅ‰ฒใ€้›ถๆ ทๆœฌๆ€ง่ƒฝไปฅๅŠๅฆ‚ไฝ•ไฝฟ็”จๅฎƒใ€‚ +keywords: Ultralytics๏ผŒๅ›พๅƒๅˆ†ๅ‰ฒ๏ผŒSegment Anything Model๏ผŒSAM๏ผŒSA-1Bๆ•ฐๆฎ้›†๏ผŒๅฎžๆ—ถๆ€ง่ƒฝ๏ผŒ้›ถๆ ทๆœฌ่ฝฌ็งป๏ผŒ็›ฎๆ ‡ๆฃ€ๆต‹๏ผŒๅ›พๅƒๅˆ†ๆž๏ผŒๆœบๅ™จๅญฆไน  +--- + +# Segment Anything Model (SAM) + +ๆฌข่ฟŽๆฅๅˆฐไฝฟ็”จSegment Anything Model (SAM) ่ฟ›่กŒๅ›พๅƒๅˆ†ๅ‰ฒ็š„ๅ‰ๆฒฟใ€‚่ฟ™ไธช้ฉๅ‘ฝๆ€ง็š„ๆจกๅž‹้€š่ฟ‡ๅผ•ๅ…ฅๅฏไปฅๆ็คบ็š„ๅฎžๆ—ถๅ›พๅƒๅˆ†ๅ‰ฒ๏ผŒๆ ‘็ซ‹ไบ†้ข†ๅŸŸๆ–ฐ็š„ๆ ‡ๅ‡†ใ€‚ + +## SAM็š„ไป‹็ป๏ผšSegment Anything Model + +Segment Anything Model (SAM) ๆ˜ฏไธ€็งๅ…ˆ่ฟ›็š„ๅ›พๅƒๅˆ†ๅ‰ฒๆจกๅž‹๏ผŒๅฏไปฅ่ฟ›่กŒๅฏๆ็คบ็š„ๅˆ†ๅ‰ฒ๏ผŒไธบๅ›พๅƒๅˆ†ๆžไปปๅŠกๆไพ›ไบ†ๅ‰ๆ‰€ๆœชๆœ‰็š„็ตๆดปๆ€งใ€‚SAM ๆ˜ฏSegment Anything ้กน็›ฎ็š„ๆ ธๅฟƒ๏ผŒ่ฏฅ้กน็›ฎๅผ•ๅ…ฅไบ†ไธ€็งๆ–ฐ้ข–็š„ๆจกๅž‹ใ€ไปปๅŠกๅ’Œๅ›พๅƒๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†ใ€‚ + +SAM ๅ…ˆ่ฟ›็š„่ฎพ่ฎกๅ…่ฎธๅฎƒๅœจๆฒกๆœ‰ๅ…ˆ้ชŒ็Ÿฅ่ฏ†็š„ๆƒ…ๅ†ตไธ‹้€‚ๅบ”ๆ–ฐ็š„ๅ›พๅƒๅˆ†ๅธƒๅ’ŒไปปๅŠก๏ผŒ่ฟ™ไธช็‰น็‚น่ขซ็งฐไธบ้›ถๆ ทๆœฌ่ฝฌ็งปใ€‚SAM ๅœจๅŒ…ๅซ11ไบฟไธชๆŽฉๆจก็š„SA-1Bๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ่ฎญ็ปƒ๏ผŒ่ฏฅๆ•ฐๆฎ้›†ๅŒ…ๅซ่ถ…่ฟ‡1100ไธ‡ๅผ ็ฒพๅฟƒ็ญ–ๅˆ’็š„ๅ›พๅƒ๏ผŒSAM ๅœจ้›ถๆ ทๆœฌไปปๅŠกไธญ่กจ็Žฐๅ‡บ่‰ฒ๏ผŒ่ฎธๅคšๆƒ…ๅ†ตไธ‹่ถ…่ฟ‡ไบ†ไน‹ๅ‰็š„ๅฎŒๅ…จ็›‘็ฃ็ป“ๆžœใ€‚ + +![ๆ•ฐๆฎ้›†็คบไพ‹ๅ›พๅƒ](https://user-images.githubusercontent.com/26833433/238056229-0e8ffbeb-f81a-477e-a490-aff3d82fd8ce.jpg) +ไปŽๆˆ‘ไปฌๆ–ฐๅผ•ๅ…ฅ็š„SA-1Bๆ•ฐๆฎ้›†ไธญ้€‰ๆ‹ฉ็š„็คบไพ‹ๅ›พๅƒ๏ผŒๆ˜พ็คบไบ†่ฆ†็›–็š„ๆŽฉๆจกใ€‚SA-1BๅŒ…ๅซไบ†1100ไธ‡ไธชๅคšๆ ทๅŒ–ใ€้ซ˜ๅˆ†่พจ็އใ€่ฎธๅฏ็š„ๅ›พๅƒๅ’Œ11ไบฟไธช้ซ˜่ดจ้‡็š„ๅˆ†ๅ‰ฒๆŽฉๆจกใ€‚่ฟ™ไบ›ๆŽฉๆจก็”ฑSAMๅฎŒๅ…จ่‡ชๅŠจๅœฐ่ฟ›่กŒไบ†ๆณจ้‡Š๏ผŒ็ป่ฟ‡ไบบๅทฅ่ฏ„็บงๅ’Œๅคง้‡ๅฎž้ชŒ็š„้ชŒ่ฏ๏ผŒๅฎƒไปฌๅ…ทๆœ‰้ซ˜่ดจ้‡ๅ’Œๅคšๆ ทๆ€งใ€‚ๅ›พๅƒๆŒ‰ๆฏไธชๅ›พๅƒ็š„ๆŽฉๆจกๆ•ฐ้‡่ฟ›่กŒๅˆ†็ป„ไปฅ่ฟ›่กŒๅฏ่ง†ๅŒ–๏ผˆๅนณๅ‡ๆฏไธชๅ›พๅƒๆœ‰โˆผ100ไธชๆŽฉๆจก๏ผ‰ใ€‚ + +## Segment Anything Model (SAM) ็š„ไธป่ฆ็‰น็‚น + +- **ๅฏๆ็คบ็š„ๅˆ†ๅ‰ฒไปปๅŠก**๏ผšSAM ็š„่ฎพ่ฎก่€ƒ่™‘ไบ†ๅฏๆ็คบ็š„ๅˆ†ๅ‰ฒไปปๅŠก๏ผŒๅฎƒๅฏไปฅไปŽ็ป™ๅฎš็š„ๆ็คบไธญ็”Ÿๆˆๆœ‰ๆ•ˆ็š„ๅˆ†ๅ‰ฒๆŽฉๆจก๏ผŒไพ‹ๅฆ‚ๆŒ‡็คบๅฏน่ฑก็š„็ฉบ้—ดๆˆ–ๆ–‡ๆœฌ็บฟ็ดขใ€‚ +- **ๅ…ˆ่ฟ›็š„ๆžถๆž„**๏ผšSegment Anything Model ไฝฟ็”จๅผบๅคง็š„ๅ›พๅƒ็ผ–็ 
ๅ™จใ€ๆ็คบ็ผ–็ ๅ™จๅ’Œ่ฝป้‡็š„ๆŽฉๆจก่งฃ็ ๅ™จใ€‚่ฟ™็ง็‹ฌ็‰น็š„ๆžถๆž„ไฝฟๅพ—ๅˆ†ๆฎตไปปๅŠกไธญ็š„ๆ็คบ็ตๆดปใ€ๅฎžๆ—ถๆŽฉๆจก่ฎก็ฎ—ๅ’Œๆจก็ณŠๆ„Ÿ็Ÿฅๆˆไธบๅฏ่ƒฝใ€‚ +- **SA-1B ๆ•ฐๆฎ้›†**๏ผš็”ฑSegment Anything ้กน็›ฎๅผ•ๅ…ฅ็š„ SA-1B ๆ•ฐๆฎ้›†ๅŒ…ๅซ่ถ…่ฟ‡11ไบฟไธชๆŽฉๆจก็š„1100ไธ‡ๅผ ๅ›พๅƒใ€‚ไฝœไธบ่ฟ„ไปŠไธบๆญขๆœ€ๅคง็š„ๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†๏ผŒๅฎƒไธบ SAM ๆไพ›ไบ†ไธ€ไธชๅคšๆ ทๅŒ–็š„ๅคง่ง„ๆจก่ฎญ็ปƒๆ•ฐๆฎๆบใ€‚ +- **้›ถๆ ทๆœฌๆ€ง่ƒฝ**๏ผšSAM ๅœจๅ„็งๅˆ†ๅ‰ฒไปปๅŠกไธญๅฑ•็Žฐๅ‡บๅ‡บ่‰ฒ็š„้›ถๆ ทๆœฌๆ€ง่ƒฝ๏ผŒไฝฟๅพ—ๅฎƒๆˆไธบไธ€ไธชๅฏไปฅ็ซ‹ๅณไฝฟ็”จ็š„ๅทฅๅ…ท๏ผŒๅฏนไบŽๅ„็งๅบ”็”จๆฅ่ฏด๏ผŒๅฏนๆ็คบๅทฅ็จ‹็š„้œ€ๆฑ‚ๅพˆๅฐใ€‚ + +ๅฆ‚ๆžœๆ‚จๆƒณไบ†่งฃๆ›ดๅคšๅ…ณไบŽSegment Anything Model ๅ’Œ SA-1B ๆ•ฐๆฎ้›†็š„่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏท่ฎฟ้—ฎ[Segment Anything ็ฝ‘็ซ™](https://segment-anything.com)ๅนถๆŸฅ็œ‹็ ”็ฉถ่ฎบๆ–‡[Segment Anything](https://arxiv.org/abs/2304.02643)ใ€‚ + +## ๅฏ็”จๆจกๅž‹ใ€ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆ“ไฝœๆจกๅผ + +่ฟ™ไธช่กจๆ ผๅฑ•็คบไบ†ๅฏ็”จๆจกๅž‹ๅŠๅ…ถ็‰นๅฎš็š„้ข„่ฎญ็ปƒๆƒ้‡๏ผŒๅฎƒไปฌๆ”ฏๆŒ็š„ไปปๅŠก๏ผŒไปฅๅŠๅฎƒไปฌไธŽไธๅŒๆ“ไฝœๆจกๅผ๏ผˆ[Inference](../modes/predict.md)ใ€[Validation](../modes/val.md)ใ€[Training](../modes/train.md) ๅ’Œ [Export](../modes/export.md)๏ผ‰็š„ๅ…ผๅฎนๆ€ง๏ผŒ็”จ โœ… ่กจ็คบๆ”ฏๆŒ็š„ๆจกๅผ๏ผŒ็”จ โŒ ่กจ็คบไธๆ”ฏๆŒ็š„ๆจกๅผใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจๆ–ญ | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|-----------|------------|-----------------------------|----|----|----|----| +| SAM base | `sam_b.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | +| SAM large | `sam_l.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โŒ | โŒ | โœ… | + +## ๅฆ‚ไฝ•ไฝฟ็”จ SAM: ๅ›พๅƒๅˆ†ๅ‰ฒ็š„ๅคšๅŠŸ่ƒฝๅ’Œๅผบๅคง + +Segment Anything Model ๅฏไปฅ็”จไบŽๅคš็งไธ‹ๆธธไปปๅŠก๏ผŒ่ถ…่ถŠ่ฎญ็ปƒๆ•ฐๆฎ็š„่Œƒๅ›ดใ€‚่ฟ™ๅŒ…ๆ‹ฌ่พน็ผ˜ๆฃ€ๆต‹๏ผŒ็›ฎๆ ‡ๆๆกˆ็”Ÿๆˆ๏ผŒๅฎžไพ‹ๅˆ†ๅ‰ฒๅ’Œๅˆๆญฅ็š„ๆ–‡ๆœฌๅˆฐๆŽฉๆจก้ข„ๆต‹ใ€‚้€š่ฟ‡ prompt ๅทฅ็จ‹๏ผŒSAM ๅฏไปฅๅฟซ้€Ÿ้€‚ๅบ”ๆ–ฐ็š„ไปปๅŠกๅ’Œๆ•ฐๆฎๅˆ†ๅธƒ๏ผŒไปฅ้›ถๆ ทๆœฌ็š„ๆ–นๅผ๏ผŒ็กฎ็ซ‹ๅ…ถไฝœไธบๅ›พๅƒๅˆ†ๅ‰ฒ้œ€ๆฑ‚็š„ๅคšๅŠŸ่ƒฝๅ’Œๅผบๅคงๅทฅๅ…ทใ€‚ + +### SAM ้ข„ๆต‹็คบไพ‹ + +!!! Example "ไฝฟ็”จๆ็คบ่ฟ›่กŒๅˆ†ๅ‰ฒ" + + ไฝฟ็”จ็ป™ๅฎš็š„ๆ็คบๅฏนๅ›พๅƒ่ฟ›่กŒๅˆ†ๅ‰ฒใ€‚ + + === "Python" + + ```python + from ultralytics import SAM + + # ๅŠ ่ฝฝๆจกๅž‹ + model = SAM('sam_b.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ไฝฟ็”จ่พน็•Œๆก†ๆ็คบ่ฟ›่กŒๆŽจๆ–ญ + model('ultralytics/assets/zidane.jpg', bboxes=[439, 437, 524, 709]) + + # ไฝฟ็”จ็‚นๆ็คบ่ฟ›่กŒๆŽจๆ–ญ + model('ultralytics/assets/zidane.jpg', points=[900, 370], labels=[1]) + ``` + +!!! Example "ๅˆ†ๅ‰ฒๆ•ดไธชๅ›พๅƒ" + + ๅˆ†ๅ‰ฒๆ•ดไธชๅ›พๅƒใ€‚ + + === "Python" + + ```python + from ultralytics import SAM + + # ๅŠ ่ฝฝๆจกๅž‹ + model = SAM('sam_b.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ่ฟ›่กŒๆŽจๆ–ญ + model('path/to/image.jpg') + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จ SAM ๆจกๅž‹่ฟ›่กŒๆŽจๆ–ญ + yolo predict model=sam_b.pt source=path/to/image.jpg + ``` + +- ่ฟ™้‡Œ็š„้€ป่พ‘ๆ˜ฏ๏ผŒๅฆ‚ๆžœๆ‚จๆฒกๆœ‰ไผ ๅ…ฅไปปไฝ•ๆ็คบ๏ผˆ่พน็•Œๆก†/็‚น/ๆŽฉๆจก๏ผ‰๏ผŒๅˆ™ๅฏนๆ•ดไธชๅ›พๅƒ่ฟ›่กŒๅˆ†ๅ‰ฒใ€‚ + +!!! 
Example "SAMPredictor ็คบไพ‹" + + ่ฟ™็งๆ–นๆณ•ๅฏไปฅ่ฎพ็ฝฎๅ›พๅƒไธ€ๆฌก๏ผŒ็„ถๅŽๅคšๆฌก่ฟ่กŒๆ็คบๆŽจๆ–ญ๏ผŒ่€Œๆ— ้œ€ๅคšๆฌก่ฟ่กŒๅ›พๅƒ็ผ–็ ๅ™จใ€‚ + + === "ๆ็คบๆŽจๆ–ญ" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # ๅˆ›ๅปบ SAMPredictor + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # ่ฎพ็ฝฎๅ›พๅƒ + predictor.set_image("ultralytics/assets/zidane.jpg") # ไฝฟ็”จๅ›พๅƒๆ–‡ไปถ่ฎพ็ฝฎ + predictor.set_image(cv2.imread("ultralytics/assets/zidane.jpg")) # ไฝฟ็”จ np.ndarray ่ฎพ็ฝฎ + results = predictor(bboxes=[439, 437, 524, 709]) + results = predictor(points=[900, 370], labels=[1]) + + # ้‡็ฝฎๅ›พๅƒ + predictor.reset_image() + ``` + + ้€š่ฟ‡้™„ๅŠ ๅ‚ๆ•ฐๅฏนๆ•ดไธชๅ›พๅƒๅˆ†ๅ‰ฒใ€‚ + + === "ๅˆ†ๅ‰ฒๆ•ดไธชๅ›พๅƒ" + + ```python + from ultralytics.models.sam import Predictor as SAMPredictor + + # ๅˆ›ๅปบ SAMPredictor + overrides = dict(conf=0.25, task='segment', mode='predict', imgsz=1024, model="mobile_sam.pt") + predictor = SAMPredictor(overrides=overrides) + + # ไฝฟ็”จ้™„ๅŠ ๅ‚ๆ•ฐ่ฟ›่กŒๅˆ†ๅ‰ฒๆ•ดไธชๅ›พๅƒ + results = predictor(source="ultralytics/assets/zidane.jpg", crop_n_layers=1, points_stride=64) + ``` + +- ๆ›ดๅคšๅ…ณไบŽ`ๅˆ†ๅ‰ฒๆ•ดไธชๅ›พๅƒ`็š„้™„ๅŠ ๅ‚ๆ•ฐ๏ผŒ่ฏทๆŸฅ็œ‹[`Predictor/generate` ๅ‚่€ƒ](../../../reference/models/sam/predict.md)ใ€‚ + +## SAM ไธŽ YOLOv8 ็š„ๅฏนๆฏ” + +ๅœจ่ฟ™้‡Œ๏ผŒๆˆ‘ไปฌๅฐ† Meta ๆœ€ๅฐ็š„ SAM ๆจกๅž‹ SAM-b ไธŽ Ultralytics ็š„ๆœ€ๅฐๅˆ†ๅ‰ฒๆจกๅž‹ [YOLOv8n-seg](../tasks/segment.md) ่ฟ›่กŒๅฏนๆฏ”๏ผš + +| ๆจกๅž‹ | ๅคงๅฐ | ๅ‚ๆ•ฐ | ้€Ÿๅบฆ (CPU) | +|------------------------------------------------|-----------------------|----------------------|------------------------| +| Meta's SAM-b | 358 MB | 94.7 M | 51096 ms/im | +| [MobileSAM](mobile-sam.md) | 40.7 MB | 10.1 M | 46122 ms/im | +| [FastSAM-s](fast-sam.md) with YOLOv8 backbone | 23.7 MB | 11.8 M | 115 ms/im | +| Ultralytics [YOLOv8n-seg](../tasks/segment.md) | **6.7 MB** (็ผฉๅฐไบ†53.4ๅ€) | **3.4 M** (็ผฉๅฐไบ†27.9ๅ€) | **59 ms/im** (ๅŠ ้€Ÿไบ†866ๅ€) | + +่ฟ™ไธชๅฏนๆฏ”ๆ˜พ็คบไบ†ไธๅŒๆจกๅž‹ไน‹้—ด็š„ๆจกๅž‹ๅคงๅฐๅ’Œ้€ŸๅบฆไธŠๆ•ฐ้‡็บง็š„ๅทฎๅผ‚ใ€‚่™ฝ็„ถ SAM ๆไพ›ไบ†่‡ชๅŠจๅˆ†ๅ‰ฒ็š„็‹ฌ็‰น่ƒฝๅŠ›๏ผŒไฝ†ๅฎƒไธๆ˜ฏไธŽ YOLOv8 ๅˆ†ๅ‰ฒๆจกๅž‹็›ดๆŽฅ็ซžไบ‰็š„ไบงๅ“๏ผŒๅŽ่€…ไฝ“็งฏๆ›ดๅฐใ€้€Ÿๅบฆๆ›ดๅฟซใ€ๆ•ˆ็އๆ›ด้ซ˜ใ€‚ + +ๅœจ้…ๅค‡ๆœ‰16GB RAM็š„2023ๅนด Apple M2 MacBook ไธŠ่ฟ›่กŒไบ†ๆต‹่ฏ•ใ€‚่ฆ้‡็Žฐ่ฟ™ไธชๆต‹่ฏ•๏ผš + +!!! Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics import FastSAM, SAM, YOLO + + # ๅˆ†ๆž SAM-b + model = SAM('sam_b.pt') + model.info() + model('ultralytics/assets') + + # ๅˆ†ๆž MobileSAM + model = SAM('mobile_sam.pt') + model.info() + model('ultralytics/assets') + + # ๅˆ†ๆž FastSAM-s + model = FastSAM('FastSAM-s.pt') + model.info() + model('ultralytics/assets') + + # ๅˆ†ๆž YOLOv8n-seg + model = YOLO('yolov8n-seg.pt') + model.info() + model('ultralytics/assets') + ``` + +## ่‡ชๅŠจๆณจ้‡Š๏ผšๅˆ›ๅปบๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†็š„ๅฟซ้€Ÿ่ทฏๅพ„ + +่‡ชๅŠจๆณจ้‡Šๆ˜ฏ SAM ็š„ไธ€ไธชๅ…ณ้”ฎๅŠŸ่ƒฝ๏ผŒๅฎƒๅ…่ฎธ็”จๆˆทไฝฟ็”จ้ข„่ฎญ็ปƒ็š„ๆฃ€ๆต‹ๆจกๅž‹็”Ÿๆˆไธ€ไธช[ๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†](https://docs.ultralytics.com/datasets/segment)ใ€‚่ฟ™ไธชๅŠŸ่ƒฝๅฏไปฅ้€š่ฟ‡่‡ชๅŠจ็”Ÿๆˆๅคง้‡ๅ›พๅƒ็š„ๅ‡†็กฎๆณจ้‡Š๏ผŒ็ป•่ฟ‡่€—ๆ—ถ็š„ๆ‰‹ๅŠจๆ ‡ๆณจ่ฟ‡็จ‹๏ผŒไปŽ่€Œๅฟซ้€Ÿ่Žทๅพ—้ซ˜่ดจ้‡็š„ๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†ใ€‚ + +### ไฝฟ็”จๆฃ€ๆต‹ๆจกๅž‹็”Ÿๆˆๅˆ†ๅ‰ฒๆ•ฐๆฎ้›† + +่ฆไฝฟ็”จUltralyticsๆก†ๆžถๅฏนๆ•ฐๆฎ้›†่ฟ›่กŒ่‡ชๅŠจๆณจ้‡Š๏ผŒๅฏไปฅไฝฟ็”จๅฆ‚ไธ‹ๆ‰€็คบ็š„ `auto_annotate` ๅ‡ฝๆ•ฐ๏ผš + +!!! 
Example "็คบไพ‹" + + === "Python" + ```python + from ultralytics.data.annotator import auto_annotate + + auto_annotate(data="path/to/images", det_model="yolov8x.pt", sam_model='sam_b.pt') + ``` + +| ๅ‚ๆ•ฐ | ็ฑปๅž‹ | ๆ่ฟฐ | ้ป˜่ฎคๅ€ผ | +|------------|---------------|------------------------------------------|--------------| +| data | str | ๅŒ…ๅซ่ฆ่ฟ›่กŒๆณจ้‡Š็š„ๅ›พๅƒ็š„ๆ–‡ไปถๅคน็š„่ทฏๅพ„ใ€‚ | | +| det_model | str, ๅฏ้€‰ | ้ข„่ฎญ็ปƒ็š„ YOLO ๆฃ€ๆต‹ๆจกๅž‹๏ผŒ้ป˜่ฎคไธบ 'yolov8x.pt'ใ€‚ | 'yolov8x.pt' | +| sam_model | str, ๅฏ้€‰ | ้ข„่ฎญ็ปƒ็š„ SAM ๅˆ†ๅ‰ฒๆจกๅž‹๏ผŒ้ป˜่ฎคไธบ 'sam_b.pt'ใ€‚ | 'sam_b.pt' | +| device | str, ๅฏ้€‰ | ๅœจๅ…ถไธŠ่ฟ่กŒๆจกๅž‹็š„่ฎพๅค‡๏ผŒ้ป˜่ฎคไธบ็ฉบๅญ—็ฌฆไธฒ๏ผˆๅฆ‚ๆžœๅฏ็”จ๏ผŒๅˆ™ไธบ CPU ๆˆ– GPU๏ผ‰ใ€‚ | | +| output_dir | str, None, ๅฏ้€‰ | ไฟๅญ˜ๆณจ้‡Š็ป“ๆžœ็š„็›ฎๅฝ•ใ€‚้ป˜่ฎคไธบไธŽ 'data' ็›ฎๅฝ•ๅŒ็บง็š„ 'labels' ็›ฎๅฝ•ใ€‚ | None | + +`auto_annotate` ๅ‡ฝๆ•ฐๆŽฅๅ—ๆ‚จๅ›พๅƒ็š„่ทฏๅพ„๏ผŒๅนถๆไพ›ไบ†ๅฏ้€‰็š„ๅ‚ๆ•ฐ็”จไบŽๆŒ‡ๅฎš้ข„่ฎญ็ปƒ็š„ๆฃ€ๆต‹ๅ’Œ SAM ๅˆ†ๅ‰ฒๆจกๅž‹ใ€่ฟ่กŒๆจกๅž‹็š„่ฎพๅค‡๏ผŒไปฅๅŠไฟๅญ˜ๆณจ้‡Š็ป“ๆžœ็š„่พ“ๅ‡บ็›ฎๅฝ•ใ€‚ + +ไฝฟ็”จ้ข„่ฎญ็ปƒๆจกๅž‹่ฟ›่กŒ่‡ชๅŠจๆณจ้‡Šๅฏไปฅๅคงๅคงๅ‡ๅฐ‘ๅˆ›ๅปบ้ซ˜่ดจ้‡ๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†ๆ‰€้œ€็š„ๆ—ถ้—ดๅ’Œๅทฅไฝœ้‡ใ€‚่ฟ™ไธชๅŠŸ่ƒฝ็‰นๅˆซๅฏนไบŽๅค„็†ๅคง้‡ๅ›พๅƒ้›†ๅˆ็š„็ ”็ฉถไบบๅ‘˜ๅ’Œๅผ€ๅ‘ไบบๅ‘˜้žๅธธๆœ‰็›Š๏ผŒๅ› ไธบๅฎƒๅ…่ฎธไป–ไปฌไธ“ๆณจไบŽๆจกๅž‹็š„ๅผ€ๅ‘ๅ’Œ่ฏ„ไผฐ๏ผŒ่€Œไธๆ˜ฏๆ‰‹ๅŠจๆณจ้‡Šใ€‚ + +## ๅผ•็”จๅ’Œ้ธฃ่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจ็ ”็ฉถๆˆ–ๅผ€ๅ‘ไธญๅ‘็Žฐ SAM ๅฏนๆ‚จๆœ‰็”จ๏ผŒ่ฏท่€ƒ่™‘ๅผ•็”จๆˆ‘ไปฌ็š„่ฎบๆ–‡๏ผš + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @misc{kirillov2023segment, + title={Segment Anything}, + author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollรกr and Ross Girshick}, + year={2023}, + eprint={2304.02643}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +ๆˆ‘ไปฌ่ฆๅ‘ Meta AI ่กจ็คบๆ„Ÿ่ฐข๏ผŒๆ„Ÿ่ฐขไป–ไปฌไธบ่ฎก็ฎ—ๆœบ่ง†่ง‰็คพๅŒบๅˆ›ๅปบๅ’Œ็ปดๆŠคไบ†่ฟ™ไธชๅฎ่ดต็š„่ต„ๆบใ€‚ + +*keywords: Segment Anything๏ผŒSegment Anything Model๏ผŒSAM๏ผŒMeta SAM๏ผŒๅ›พๅƒๅˆ†ๅ‰ฒ๏ผŒๅฏๆ็คบๅˆ†ๅ‰ฒ๏ผŒ้›ถๆ ทๆœฌๆ€ง่ƒฝ๏ผŒSA-1Bๆ•ฐๆฎ้›†๏ผŒๅ…ˆ่ฟ›ๆžถๆž„๏ผŒ่‡ชๅŠจๆณจ้‡Š๏ผŒUltralytics๏ผŒ้ข„่ฎญ็ปƒๆจกๅž‹๏ผŒSAM base๏ผŒSAM large๏ผŒๅฎžไพ‹ๅˆ†ๅ‰ฒ๏ผŒ่ฎก็ฎ—ๆœบ่ง†่ง‰๏ผŒAI๏ผŒไบบๅทฅๆ™บ่ƒฝ๏ผŒๆœบๅ™จๅญฆไน ๏ผŒๆ•ฐๆฎๆณจ้‡Š๏ผŒๅˆ†ๅ‰ฒๆŽฉๆจก๏ผŒๆฃ€ๆต‹ๆจกๅž‹๏ผŒYOLOๆฃ€ๆต‹ๆจกๅž‹๏ผŒbibtex๏ผŒMeta AIใ€‚* diff --git a/ultralytics/docs/zh/models/sam.md:Zone.Identifier b/ultralytics/docs/zh/models/sam.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/sam.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolo-nas.md b/ultralytics/docs/zh/models/yolo-nas.md new file mode 100755 index 0000000..fb2c248 --- /dev/null +++ b/ultralytics/docs/zh/models/yolo-nas.md @@ -0,0 +1,121 @@ +--- +comments: true +description: ๆŽข็ดข่ฏฆ็ป†็š„YOLO-NASๆ–‡ๆกฃ๏ผŒ่ฟ™ๆ˜ฏไธ€ไธชๆ›ด้ซ˜็บง็š„็‰ฉไฝ“ๆฃ€ๆต‹ๆจกๅž‹ใ€‚ไบ†่งฃๅ…ถ็‰น็‚นใ€้ข„่ฎญ็ปƒๆจกๅž‹ใ€ไธŽUltralytics Python API็š„ไฝฟ็”จ็ญ‰ๅ†…ๅฎนใ€‚ +keywords: YOLO-NAS, Deci AI, ็‰ฉไฝ“ๆฃ€ๆต‹, ๆทฑๅบฆๅญฆไน , ็ฅž็ปๆžถๆž„ๆœ็ดข, Ultralytics Python API, YOLOๆจกๅž‹, ้ข„่ฎญ็ปƒๆจกๅž‹, ้‡ๅŒ–, ไผ˜ๅŒ–, COCO, Objects365, Roboflow 100 +--- + +# YOLO-NAS + +## ๆฆ‚่ฟฐ + +็”ฑDeci 
AIๅผ€ๅ‘๏ผŒYOLO-NASๆ˜ฏไธ€็งๅผ€ๅˆ›ๆ€ง็š„็‰ฉไฝ“ๆฃ€ๆต‹ๅŸบ็ก€ๆจกๅž‹ใ€‚ๅฎƒๆ˜ฏๅ…ˆ่ฟ›็š„็ฅž็ปๆžถๆž„ๆœ็ดขๆŠ€ๆœฏ็š„ไบง็‰ฉ๏ผŒ็ป่ฟ‡็ฒพๅฟƒ่ฎพ่ฎกไปฅ่งฃๅ†ณไน‹ๅ‰YOLOๆจกๅž‹็š„ๅฑ€้™ๆ€งใ€‚YOLO-NASๅœจ้‡ๅŒ–ๆ”ฏๆŒๅ’Œๅ‡†็กฎๆ€ง-ๅปถ่ฟŸๆƒ่กกๆ–น้ขๅ–ๅพ—ไบ†้‡ๅคงๆ”น่ฟ›๏ผŒไปฃ่กจไบ†็‰ฉไฝ“ๆฃ€ๆต‹้ข†ๅŸŸ็š„้‡ๅคง้ฃž่ทƒใ€‚ + +![ๆจกๅž‹็คบไพ‹ๅ›พๅƒ](https://learnopencv.com/wp-content/uploads/2023/05/yolo-nas_COCO_map_metrics.png) +**YOLO-NASๆฆ‚่งˆใ€‚** YOLO-NAS้‡‡็”จ้‡ๅŒ–ๆ„Ÿ็Ÿฅๅ—ๅ’Œ้€‰ๆ‹ฉๆ€ง้‡ๅŒ–ๅฎž็Žฐๆœ€ไฝณๆ€ง่ƒฝใ€‚ๅฝ“ๅฐ†ๆจกๅž‹่ฝฌๆขไธบINT8้‡ๅŒ–็‰ˆๆœฌๆ—ถ๏ผŒๆจกๅž‹ไผš็ปๅކ่พƒๅฐ็š„็ฒพๅบฆๆŸๅคฑ๏ผŒๆฏ”ๅ…ถไป–ๆจกๅž‹ๆœ‰ๆ˜พ่‘—ๆ”น่ฟ›ใ€‚่ฟ™ไบ›ๅ…ˆ่ฟ›ๆŠ€ๆœฏไฝฟๅพ—YOLO-NASๆˆไธบๅ…ทๆœ‰ๅ‰ๆ‰€ๆœชๆœ‰็š„็‰ฉไฝ“ๆฃ€ๆต‹่ƒฝๅŠ›ๅ’Œๅ‡บ่‰ฒๆ€ง่ƒฝ็š„ๅ“่ถŠๆžถๆž„ใ€‚ + +### ไธป่ฆ็‰น็‚น + +- **้‡ๅŒ–ๅ‹ๅฅฝๅŸบๆœฌๅ—๏ผš** YOLO-NASๅผ•ๅ…ฅไบ†ไธ€็งๆ–ฐ็š„ๅŸบๆœฌๅ—๏ผŒๅฏน้‡ๅŒ–ๅ‹ๅฅฝ๏ผŒ่งฃๅ†ณไบ†ไน‹ๅ‰YOLOๆจกๅž‹็š„ไธ€ไธช้‡่ฆๅฑ€้™ๆ€งใ€‚ +- **้ซ˜็บง่ฎญ็ปƒๅ’Œ้‡ๅŒ–๏ผš** YOLO-NASๅˆฉ็”จๅ…ˆ่ฟ›็š„่ฎญ็ปƒๆ–นๆกˆๅ’Œ่ฎญ็ปƒๅŽ้‡ๅŒ–ไปฅๆ้ซ˜ๆ€ง่ƒฝใ€‚ +- **AutoNACไผ˜ๅŒ–ๅ’Œ้ข„่ฎญ็ปƒ๏ผš** YOLO-NASๅˆฉ็”จAutoNACไผ˜ๅŒ–๏ผŒๅนถๅœจ่‘—ๅๆ•ฐๆฎ้›†๏ผˆๅฆ‚COCOใ€Objects365ๅ’ŒRoboflow 100๏ผ‰ไธŠ่ฟ›่กŒไบ†้ข„่ฎญ็ปƒใ€‚่ฟ™็ง้ข„่ฎญ็ปƒไฝฟๅ…ถ้žๅธธ้€‚ๅˆ็”Ÿไบง็Žฏๅขƒไธญ็š„ไธ‹ๆธธ็‰ฉไฝ“ๆฃ€ๆต‹ไปปๅŠกใ€‚ + +## ้ข„่ฎญ็ปƒๆจกๅž‹ + +้€š่ฟ‡Ultralyticsๆไพ›็š„้ข„่ฎญ็ปƒYOLO-NASๆจกๅž‹๏ผŒไฝ“้ชŒไธ‹ไธ€ไปฃ็‰ฉไฝ“ๆฃ€ๆต‹็š„ๅผบๅคงๅŠŸ่ƒฝใ€‚่ฟ™ไบ›ๆจกๅž‹ๆ—จๅœจๅœจ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งๆ–น้ขๆไพ›ๅ‡บ่‰ฒ็š„ๆ€ง่ƒฝใ€‚ๆ นๆฎๆ‚จ็š„้œ€ๆฑ‚๏ผŒๅฏไปฅ้€‰ๆ‹ฉๅ„็ง้€‰้กน๏ผš + +| ๆจกๅž‹ | mAP | ๅปถ่ฟŸ๏ผˆms๏ผ‰ | +|------------------|-------|--------| +| YOLO-NAS S | 47.5 | 3.21 | +| YOLO-NAS M | 51.55 | 5.85 | +| YOLO-NAS L | 52.22 | 7.87 | +| YOLO-NAS S INT-8 | 47.03 | 2.36 | +| YOLO-NAS M INT-8 | 51.0 | 3.78 | +| YOLO-NAS L INT-8 | 52.1 | 4.78 | + +ๆฏไธชๆจกๅž‹ๅ˜ไฝ“ๅ‡ๆ—จๅœจๅœจๅ‡่กกๅนณๅ‡็ฒพๅบฆ๏ผˆmAP๏ผ‰ๅ’Œๅปถ่ฟŸไน‹้—ดๆไพ›ๅนณ่กก๏ผŒๅธฎๅŠฉๆ‚จไธบๆ€ง่ƒฝๅ’Œ้€Ÿๅบฆ้ƒฝ่ฟ›่กŒไผ˜ๅŒ–็š„็‰ฉไฝ“ๆฃ€ๆต‹ไปปๅŠกใ€‚ + +## ็”จๆณ•็คบไพ‹ + +้€š่ฟ‡ๆˆ‘ไปฌ็š„`ultralytics` pythonๅŒ…๏ผŒUltralyticsไฝฟๅพ—ๅฐ†YOLO-NASๆจกๅž‹้›†ๆˆๅˆฐๆ‚จ็š„Pythonๅบ”็”จ็จ‹ๅบไธญๅ˜ๅพ—ๅฎนๆ˜“ใ€‚่ฏฅๅŒ…ๆไพ›ไบ†ไธ€ไธช็”จๆˆทๅ‹ๅฅฝ็š„Python API๏ผŒไปฅ็ฎ€ๅŒ–ๆต็จ‹ใ€‚ + +ไปฅไธ‹็คบไพ‹ๅฑ•็คบไบ†ๅฆ‚ไฝ•ไฝฟ็”จ`ultralytics`ๅŒ…ไธŽYOLO-NASๆจกๅž‹่ฟ›่กŒๆŽจ็†ๅ’Œ้ชŒ่ฏ๏ผš + +### ๆŽจ็†ๅ’Œ้ชŒ่ฏ็คบไพ‹ + +่ฟ™ไธช็คบไพ‹ไธญ๏ผŒๆˆ‘ไปฌๅœจCOCO8ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏYOLO-NAS-sใ€‚ + +!!! 
ไพ‹ๅญ + + ไปฅไธ‹็คบไพ‹ไธบYOLO-NASๆไพ›ไบ†็ฎ€ๅ•็š„ๆŽจ็†ๅ’Œ้ชŒ่ฏไปฃ็ ใ€‚ๆœ‰ๅ…ณๅค„็†ๆŽจ็†็ป“ๆžœ็š„ๆ–นๆณ•๏ผŒ่ฏทๅ‚่ง[Predict](../modes/predict.md)ๆจกๅผใ€‚ๆœ‰ๅ…ณไฝฟ็”จๅ…ถไป–ๆจกๅผ็š„YOLO-NAS็š„ๆ–นๆณ•๏ผŒ่ฏทๅ‚่ง[Val](../modes/val.md)ๅ’Œ[Export](../modes/export.md)ใ€‚`ultralytics`ๅŒ…ไธญ็š„YOLO-NASไธๆ”ฏๆŒ่ฎญ็ปƒใ€‚ + + === "Python" + + ๅฏไปฅๅฐ†้ข„่ฎญ็ปƒ็š„PyTorch `*.pt`ๆจกๅž‹ๆ–‡ไปถไผ ้€’็ป™`NAS()`็ฑปไปฅๅœจpythonไธญๅˆ›ๅปบไธ€ไธชๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import NAS + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOไธŠ้ข„่ฎญ็ปƒ็š„YOLO-NAS-sๆจกๅž‹ + model = NAS('yolo_nas_s.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏๆจกๅž‹ + results = model.val(data='coco8.yaml') + + # ไฝฟ็”จYOLO-NAS-sๆจกๅž‹ๅฏน'bus.jpg'ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ๅฏไปฅไฝฟ็”จCLIๅ‘ฝไปค็›ดๆŽฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOไธŠ้ข„่ฎญ็ปƒ็š„YOLO-NAS-sๆจกๅž‹๏ผŒๅนถ้ชŒ่ฏๅ…ถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ็š„ๆ€ง่ƒฝ + yolo val model=yolo_nas_s.pt data=coco8.yaml + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOไธŠ้ข„่ฎญ็ปƒ็š„YOLO-NAS-sๆจกๅž‹๏ผŒๅนถๅฏน'bus.jpg'ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + yolo predict model=yolo_nas_s.pt source=path/to/bus.jpg + ``` + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +ๆˆ‘ไปฌๆไพ›ไบ†ไธ‰็ง็ฑปๅž‹็š„YOLO-NASๆจกๅž‹๏ผšSmall (s)ใ€Medium (m)ๅ’ŒLarge (l)ใ€‚ๆฏ็ง็ฑปๅž‹้ƒฝๆ—จๅœจๆปก่ถณไธๅŒ็š„่ฎก็ฎ—ๅ’Œๆ€ง่ƒฝ้œ€ๆฑ‚๏ผš + +- **YOLO-NAS-s๏ผš** ้’ˆๅฏน่ฎก็ฎ—่ต„ๆบๆœ‰้™ไฝ†ๆ•ˆ็އ่‡ณๅ…ณ้‡่ฆ็š„็Žฏๅขƒ่ฟ›่กŒไบ†ไผ˜ๅŒ–ใ€‚ +- **YOLO-NAS-m๏ผš** ๆไพ›ๅนณ่กก็š„ๆ–นๆณ•๏ผŒ้€‚็”จไบŽๅ…ทๆœ‰ๆ›ด้ซ˜ๅ‡†็กฎๆ€ง็š„้€š็”จ็‰ฉไฝ“ๆฃ€ๆต‹ใ€‚ +- **YOLO-NAS-l๏ผš** ้ขๅ‘้œ€่ฆๆœ€้ซ˜ๅ‡†็กฎๆ€ง็š„ๅœบๆ™ฏ๏ผŒ่ฎก็ฎ—่ต„ๆบไธๆ˜ฏ้™ๅˆถๅ› ็ด ใ€‚ + +ไธ‹้ขๆ˜ฏๆฏไธชๆจกๅž‹็š„่ฏฆ็ป†ไฟกๆฏ๏ผŒๅŒ…ๆ‹ฌๅฎƒไปฌ็š„้ข„่ฎญ็ปƒๆƒ้‡้“พๆŽฅใ€ๆ”ฏๆŒ็š„ไปปๅŠกไปฅๅŠไธŽไธๅŒๆ“ไฝœๆจกๅผ็š„ๅ…ผๅฎนๆ€งใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡้“พๆŽฅ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจ็† | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|------------|-----------------------------------------------------------------------------------------------|----------------------------|----|----|----|----| +| YOLO-NAS-s | [yolo_nas_s.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_s.pt) | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-m | [yolo_nas_m.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_m.pt) | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | +| YOLO-NAS-l | [yolo_nas_l.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolo_nas_l.pt) | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โŒ | โœ… | + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจ็ ”็ฉถๆˆ–ๅผ€ๅ‘ๅทฅไฝœไธญไฝฟ็”จไบ†YOLO-NAS๏ผŒ่ฏทๅผ•็”จSuperGradients๏ผš + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{supergradients, + doi = {10.5281/ZENODO.7789328}, + url = {https://zenodo.org/record/7789328}, + author = {Aharon, Shay and {Louis-Dupont} and {Ofri Masad} and Yurkova, Kate and {Lotem Fridman} and {Lkdci} and Khvedchenya, Eugene and Rubin, Ran and Bagrov, Natan and Tymchenko, Borys and Keren, Tomer and Zhilko, Alexander and {Eran-Deci}}, + title = {Super-Gradients}, + publisher = {GitHub}, + journal = {GitHub repository}, + year = {2021}, + } + ``` + +ๆˆ‘ไปฌๅ‘Deci AI็š„[SuperGradients](https://github.com/Deci-AI/super-gradients/)ๅ›ข้˜Ÿ่กจ็คบๆ„Ÿ่ฐข๏ผŒไป–ไปฌ่‡ดๅŠ›ไบŽๅˆ›ๅปบๅ’Œ็ปดๆŠค่ฟ™ไธชๅฏน่ฎก็ฎ—ๆœบ่ง†่ง‰็คพๅŒบ้žๅธธๆœ‰ไปทๅ€ผ็š„่ต„ๆบใ€‚ๆˆ‘ไปฌ็›ธไฟกYOLO-NASๅ‡ญๅ€Ÿๅ…ถๅˆ›ๆ–ฐ็š„ๆžถๆž„ๅ’Œๅ“่ถŠ็š„็‰ฉไฝ“ๆฃ€ๆต‹่ƒฝๅŠ›๏ผŒๅฐ†ๆˆไธบๅผ€ๅ‘่€…ๅ’Œ็ ”็ฉถไบบๅ‘˜็š„้‡่ฆๅทฅๅ…ทใ€‚ + +*keywords: YOLO-NAS, Deci AI, ็‰ฉไฝ“ๆฃ€ๆต‹, ๆทฑๅบฆๅญฆไน , ็ฅž็ปๆžถๆž„ๆœ็ดข, Ultralytics Python API, YOLOๆจกๅž‹, SuperGradients, ้ข„่ฎญ็ปƒๆจกๅž‹, ้‡ๅŒ–ๅ‹ๅฅฝๅŸบๆœฌๅ—, ้ซ˜็บง่ฎญ็ปƒๆ–นๆกˆ, ่ฎญ็ปƒๅŽ้‡ๅŒ–, AutoNACไผ˜ๅŒ–, COCO, Objects365, Roboflow 100* diff --git a/ultralytics/docs/zh/models/yolo-nas.md:Zone.Identifier b/ultralytics/docs/zh/models/yolo-nas.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolo-nas.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov3.md b/ultralytics/docs/zh/models/yolov3.md new file mode 100755 index 0000000..320c4a2 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov3.md @@ -0,0 +1,98 @@ +--- +comments: true +description: ไบ†่งฃYOLOv3ใ€YOLOv3-Ultralyticsๅ’ŒYOLOv3u็š„ๆฆ‚่ฟฐใ€‚ไบ†่งฃๅฎƒไปฌ็š„ๅ…ณ้”ฎๅŠŸ่ƒฝใ€็”จ้€”ๅ’Œๆ”ฏๆŒ็š„็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกใ€‚ +keywords: YOLOv3ใ€YOLOv3-Ultralyticsใ€YOLOv3uใ€็›ฎๆ ‡ๆฃ€ๆต‹ใ€ๆŽจ็†ใ€่ฎญ็ปƒใ€Ultralytics +--- + +# YOLOv3ใ€YOLOv3-Ultralyticsๅ’ŒYOLOv3u + +## ๆฆ‚่ฟฐ + +ๆœฌๆ–‡ไป‹็ปไบ†ไธ‰ไธช็ดงๅฏ†็›ธๅ…ณ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹๏ผŒๅˆ†ๅˆซๆ˜ฏ[YOLOv3](https://pjreddie.com/darknet/yolo/)ใ€[YOLOv3-Ultralytics](https://github.com/ultralytics/yolov3)ๅ’Œ[YOLOv3u](https://github.com/ultralytics/ultralytics)ใ€‚ + +1. **YOLOv3๏ผš** ่ฟ™ๆ˜ฏ็ฌฌไธ‰็‰ˆ You Only Look Once (YOLO) ็›ฎๆ ‡ๆฃ€ๆต‹็ฎ—ๆณ•ใ€‚YOLOv3 ๅœจๅ‰ไฝœ็š„ๅŸบ็ก€ไธŠ่ฟ›่กŒไบ†ๆ”น่ฟ›๏ผŒๅผ•ๅ…ฅไบ†ๅคšๅฐบๅบฆ้ข„ๆต‹ๅ’Œไธ‰็งไธๅŒๅฐบๅฏธ็š„ๆฃ€ๆต‹ๆ ธ๏ผŒๆ้ซ˜ไบ†ๆฃ€ๆต‹ๅ‡†็กฎๆ€งใ€‚ + +2. **YOLOv3-Ultralytics๏ผš** ่ฟ™ๆ˜ฏ Ultralytics ๅฏน YOLOv3 ๆจกๅž‹็š„ๅฎž็Žฐใ€‚ๅฎƒๅœจๅค็Žฐไบ†ๅŽŸๅง‹ YOLOv3 ๆžถๆž„็š„ๅŸบ็ก€ไธŠ๏ผŒๆไพ›ไบ†้ขๅค–็š„ๅŠŸ่ƒฝ๏ผŒๅฆ‚ๅฏนๆ›ดๅคš้ข„่ฎญ็ปƒๆจกๅž‹็š„ๆ”ฏๆŒๅ’Œๆ›ด็ฎ€ๅ•็š„ๅฎšๅˆถ้€‰้กนใ€‚ + +3. 
**YOLOv3u๏ผš** ่ฟ™ๆ˜ฏ YOLOv3-Ultralytics ็š„ๆ›ดๆ–ฐ็‰ˆๆœฌ๏ผŒๅฎƒๅผ•ๅ…ฅไบ† YOLOv8 ๆจกๅž‹ไธญไฝฟ็”จ็š„ๆ— ้”šใ€ๆ— ็‰ฉไฝ“ๆ€ง่ƒฝๅˆ†็ฆปๅคดใ€‚YOLOv3u ไฟ็•™ไบ† YOLOv3 ็š„ไธปๅนฒๅ’Œ้ขˆ้ƒจๆžถๆž„๏ผŒไฝ†ไฝฟ็”จไบ†ๆฅ่‡ช YOLOv8 ็š„ๆ›ดๆ–ฐๆฃ€ๆต‹ๅคดใ€‚ + +![Ultralytics YOLOv3](https://raw.githubusercontent.com/ultralytics/assets/main/yolov3/banner-yolov3.png) + +## ๅ…ณ้”ฎๅŠŸ่ƒฝ + +- **YOLOv3๏ผš** ๅผ•ๅ…ฅไบ†ไธ‰็งไธๅŒๅฐบๅบฆ็š„ๆฃ€ๆต‹๏ผŒ้‡‡็”จไบ†ไธ‰็งไธๅŒๅฐบๅฏธ็š„ๆฃ€ๆต‹ๆ ธ๏ผš13x13ใ€26x26 ๅ’Œ 52x52ใ€‚่ฟ™ๆ˜พ่‘—ๆ้ซ˜ไบ†ๅฏนไธๅŒๅคงๅฐๅฏน่ฑก็š„ๆฃ€ๆต‹ๅ‡†็กฎๆ€งใ€‚ๆญคๅค–๏ผŒYOLOv3 ่ฟ˜ไธบๆฏไธช่พน็•Œๆก†ๆทปๅŠ ไบ†ๅคšๆ ‡็ญพ้ข„ๆต‹ๅ’Œๆ›ดๅฅฝ็š„็‰นๅพๆๅ–็ฝ‘็ปœใ€‚ + +- **YOLOv3-Ultralytics๏ผš** Ultralytics ๅฏน YOLOv3 ็š„ๅฎž็Žฐๅ…ทๆœ‰ไธŽๅŽŸๅง‹ๆจกๅž‹็›ธๅŒ็š„ๆ€ง่ƒฝ๏ผŒไฝ†ๅขžๅŠ ไบ†ๅฏนๆ›ดๅคš้ข„่ฎญ็ปƒๆจกๅž‹ใ€้ขๅค–่ฎญ็ปƒๆ–นๆณ•ๅ’Œๆ›ด็ฎ€ๅ•็š„ๅฎšๅˆถ้€‰้กน็š„ๆ”ฏๆŒใ€‚่ฟ™ไฝฟๅพ—ๅฎƒๅœจๅฎž้™…ๅบ”็”จไธญๆ›ดๅŠ ้€š็”จๅ’Œๆ˜“็”จใ€‚ + +- **YOLOv3u๏ผš** ่ฟ™ไธชๆ›ดๆ–ฐ็š„ๆจกๅž‹้‡‡็”จไบ†ๆฅ่‡ช YOLOv8 ็š„ๆ— ้”šใ€ๆ— ็‰ฉไฝ“ๆ€ง่ƒฝๅˆ†็ฆปๅคดใ€‚้€š่ฟ‡ๆถˆ้™ค้ข„ๅฎšไน‰็š„้”šๆก†ๅ’Œ็‰ฉไฝ“ๆ€ง่ƒฝๅˆ†ๆ•ฐ็š„้œ€ๆฑ‚๏ผŒๆฃ€ๆต‹ๅคด่ฎพ่ฎกๅฏไปฅๆ้ซ˜ๆจกๅž‹ๅฏนไธๅŒๅคงๅฐๅ’Œๅฝข็Šถ็š„ๅฏน่ฑก็š„ๆฃ€ๆต‹่ƒฝๅŠ›ใ€‚่ฟ™ไฝฟๅพ— YOLOv3u ๅœจ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกไธญๆ›ดๅŠ ๅผบๅคงๅ’Œๅ‡†็กฎใ€‚ + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +YOLOv3 ็ณปๅˆ—๏ผŒๅŒ…ๆ‹ฌ YOLOv3ใ€YOLOv3-Ultralytics ๅ’Œ YOLOv3u๏ผŒไธ“้—จ็”จไบŽ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกใ€‚่ฟ™ไบ›ๆจกๅž‹ไปฅๅœจๅ„็งๅฎž้™…ๅœบๆ™ฏไธญๅนณ่กกๅ‡†็กฎๆ€งๅ’Œ้€Ÿๅบฆ่€Œ้—ปๅใ€‚ๆฏไธชๅ˜ไฝ“้ƒฝๆไพ›ไบ†็‹ฌ็‰น็š„ๅŠŸ่ƒฝๅ’Œไผ˜ๅŒ–๏ผŒไฝฟๅ…ถ้€‚็”จไบŽๅ„็งๅบ”็”จๅœบๆ™ฏใ€‚ + +่ฟ™ไธ‰ไธชๆจกๅž‹้ƒฝๆ”ฏๆŒไธ€ๅฅ—ๅ…จ้ข็š„ๆจกๅผ๏ผŒ็กฎไฟๅœจๆจกๅž‹้ƒจ็ฝฒๅ’Œๅผ€ๅ‘็š„ๅ„ไธช้˜ถๆฎตๅ…ทๅค‡ๅคš็งๅŠŸ่ƒฝใ€‚่ฟ™ไบ›ๆจกๅผๅŒ…ๆ‹ฌ[ๆŽจ็†](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)๏ผŒไธบ็”จๆˆทๆไพ›ไบ†ๆœ‰ๆ•ˆ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅฎŒๆ•ดๅทฅๅ…ทใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจ็† | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|--------------------|----------------------------|----|----|----|----| +| YOLOv3 | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3-Ultralytics | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv3u | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +่ฏฅ่กจๆ ผๆไพ›ไบ†ๆฏไธช YOLOv3 ๅ˜ไฝ“็š„่ƒฝๅŠ›ไธ€่งˆ๏ผŒ็ชๆ˜พไบ†ๅฎƒไปฌ็š„ๅคšๅŠŸ่ƒฝๆ€งๅ’Œ้€‚็”จๆ€ง๏ผŒไปฅ็”จไบŽ็›ฎๆ ‡ๆฃ€ๆต‹ๅทฅไฝœๆต็จ‹ไธญ็š„ๅ„็งไปปๅŠกๅ’Œๆ“ไฝœๆจกๅผใ€‚ + +## ็”จๆณ•็คบไพ‹ + +ไปฅไธ‹็คบไพ‹ๆไพ›ไบ†็ฎ€ๅ•็š„ YOLOv3 ่ฎญ็ปƒๅ’ŒๆŽจ็†็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›ๅ’Œๅ…ถไป–ๆจกๅผ็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๅ‚้˜… [Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md) ๅ’Œ [Export](../modes/export.md) ๆ–‡ๆกฃ้กต้ขใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ๅฏไปฅๅฐ†้ข„ๅ…ˆ่ฎญ็ปƒ็š„ PyTorch `*.pt` ๆจกๅž‹ไปฅๅŠ้…็ฝฎ `*.yaml` ๆ–‡ไปถไผ ้€’็ป™ `YOLO()` ็ฑป๏ผŒไปฅๅœจ Python ไธญๅˆ›ๅปบๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝไธ€ไธช็ป่ฟ‡ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv3n ๆจกๅž‹ + model = YOLO('yolov3n.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ๅœจ COCO8 ็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒๆจกๅž‹100ไธชepoch + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จ YOLOv3n ๆจกๅž‹ๅฏน 'bus.jpg' ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ๅฏไปฅ็›ดๆŽฅไฝฟ็”จๅ‘ฝไปค่กŒ็•Œ้ข (CLI) ๆฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ๅŠ ่ฝฝไธ€ไธช็ป่ฟ‡ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv3n ๆจกๅž‹๏ผŒๅนถๅœจ COCO8 ็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒ100ไธชepoch + yolo train model=yolov3n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ๅŠ ่ฝฝไธ€ไธช็ป่ฟ‡ COCO ้ข„่ฎญ็ปƒ็š„ YOLOv3n ๆจกๅž‹๏ผŒๅนถๅฏน 'bus.jpg' ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + yolo predict model=yolov3n.pt source=path/to/bus.jpg + ``` + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจ็ ”็ฉถไธญไฝฟ็”จ YOLOv3๏ผŒ่ฏทๅผ•็”จๅŽŸๅง‹็š„ YOLO ่ฎบๆ–‡ๅ’Œ Ultralytics ็š„ YOLOv3 ไป“ๅบ“๏ผš + +!!! Quote "" + + === "BibTeX" + + ```bibtex + @article{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Redmon, Joseph and Farhadi, Ali}, + journal={arXiv preprint arXiv:1804.02767}, + year={2018} + } + ``` + +ๆ„Ÿ่ฐข Joseph Redmon ๅ’Œ Ali Farhadi ๅผ€ๅ‘ไบ†ๅŽŸๅง‹็š„ YOLOv3 ๆจกๅž‹ใ€‚ diff --git a/ultralytics/docs/zh/models/yolov3.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov3.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov3.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov4.md b/ultralytics/docs/zh/models/yolov4.md new file mode 100755 index 0000000..9091229 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov4.md @@ -0,0 +1,71 @@ +--- +comments: true +description: ้€š่ฟ‡ๆˆ‘ไปฌ่ฏฆ็ป†็š„YOLOv4ๆŒ‡ๅ—๏ผŒๆŽข็ดขๆœ€ๅ…ˆ่ฟ›็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จใ€‚ไบ†่งฃๅ…ถๅปบ็ญ‘ไบฎ็‚น๏ผŒๅˆ›ๆ–ฐๅŠŸ่ƒฝๅ’Œๅบ”็”จ็คบไพ‹ใ€‚ +keywords: ultralytics, YOLOv4, ็›ฎๆ ‡ๆฃ€ๆต‹, ็ฅž็ป็ฝ‘็ปœ, ๅฎžๆ—ถๆฃ€ๆต‹, ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ, ๆœบๅ™จๅญฆไน  +--- + +# YOLOv4๏ผš้ซ˜้€Ÿๅ’Œ็ฒพ็กฎ็š„็›ฎๆ ‡ๆฃ€ๆต‹ + +ๆฌข่ฟŽๆฅๅˆฐUltralyticsๅ…ณไบŽYOLOv4็š„ๆ–‡ๆกฃ้กต้ข๏ผŒYOLOv4ๆ˜ฏ็”ฑAlexey BochkovskiyไบŽ2020ๅนดๅœจ [https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet) ๅ‘ๅธƒ็š„ๆœ€ๅ…ˆ่ฟ›็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จใ€‚YOLOv4ๆ—จๅœจๆไพ›้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง็š„ๆœ€ไฝณๅนณ่กก๏ผŒไฝฟๅ…ถๆˆไธบ่ฎธๅคšๅบ”็”จ็š„ไผ˜็ง€้€‰ๆ‹ฉใ€‚ + +![YOLOv4ๆžถๆž„ๅ›พ](https://user-images.githubusercontent.com/26833433/246185689-530b7fe8-737b-4bb0-b5dd-de10ef5aface.png) +**YOLOv4ๆžถๆž„ๅ›พ**ใ€‚ๅฑ•็คบไบ†YOLOv4็š„ๅคๆ‚็ฝ‘็ปœ่ฎพ่ฎก๏ผŒๅŒ…ๆ‹ฌไธปๅนฒ๏ผŒ้ขˆ้ƒจๅ’Œๅคด้ƒจ็ป„ไปถไปฅๅŠๅฎƒไปฌ็›ธไบ’่ฟžๆŽฅ็š„ๅฑ‚๏ผŒไปฅๅฎž็Žฐๆœ€ไฝณ็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ใ€‚ + +## ็ฎ€ไป‹ + +YOLOv4ไปฃ่กจYou Only Look Once็‰ˆๆœฌ4ใ€‚ๅฎƒๆ˜ฏไธบ่งฃๅ†ณไน‹ๅ‰YOLO็‰ˆๆœฌ๏ผˆๅฆ‚[YOLOv3](yolov3.md)๏ผ‰ๅ’Œๅ…ถไป–็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹็š„ๅฑ€้™ๆ€ง่€Œๅผ€ๅ‘็š„ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹ใ€‚ไธŽๅ…ถไป–ๅŸบไบŽๅท็งฏ็ฅž็ป็ฝ‘็ปœ๏ผˆCNN๏ผ‰็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จไธๅŒ๏ผŒYOLOv4ไธไป…้€‚็”จไบŽๆŽจ่็ณป็ปŸ๏ผŒ่ฟ˜ๅฏ็”จไบŽ็‹ฌ็ซ‹็š„่ฟ›็จ‹็ฎก็†ๅ’Œๅ‡ๅฐ‘ไบบๅทฅ่พ“ๅ…ฅใ€‚ๅฎƒๅœจไผ ็ปŸๅ›พๅฝขๅค„็†ๅ•ๅ…ƒ๏ผˆGPU๏ผ‰ไธŠ็š„ๆ“ไฝœๅฏไปฅไปฅ็ปๆตŽๅฎžๆƒ ็š„ไปทๆ ผ่ฟ›่กŒๅคง่ง„ๆจกไฝฟ็”จ๏ผŒๅนถไธ”่ฎพ่ฎกไธบๅœจๅธธ่ง„GPUไธŠๅฎžๆ—ถๅทฅไฝœ๏ผŒไป…้œ€่ฆไธ€ไธช่ฟ™ๆ ท็š„GPU่ฟ›่กŒ่ฎญ็ปƒใ€‚ + +## ๆžถๆž„ + +YOLOv4ๅˆฉ็”จไบ†ๅ‡ ไธชๅˆ›ๆ–ฐๅŠŸ่ƒฝ๏ผŒ่ฟ™ไบ›ๅŠŸ่ƒฝๅ…ฑๅŒไผ˜ๅŒ–ๅ…ถๆ€ง่ƒฝใ€‚่ฟ™ไบ›ๅŠŸ่ƒฝๅŒ…ๆ‹ฌๅŠ 
ๆƒๆฎ‹ๅทฎ่ฟžๆŽฅ๏ผˆWRC๏ผ‰๏ผŒ่ทจ้˜ถๆฎต้ƒจๅˆ†่ฟžๆŽฅ๏ผˆCSP๏ผ‰๏ผŒไบคๅ‰mini-Batchๅฝ’ไธ€ๅŒ–๏ผˆCmBN๏ผ‰๏ผŒ่‡ช้€‚ๅบ”ๅฏนๆŠ—่ฎญ็ปƒ๏ผˆSAT๏ผ‰๏ผŒMishๆฟ€ๆดปๅ‡ฝๆ•ฐ๏ผŒMosaicๆ•ฐๆฎๅขžๅผบ๏ผŒDropBlockๆญฃๅˆ™ๅŒ–ๅ’ŒCIoUๆŸๅคฑใ€‚่ฟ™ไบ›ๅŠŸ่ƒฝ็š„็ป„ๅˆๅฏไปฅๅฎž็Žฐๆœ€ๅ…ˆ่ฟ›็š„็ป“ๆžœใ€‚ + +ๅ…ธๅž‹็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ็”ฑๅ‡ ไธช้ƒจๅˆ†็ป„ๆˆ๏ผŒๅŒ…ๆ‹ฌ่พ“ๅ…ฅใ€ไธปๅนฒใ€้ขˆ้ƒจๅ’Œๅคด้ƒจใ€‚YOLOv4็š„ไธปๅนฒๆ˜ฏๅœจImageNetไธŠ้ข„่ฎญ็ปƒ็š„๏ผŒ็”จไบŽ้ข„ๆต‹ๅฏน่ฑก็š„็ฑปๅˆซๅ’Œ่พน็•Œๆก†ใ€‚ไธปๅนฒๅฏไปฅๆฅ่‡ชๅคšไธชๆจกๅž‹๏ผŒๅŒ…ๆ‹ฌVGGใ€ResNetใ€ResNeXtๆˆ–DenseNetใ€‚ๆฃ€ๆต‹ๅ™จ็š„้ขˆ้ƒจ้ƒจๅˆ†็”จไบŽไปŽไธๅŒ้˜ถๆฎตๆ”ถ้›†็‰นๅพๅ›พ๏ผŒ้€šๅธธๅŒ…ๆ‹ฌๅ‡ ๆก่‡ชๅบ•ๅ‘ไธŠ็š„่ทฏๅพ„ๅ’Œๅ‡ ๆก่‡ช้กถๅ‘ไธ‹็š„่ทฏๅพ„ใ€‚ๅคด้ƒจ้ƒจๅˆ†็”จไบŽ่ฟ›่กŒๆœ€็ปˆ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๅ’Œๅˆ†็ฑปใ€‚ + +## ๅ…่ดน่ต ๅ“ + +YOLOv4่ฟ˜ไฝฟ็”จไบ†็งฐไธบโ€œๅ…่ดน่ต ๅ“โ€็š„ๆ–นๆณ•๏ผŒ่ฟ™ไบ›ๆ–นๆณ•ๅœจ่ฎญ็ปƒ่ฟ‡็จ‹ไธญๆ้ซ˜ๆจกๅž‹็š„ๅ‡†็กฎๆ€ง๏ผŒ่€ŒไธๅขžๅŠ ๆŽจ็†ๆˆๆœฌใ€‚ๆ•ฐๆฎๅขžๅผบๆ˜ฏ็›ฎๆ ‡ๆฃ€ๆต‹ไธญๅธธ็”จ็š„ไธ€็งๅ…่ดน่ต ๅ“ๆŠ€ๆœฏ๏ผŒๅฎƒๅขžๅŠ ไบ†่พ“ๅ…ฅๅ›พๅƒ็š„ๅ˜ๅผ‚ๆ€ง๏ผŒไปฅๆ้ซ˜ๆจกๅž‹็š„้ฒๆฃ’ๆ€งใ€‚ไธ€ไบ›ๆ•ฐๆฎๅขžๅผบ็š„ไพ‹ๅญๅŒ…ๆ‹ฌๅ…‰ๅบฆๅคฑ็œŸ๏ผˆ่ฐƒๆ•ดๅ›พๅƒ็š„ไบฎๅบฆใ€ๅฏนๆฏ”ๅบฆใ€่‰ฒ่ฐƒใ€้ฅฑๅ’Œๅบฆๅ’Œๅ™ช้Ÿณ๏ผ‰ๅ’Œๅ‡ ไฝ•ๅคฑ็œŸ๏ผˆๆทปๅŠ ้šๆœบ็ผฉๆ”พใ€่ฃๅ‰ชใ€็ฟป่ฝฌๅ’Œๆ—‹่ฝฌ๏ผ‰ใ€‚่ฟ™ไบ›ๆŠ€ๆœฏๅธฎๅŠฉๆจกๅž‹ๆ›ดๅฅฝๅœฐๅบ”ๅฏนไธๅŒ็ฑปๅž‹็š„ๅ›พๅƒใ€‚ + +## ็‰น็‚นๅ’Œๆ€ง่ƒฝ + +YOLOv4่ขซ่ฎพ่ฎกไธบๅœจ็›ฎๆ ‡ๆฃ€ๆต‹ไธญๅ…ทๆœ‰ๆœ€ไฝณ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งใ€‚YOLOv4็š„ๆžถๆž„ๅŒ…ๆ‹ฌCSPDarknet53ไฝœไธบไธปๅนฒ๏ผŒPANetไฝœไธบ้ขˆ้ƒจ๏ผŒไปฅๅŠYOLOv3ไฝœไธบๆฃ€ๆต‹ๅคดใ€‚่ฟ™็ง่ฎพ่ฎกไฝฟๅพ—YOLOv4่ƒฝๅคŸไปฅไปคไบบๅฐ่ฑกๆทฑๅˆป็š„้€Ÿๅบฆ่ฟ›่กŒ็›ฎๆ ‡ๆฃ€ๆต‹๏ผŒ้€‚็”จไบŽๅฎžๆ—ถๅบ”็”จใ€‚YOLOv4ๅœจๅ‡†็กฎๆ€งๆ–น้ขไนŸ่กจ็Žฐๅ‡บ่‰ฒ๏ผŒๅœจ็›ฎๆ ‡ๆฃ€ๆต‹ๅŸบๅ‡†ๆต‹่ฏ•ไธญๅ–ๅพ—ไบ†ๆœ€ๅ…ˆ่ฟ›็š„็ป“ๆžœใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ๆˆช่‡ณๆ’ฐๅ†™ๆœฌๆ–‡ๆ—ถ๏ผŒUltralyticsๅฝ“ๅ‰ไธๆ”ฏๆŒYOLOv4ๆจกๅž‹ใ€‚ๅ› ๆญค๏ผŒไปปไฝ•ๆœ‰ๅ…ด่ถฃไฝฟ็”จYOLOv4็š„็”จๆˆท้œ€่ฆ็›ดๆŽฅๅ‚่€ƒYOLOv4 GitHubๅญ˜ๅ‚จๅบ“ไธญ็š„ๅฎ‰่ฃ…ๅ’Œไฝฟ็”จ่ฏดๆ˜Žใ€‚ + +ไปฅไธ‹ๆ˜ฏไฝฟ็”จYOLOv4็š„ๅ…ธๅž‹ๆญฅ้ชค็š„็ฎ€่ฆๆฆ‚่ฟฐ๏ผš + +1. ่ฎฟ้—ฎYOLOv4 GitHubๅญ˜ๅ‚จๅบ“๏ผš[https://github.com/AlexeyAB/darknet](https://github.com/AlexeyAB/darknet)ใ€‚ + +2. ๆŒ‰็…งREADMEๆ–‡ไปถไธญๆไพ›็š„่ฏดๆ˜Ž่ฟ›่กŒๅฎ‰่ฃ…ใ€‚่ฟ™้€šๅธธๆถ‰ๅŠๅ…‹้š†ๅญ˜ๅ‚จๅบ“๏ผŒๅฎ‰่ฃ…ๅฟ…่ฆ็š„ไพ่ต–้กน๏ผŒๅนถ่ฎพ็ฝฎไปปไฝ•ๅฟ…่ฆ็š„็Žฏๅขƒๅ˜้‡ใ€‚ + +3. ๅฎ‰่ฃ…ๅฎŒๆˆๅŽ๏ผŒๆ‚จๅฏไปฅๆ นๆฎๅญ˜ๅ‚จๅบ“ๆไพ›็š„ไฝฟ็”จ่ฏดๆ˜Ž่ฎญ็ปƒๅ’Œไฝฟ็”จๆจกๅž‹ใ€‚่ฟ™้€šๅธธๆถ‰ๅŠๅ‡†ๅค‡ๆ‚จ็š„ๆ•ฐๆฎ้›†ใ€้…็ฝฎๆจกๅž‹ๅ‚ๆ•ฐใ€่ฎญ็ปƒๆจกๅž‹๏ผŒ็„ถๅŽไฝฟ็”จ่ฎญ็ปƒๅฅฝ็š„ๆจกๅž‹่ฟ›่กŒ็›ฎๆ ‡ๆฃ€ๆต‹ใ€‚ + +่ฏทๆณจๆ„๏ผŒๅ…ทไฝ“็š„ๆญฅ้ชคๅฏ่ƒฝๅ› ๆ‚จ็š„็‰นๅฎš็”จไพ‹ๅ’ŒYOLOv4ๅญ˜ๅ‚จๅบ“็š„ๅฝ“ๅ‰็Šถๆ€่€Œๆœ‰ๆ‰€ไธๅŒใ€‚ๅ› ๆญค๏ผŒๅผบ็ƒˆๅปบ่ฎฎ็›ดๆŽฅๅ‚่€ƒYOLOv4 GitHubๅญ˜ๅ‚จๅบ“ไธญๆไพ›็š„่ฏดๆ˜Žใ€‚ + +ๅฏนไบŽUltralyticsไธๆ”ฏๆŒYOLOv4็š„ๆƒ…ๅ†ต๏ผŒๆˆ‘ไปฌๆ„Ÿๅˆฐ้žๅธธๆŠฑๆญ‰๏ผŒๆˆ‘ไปฌๅฐ†ๅŠชๅŠ›ๆ›ดๆ–ฐๆœฌๆ–‡ๆกฃ๏ผŒไปฅๅŒ…ๆ‹ฌไฝฟ็”จUltralyticsๆ”ฏๆŒ็š„YOLOv4็š„็คบไพ‹ใ€‚ + +## ็ป“่ฎบ + +YOLOv4ๆ˜ฏไธ€็งๅผบๅคง่€Œ้ซ˜ๆ•ˆ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹๏ผŒๅฎƒๅœจ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งไน‹้—ดๅ–ๅพ—ไบ†ๅนณ่กกใ€‚ๅฎƒๅœจ่ฎญ็ปƒ่ฟ‡็จ‹ไธญไฝฟ็”จ็‹ฌ็‰น็š„ๅŠŸ่ƒฝๅ’Œๅ…่ดน่ต ๅ“ๆŠ€ๆœฏ๏ผŒไฝฟๅ…ถๅœจๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกไธญ่กจ็Žฐๅ‡บ่‰ฒใ€‚ไปปไฝ•ๅ…ทๅค‡ๅธธ่ง„GPU็š„ไบบ้ƒฝๅฏไปฅ่ฟ›่กŒYOLOv4็š„่ฎญ็ปƒๅ’Œไฝฟ็”จ๏ผŒไฝฟๅ…ถๅฏนไบŽๅ„็งๅบ”็”จๅ…ทๆœ‰ๅฏ่ฎฟ้—ฎๆ€งๅ’Œๅฎž็”จๆ€งใ€‚ + +## ๅผ•ๆ–‡ๅ’Œ่‡ด่ฐข + +ๆˆ‘ไปฌ่ฆๆ„Ÿ่ฐขYOLOv4็š„ไฝœ่€…ๅฏนๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹้ข†ๅŸŸ็š„้‡่ฆ่ดก็Œฎ๏ผš + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @misc{bochkovskiy2020yolov4, + title={YOLOv4: Optimal Speed and Accuracy of Object Detection}, + author={Alexey Bochkovskiy and Chien-Yao Wang and Hong-Yuan Mark Liao}, + year={2020}, + eprint={2004.10934}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + +YOLOv4็š„ๅŽŸๅง‹่ฎบๆ–‡ๅฏไปฅๅœจ[arXiv](https://arxiv.org/abs/2004.10934)ไธŠๆ‰พๅˆฐใ€‚ไฝœ่€…ๅทฒ็ปๅ…ฌๅผ€ไบ†ไป–ไปฌ็š„ๅทฅไฝœ๏ผŒไปฃ็ ๅบ“ๅฏไปฅๅœจ[GitHub](https://github.com/AlexeyAB/darknet)ไธŠ่Žทๅ–ใ€‚ๆˆ‘ไปฌ่ตž่ตไป–ไปฌๅœจๆŽจๅŠจ่ฏฅ้ข†ๅŸŸๆ–น้ข็š„ๅŠชๅŠ›๏ผŒๅนถไฝฟไป–ไปฌ็š„ๅทฅไฝœๅฏนๅนฟๅคง็คพๅŒบไบง็”Ÿๅฝฑๅ“ใ€‚ diff --git a/ultralytics/docs/zh/models/yolov4.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov4.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov4.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov5.md b/ultralytics/docs/zh/models/yolov5.md new file mode 100755 index 0000000..10c5339 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov5.md @@ -0,0 +1,113 @@ +--- +comments: true +description: ๅ‘็ŽฐYOLOv5u๏ผŒๅฎƒๆ˜ฏYOLOv5ๆจกๅž‹็š„ๆ”น่ฟ›็‰ˆๆœฌ๏ผŒๅ…ทๆœ‰ๆ›ดๅฅฝ็š„ๅ‡†็กฎๆ€งๅ’Œ้€Ÿๅบฆไน‹้—ด็š„ๅนณ่กก๏ผŒๅนถไธบๅ„็ง็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกๆไพ›ไบ†่ฎธๅคš้ข„่ฎญ็ปƒๆจกๅž‹ใ€‚ +keywords: YOLOv5uใ€็›ฎๆ ‡ๆฃ€ๆต‹ใ€้ข„่ฎญ็ปƒๆจกๅž‹ใ€Ultralyticsใ€ๆŽจๆ–ญใ€้ชŒ่ฏใ€YOLOv5ใ€YOLOv8ใ€ๆ— ้”š็‚นใ€ๆ— ็‰ฉไฝ“ๆฃ€ๆต‹ใ€ๅฎžๆ—ถๅบ”็”จใ€ๆœบๅ™จๅญฆไน  +--- + +# YOLOv5 + +## ๆฆ‚่ฟฐ + +YOLOv5uๆ˜ฏ็›ฎๆ ‡ๆฃ€ๆต‹ๆ–นๆณ•็š„ไธ€็ง่ฟ›ๆญฅใ€‚YOLOv5uๆบไบŽUltralyticsๅผ€ๅ‘็š„[YOLOv5](https://github.com/ultralytics/yolov5)ๆจกๅž‹็š„ๅŸบ็ก€ๆžถๆž„๏ผŒๅฎƒ้›†ๆˆไบ†ๆ— ้”š็‚นใ€ๆ— ็‰ฉไฝ“ๆฃ€ๆต‹ๅˆ†็ฆปๅคด็š„ๆ–ฐ็‰นๆ€ง๏ผŒ่ฟ™ไธ€็‰นๆ€งๅœจ[YOLOv8](yolov8.md)ๆจกๅž‹ไธญ้ฆ–ๆฌกๅผ•ๅ…ฅใ€‚้€š่ฟ‡้‡‡็”จ่ฟ™็ง้€‚ๅบ”ๆ€งๆ›ดๅผบ็š„ๆฃ€ๆต‹ๆœบๅˆถ๏ผŒYOLOv5uๆ”น่ฟ›ไบ†ๆจกๅž‹็š„ๆžถๆž„๏ผŒไปŽ่€Œๅœจ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกไธญๅฎž็Žฐไบ†ๆ›ดๅฅฝ็š„ๅ‡†็กฎๆ€งๅ’Œ้€Ÿๅบฆ็š„ๅนณ่กกใ€‚ๆ นๆฎๅฎž่ฏ็ป“ๆžœๅ’Œๅ…ถ่ก็”Ÿ็‰นๆ€ง๏ผŒYOLOv5uไธบ้‚ฃไบ›ๅœจ็ ”็ฉถๅ’Œๅฎž้™…ๅบ”็”จไธญๅฏปๆฑ‚ๅผบๅคง่งฃๅ†ณๆ–นๆกˆ็š„ไบบๆไพ›ไบ†ไธ€็ง้ซ˜ๆ•ˆ็š„้€‰ๆ‹ฉใ€‚ + +![Ultralytics YOLOv5](https://raw.githubusercontent.com/ultralytics/assets/main/yolov5/v70/splash.png) + +## ไธป่ฆ็‰นๆ€ง + +- **ๆ— ้”š็‚นๅˆ†็ฆปUltralyticsๅคด้ƒจ**: ไผ ็ปŸ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹ไพ้ ้ข„ๅฎšไน‰็š„้”š็‚นๆก†ๆฅ้ข„ๆต‹็›ฎๆ ‡ไฝ็ฝฎ๏ผŒ่€ŒYOLOv5uๆ”นๅ˜ไบ†่ฟ™็งๆ–นๆณ•ใ€‚้‡‡็”จๆ— ้”š็‚นๅˆ†็ฆปUltralyticsๅคด้ƒจ็š„ๆ–นๅผ๏ผŒๅฎƒ็กฎไฟไบ†ๆ›ด็ตๆดปใ€้€‚ๅบ”ๆ€งๆ›ดๅผบ็š„ๆฃ€ๆต‹ๆœบๅˆถ๏ผŒไปŽ่€Œๅœจๅ„็งๅœบๆ™ฏไธญๆ้ซ˜ไบ†ๆ€ง่ƒฝใ€‚ + +- **ไผ˜ๅŒ–็š„ๅ‡†็กฎๆ€งๅ’Œ้€Ÿๅบฆไน‹้—ด็š„ๅนณ่กก**: ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง้€šๅธธๆ˜ฏ็›ธไบ’ๅˆถ็บฆ็š„ใ€‚ไฝ†ๆ˜ฏYOLOv5uๆŒ‘ๆˆ˜ไบ†่ฟ™็งๅนณ่กกใ€‚ๅฎƒๆไพ›ไบ†ไธ€ไธชๆ กๅ‡†ๅนณ่กก๏ผŒ็กฎไฟๅœจไฟๆŒๅ‡†็กฎๆ€ง็š„ๅŒๆ—ถๅฎž็Žฐๅฎžๆ—ถๆฃ€ๆต‹ใ€‚่ฟ™ไธ€็‰นๆ€งๅฏนไบŽ้œ€่ฆๅฟซ้€Ÿๅ“ๅบ”็š„ๅบ”็”จ้žๅธธ้‡่ฆ๏ผŒๆฏ”ๅฆ‚่‡ชๅŠจ้ฉพ้ฉถ่ฝฆ่พ†ใ€ๆœบๅ™จไบบๅ’Œๅฎžๆ—ถ่ง†้ข‘ๅˆ†ๆžใ€‚ + +- **ไธฐๅฏŒ็š„้ข„่ฎญ็ปƒๆจกๅž‹**: YOLOv5uๆไพ›ไบ†ๅคš็ง้ข„่ฎญ็ปƒๆจกๅž‹ใ€‚ๆ— ่ฎบไฝ ไธ“ๆณจไบŽๆŽจๆ–ญใ€้ชŒ่ฏ่ฟ˜ๆ˜ฏ่ฎญ็ปƒ๏ผŒ้ƒฝๆœ‰ไธ€ไธช้‡่บซๅฎšๅˆถ็š„ๆจกๅž‹็ญ‰ๅพ…็€ไฝ ใ€‚่ฟ™็งๅคšๆ ทๆ€ง็กฎไฟไฝ ไธไป…ไป…ไฝฟ็”จโ€œไธ€ๅˆ€ๅˆ‡โ€็š„่งฃๅ†ณๆ–นๆกˆ๏ผŒ่€Œๆ˜ฏไฝฟ็”จไธ€ไธชไธ“้—จไธบไฝ ็š„็‹ฌ็‰นๆŒ‘ๆˆ˜่ฟ›่กŒไบ†็ฒพ็ป†่ฐƒๆ•ด็š„ๆจกๅž‹ใ€‚ + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +ๅ…ทๆœ‰ๅ„็ง้ข„่ฎญ็ปƒๆƒ้‡็š„YOLOv5uๆจกๅž‹ๅœจ[็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md)ไปปๅŠกไธญ่กจ็Žฐๅ‡บ่‰ฒใ€‚ๅฎƒไปฌๆ”ฏๆŒๅ…จ้ข็š„ๆจกๅผ๏ผŒ้€‚็”จไบŽไปŽๅผ€ๅ‘ๅˆฐ้ƒจ็ฝฒ็š„ๅ„็งๅบ”็”จๅœบๆ™ฏใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ไปปๅŠก | ๆŽจๆ–ญ | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | 
+|---------|-----------------------------------------------------------------------------------------------------------------------------|----------------------------|----|----|----|----| +| YOLOv5u | `yolov5nu`, `yolov5su`, `yolov5mu`, `yolov5lu`, `yolov5xu`, `yolov5n6u`, `yolov5s6u`, `yolov5m6u`, `yolov5l6u`, `yolov5x6u` | [็›ฎๆ ‡ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +่ฏฅ่กจ่ฏฆ็ป†ไป‹็ปไบ†YOLOv5uๆจกๅž‹็š„ๅ˜ไฝ“๏ผŒ็ชๅ‡บไบ†ๅฎƒไปฌๅœจ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกๅ’Œๅ„็งๆ“ไฝœๆจกๅผ๏ผˆๅฆ‚[ๆŽจๆ–ญ](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)๏ผ‰ๆ–น้ข็š„้€‚็”จๆ€งใ€‚่ฟ™็งๅ…จ้ข็š„ๆ”ฏๆŒ็กฎไฟ็”จๆˆทๅฏไปฅๅ……ๅˆ†ๅ‘ๆŒฅYOLOv5uๆจกๅž‹ๅœจๅ„็ง็›ฎๆ ‡ๆฃ€ๆต‹ๅœบๆ™ฏไธญ็š„่ƒฝๅŠ›ใ€‚ + +## ๆ€ง่ƒฝๆŒ‡ๆ ‡ + +!!! Performance + + === "ๆฃ€ๆต‹" + + ่ฏทๅ‚้˜…[ๆฃ€ๆต‹ๆ–‡ๆกฃ](https://docs.ultralytics.com/tasks/detect/)๏ผŒไปฅไบ†่งฃๅœจ[COCO](https://docs.ultralytics.com/datasets/detect/coco/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ80ไธช้ข„่ฎญ็ปƒ็ฑปๅˆซใ€‚ + + | ๆจกๅž‹ | YAML | ๅคงๅฐ
๏ผˆๅƒ็ด ๏ผ‰ | mAPval
50-95 | ้€Ÿๅบฆ
CPU ONNX
๏ผˆๆฏซ็ง’๏ผ‰ | ้€Ÿๅบฆ
A100 TensorRT
๏ผˆๆฏซ็ง’๏ผ‰ | ๅ‚ๆ•ฐๆ•ฐ
๏ผˆ็™พไธ‡๏ผ‰ | FLOPs
๏ผˆๅไบฟ๏ผ‰ | + |---------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|-----------------------|----------------------|--------------------------------|-------------------------------------|--------------------|-------------------| + | [yolov5nu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5nu.pt) | [yolov5n.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 34.3 | 73.6 | 1.06 | 2.6 | 7.7 | + | [yolov5su.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5su.pt) | [yolov5s.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 43.0 | 120.7 | 1.27 | 9.1 | 24.0 | + | [yolov5mu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5mu.pt) | [yolov5m.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 49.0 | 233.9 | 1.86 | 25.1 | 64.2 | + | [yolov5lu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5lu.pt) | [yolov5l.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 52.2 | 408.4 | 2.50 | 53.2 | 135.0 | + | [yolov5xu.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5xu.pt) | [yolov5x.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5.yaml) | 640 | 53.2 | 763.2 | 3.81 | 97.2 | 246.4 | + | | | | | | | | | + | [yolov5n6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5n6u.pt) | [yolov5n6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 42.1 | 211.0 | 1.83 | 4.3 | 7.8 | + | [yolov5s6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5s6u.pt) | [yolov5s6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 48.6 | 422.6 | 2.34 | 15.3 | 24.6 | + | [yolov5m6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5m6u.pt) | [yolov5m6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 53.6 | 810.9 | 4.36 | 41.2 | 65.7 | + | [yolov5l6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5l6u.pt) | [yolov5l6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 55.7 | 1470.9 | 5.47 | 86.1 | 137.4 | + | [yolov5x6u.pt](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov5x6u.pt) | [yolov5x6.yaml](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/models/v5/yolov5-p6.yaml) | 1280 | 56.8 | 2436.5 | 8.98 | 155.4 | 250.7 | + +## ไฝฟ็”จ็คบไพ‹ + +่ฟ™ไธช็คบไพ‹ๆไพ›ไบ†YOLOv5่ฎญ็ปƒๅ’ŒๆŽจๆ–ญ็š„็ฎ€ๅ•็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›ๅ’Œๅ…ถไป–[ๆจกๅผ](../modes/index.md)็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๅ‚้˜…[้ข„ๆต‹](../modes/predict.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ใ€[้ชŒ่ฏ](../modes/val.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)็š„ๆ–‡ๆกฃ้กต้ขใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + PyTorch้ข„่ฎญ็ปƒ็š„`*.pt`ๆจกๅž‹๏ผŒไปฅๅŠ้…็ฝฎ`*.yaml`ๆ–‡ไปถๅฏไปฅไผ ้€’็ป™`YOLO()`็ฑป๏ผŒไปฅๅœจpythonไธญๅˆ›ๅปบไธ€ไธชๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ็š„YOLOv5nๆจกๅž‹ + model = YOLO('yolov5n.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ไฝฟ็”จCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ๅฏนๆจกๅž‹่ฟ›่กŒ100ไธชๆ—ถๆœŸ็š„่ฎญ็ปƒ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จYOLOv5nๆจกๅž‹ๅฏน'bus.jpg'ๅ›พๅƒ่ฟ›่กŒๆŽจๆ–ญ + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ๅฏไปฅไฝฟ็”จCLIๅ‘ฝไปค็›ดๆŽฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ็š„YOLOv5nๆจกๅž‹๏ผŒๅนถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ100ไธชๆ—ถๆœŸ็š„่ฎญ็ปƒ + yolo train model=yolov5n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCOๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ็š„YOLOv5nๆจกๅž‹๏ผŒๅนถๅœจ'bus.jpg'ๅ›พๅƒไธŠ่ฟ›่กŒๆŽจๆ–ญ + yolo predict model=yolov5n.pt source=path/to/bus.jpg + ``` + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจๆ‚จ็š„็ ”็ฉถไธญไฝฟ็”จไบ†YOLOv5ๆˆ–YOLOv5u๏ผŒ่ฏทๅผ•็”จUltralytics็š„YOLOv5ๅญ˜ๅ‚จๅบ“๏ผŒๅผ•็”จๆ–นๅผๅฆ‚ไธ‹๏ผš + +!!! Quote "" + + === "BibTeX" + ```bibtex + @software{yolov5, + title = {Ultralytics YOLOv5}, + author = {Glenn Jocher}, + year = {2020}, + version = {7.0}, + license = {AGPL-3.0}, + url = {https://github.com/ultralytics/yolov5}, + doi = {10.5281/zenodo.3908559}, + orcid = {0000-0001-5950-6979} + } + ``` + +่ฏทๆณจๆ„๏ผŒYOLOv5ๆจกๅž‹ๆไพ›[AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)ๅ’Œ[ไผไธš](https://ultralytics.com/license)่ฎธๅฏ่ฏใ€‚ diff --git a/ultralytics/docs/zh/models/yolov5.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov5.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov5.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov6.md b/ultralytics/docs/zh/models/yolov6.md new file mode 100755 index 0000000..d0ac2e3 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov6.md @@ -0,0 +1,107 @@ +--- +comments: true +description: ๆŽข็ดข็พŽๅ›ขYOLOv6๏ผŒไธ€็งๅœจ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งไน‹้—ดๅ–ๅพ—ๅนณ่กก็š„ๆœ€ๅ…ˆ่ฟ›็š„็‰ฉไฝ“ๆฃ€ๆต‹ๆจกๅž‹ใ€‚ๆทฑๅ…ฅไบ†่งฃๅŠŸ่ƒฝใ€้ข„่ฎญ็ปƒๆจกๅž‹ๅ’ŒPythonไฝฟ็”จๆ–นๆณ•ใ€‚ +keywords: ็พŽๅ›ขYOLOv6ใ€็‰ฉไฝ“ๆฃ€ๆต‹ใ€Ultralyticsใ€YOLOv6ๆ–‡ๆกฃใ€ๅŒๅ‘่ฟžๆŽฅใ€้”š่พ…ๅŠฉ่ฎญ็ปƒใ€้ข„่ฎญ็ปƒๆจกๅž‹ใ€ๅฎžๆ—ถๅบ”็”จ +--- + +# ็พŽๅ›ขYOLOv6 + +## ๆฆ‚่ฟฐ + +[็พŽๅ›ข](https://about.meituan.com/) YOLOv6ๆ˜ฏไธ€็งๆœ€ๅ…ˆ่ฟ›็š„็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ๏ผŒ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งๅ…ผๅ…ท๏ผŒๆˆไธบๅฎžๆ—ถๅบ”็”จ็š„็ƒญ้—จ้€‰ๆ‹ฉใ€‚่ฏฅๆจกๅž‹ๅœจๆžถๆž„ๅ’Œ่ฎญ็ปƒๆ–นๆกˆไธŠๅผ•ๅ…ฅไบ†ๅ‡ ้กน้‡่ฆๆ”น่ฟ›๏ผŒๅŒ…ๆ‹ฌๅŒๅ‘่ฟžๆŽฅๆจกๅ—๏ผˆBiC๏ผ‰ใ€้”š่พ…ๅŠฉ่ฎญ็ปƒ๏ผˆAAT๏ผ‰็ญ–็•ฅไปฅๅŠๆ”น่ฟ›ไบ†็š„ไธปๅนฒๅ’Œ้ขˆ้ƒจ่ฎพ่ฎก๏ผŒไฝฟๅ…ถๅœจCOCOๆ•ฐๆฎ้›†ไธŠ่พพๅˆฐไบ†ๆœ€ๅ…ˆ่ฟ›็š„ๅ‡†็กฎๆ€งใ€‚ + +![็พŽๅ›ขYOLOv6](https://user-images.githubusercontent.com/26833433/240750495-4da954ce-8b3b-41c4-8afd-ddb74361d3c2.png) +![ๆจกๅž‹็คบไพ‹ๅ›พ็‰‡](https://user-images.githubusercontent.com/26833433/240750557-3e9ec4f0-0598-49a8-83ea-f33c91eb6d68.png) +**YOLOv6ๆฆ‚่ฟฐใ€‚** ๆจกๅž‹ๆžถๆž„ๅ›พๆ˜พ็คบไบ†็ป่ฟ‡้‡ๆ–ฐ่ฎพ่ฎก็š„็ฝ‘็ปœ็ป„ไปถๅ’Œ่ฎญ็ปƒ็ญ–็•ฅ๏ผŒ่ฟ™ไบ›็ญ–็•ฅๅฏผ่‡ดไบ†ๆ˜พ่‘—็š„ๆ€ง่ƒฝๆๅ‡ใ€‚๏ผˆa๏ผ‰YOLOv6็š„้ขˆ้ƒจ๏ผˆNๅ’ŒS๏ผ‰ใ€‚๏ผˆb๏ผ‰BiCๆจกๅ—็š„็ป“ๆž„ใ€‚๏ผˆc๏ผ‰SimCSPSPPFๅ—ใ€‚([ๆฅๆบ](https://arxiv.org/pdf/2301.05586.pdf)). 
+ +### ไธป่ฆๅŠŸ่ƒฝ + +- **ๅŒๅ‘่ฟžๆŽฅ๏ผˆBiC๏ผ‰ๆจกๅ—๏ผš** YOLOv6ๅœจๆฃ€ๆต‹ๅ™จ็š„้ขˆ้ƒจๅผ•ๅ…ฅไบ†ๅŒๅ‘่ฟžๆŽฅ๏ผˆBiC๏ผ‰ๆจกๅ—๏ผŒๅขžๅผบไบ†ๅฎšไฝไฟกๅท๏ผŒๆไพ›ไบ†ๆ€ง่ƒฝๅขž็›Š๏ผŒๅนถไธ”ๅ‡ ไนŽๆฒกๆœ‰้™ไฝŽ้€Ÿๅบฆใ€‚ +- **้”š่พ…ๅŠฉ่ฎญ็ปƒ๏ผˆAAT๏ผ‰็ญ–็•ฅ๏ผš** ่ฏฅๆจกๅž‹ๆๅ‡บไบ†้”š่พ…ๅŠฉ่ฎญ็ปƒ๏ผˆAAT๏ผ‰ไปฅไบซๅ—ๅŸบไบŽ้”š็‚นๅ’Œๆ— ้”š็‚น่Œƒไพ‹็š„ๅŒ้‡ไผ˜ๅŠฟ๏ผŒๅŒๆ—ถไธๅฝฑๅ“ๆŽจ็†ๆ•ˆ็އใ€‚ +- **ๅขžๅผบ็š„ไธปๅนฒๅ’Œ้ขˆ้ƒจ่ฎพ่ฎก๏ผš** ้€š่ฟ‡ๅœจไธปๅนฒๅ’Œ้ขˆ้ƒจไธญๅขžๅŠ ไธ€ไธช้˜ถๆฎต๏ผŒ่ฏฅๆจกๅž‹ๅœจ้ซ˜ๅˆ†่พจ็އ่พ“ๅ…ฅไธ‹ๅœจCOCOๆ•ฐๆฎ้›†ไธŠๅฎž็Žฐไบ†ๆœ€ๅ…ˆ่ฟ›็š„ๆ€ง่ƒฝใ€‚ +- **่‡ชๆˆ‘่’ธ้ฆ็ญ–็•ฅ๏ผš** ๅฎžๆ–ฝไบ†ไธ€็งๆ–ฐ็š„่‡ชๆˆ‘่’ธ้ฆ็ญ–็•ฅ๏ผŒไปฅๆๅ‡YOLOv6็š„่พƒๅฐๆจกๅž‹็š„ๆ€ง่ƒฝ๏ผŒๅœจ่ฎญ็ปƒ่ฟ‡็จ‹ไธญๅขžๅผบ่พ…ๅŠฉๅ›žๅฝ’ๅˆ†ๆ”ฏ๏ผŒๅนถๅœจๆŽจ็†่ฟ‡็จ‹ไธญๅฐ†ๅ…ถๅˆ ้™ค๏ผŒไปฅ้ฟๅ…ๆ˜Žๆ˜พ็š„้€Ÿๅบฆไธ‹้™ใ€‚ + +## ๆ€ง่ƒฝๆŒ‡ๆ ‡ + +YOLOv6ๆไพ›ไบ†ๅ…ทๆœ‰ไธๅŒๅฐบๅบฆ็š„ๅ„็ง้ข„่ฎญ็ปƒๆจกๅž‹๏ผš + +- YOLOv6-N๏ผšๅœจNVIDIA Tesla T4 GPUไธŠ๏ผŒCOCO val2017ไธŠ็š„APไธบ37.5%๏ผŒๅธง็އไธบ1187 FPSใ€‚ +- YOLOv6-S๏ผšAPไธบ45.0%๏ผŒๅธง็އไธบ484 FPSใ€‚ +- YOLOv6-M๏ผšAPไธบ50.0%๏ผŒๅธง็އไธบ226 FPSใ€‚ +- YOLOv6-L๏ผšAPไธบ52.8%๏ผŒๅธง็އไธบ116 FPSใ€‚ +- YOLOv6-L6๏ผšๅฎžๆ—ถๅœบๆ™ฏไธญ็š„ๆœ€ๅ…ˆ่ฟ›ๅ‡†็กฎๆ€งใ€‚ + +YOLOv6่ฟ˜ๆไพ›ไบ†้€‚็”จไบŽไธๅŒ็ฒพๅบฆๅ’Œ็งปๅŠจๅนณๅฐ็š„้‡ๅŒ–ๆจกๅž‹ใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ไปฅไธ‹็คบไพ‹ๆไพ›ไบ†็ฎ€ๅ•็š„YOLOv6่ฎญ็ปƒๅ’ŒๆŽจ็†็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›็คบไพ‹ๅ’Œๅ…ถไป–[ๆจกๅผ](../modes/index.md)็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๅ‚้˜…[Predict](../modes/predict.md)ใ€[Train](../modes/train.md)ใ€[Val](../modes/val.md)ๅ’Œ[Export](../modes/export.md)็š„ๆ–‡ๆกฃ้กต้ขใ€‚ + +!!! ไพ‹ๅญ + + === "Python" + + ๅœจPythonไธญ๏ผŒๅฏไปฅๅฐ†PyTorch้ข„่ฎญ็ปƒ็š„`*.pt`ๆจกๅž‹ไปฅๅŠ้…็ฝฎๆ–‡ไปถ`*.yaml`ไผ ้€’็ป™`YOLO()`็ฑป๏ผŒไปฅๅˆ›ๅปบไธ€ไธชๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import YOLO + + # ไปŽๅคดๅผ€ๅง‹ๆž„ๅปบไธ€ไธชYOLOv6nๆจกๅž‹ + model = YOLO('yolov6n.yaml') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ไฝฟ็”จCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ๅฏนๆจกๅž‹่ฟ›่กŒ100ไธชepoch็š„่ฎญ็ปƒ + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จYOLOv6nๆจกๅž‹ๅฏน'bus.jpg'ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ไนŸๅฏไปฅไฝฟ็”จCLIๅ‘ฝไปค็›ดๆŽฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ไปŽๅคดๅผ€ๅง‹ๆž„ๅปบYOLOv6nๆจกๅž‹๏ผŒๅนถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ100ไธชepoch็š„่ฎญ็ปƒ + yolo train model=yolov6n.yaml data=coco8.yaml epochs=100 imgsz=640 + + # ไปŽๅคดๅผ€ๅง‹ๆž„ๅปบYOLOv6nๆจกๅž‹๏ผŒๅนถๅฏน'bus.jpg'ๅ›พๅƒ่ฟ›่กŒๆŽจ็† + yolo predict model=yolov6n.yaml source=path/to/bus.jpg + ``` + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +YOLOv6็ณปๅˆ—ๆไพ›ไบ†ไธ€็ณปๅˆ—ๆจกๅž‹๏ผŒๆฏไธชๆจกๅž‹้ƒฝ้’ˆๅฏน้ซ˜ๆ€ง่ƒฝ[็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md)่ฟ›่กŒไบ†ไผ˜ๅŒ–ใ€‚่ฟ™ไบ›ๆจกๅž‹้€‚็”จไบŽๅ„็ง่ฎก็ฎ—้œ€ๆฑ‚ๅ’Œๅ‡†็กฎๆ€ง่ฆๆฑ‚๏ผŒไฝฟๅ…ถๅœจๅนฟๆณ›็š„ๅบ”็”จไธญๅ…ทๅค‡ๅคšๆ ทๆ€งใ€‚ + +| ๆจกๅž‹็ฑปๅž‹ | ้ข„่ฎญ็ปƒๆƒ้‡ | ๆ”ฏๆŒ็š„ไปปๅŠก | ๆŽจ็† | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|-----------|----------------|----------------------------|----|----|----|----| +| YOLOv6-N | `yolov6-n.pt` | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-S | `yolov6-s.pt` | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-M | `yolov6-m.pt` | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L | `yolov6-l.pt` | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv6-L6 | `yolov6-l6.pt` | [็‰ฉไฝ“ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… | + +่ฟ™ไธช่กจๆ 
ผ่ฏฆ็ป†ไป‹็ปไบ†YOLOv6ๆจกๅž‹็š„ๅ„ไธชๅ˜ไฝ“๏ผŒ็ชๅ‡บไบ†ๅฎƒไปฌๅœจ็‰ฉไฝ“ๆฃ€ๆต‹ไปปๅŠกไธญ็š„่ƒฝๅŠ›ไปฅๅŠๅฎƒไปฌไธŽๅ„็งๆ“ไฝœๆจกๅผ๏ผˆๅฆ‚[ๆŽจ็†](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)๏ผ‰็š„ๅ…ผๅฎนๆ€งใ€‚่ฟ™็งๅ…จ้ข็š„ๆ”ฏๆŒ็กฎไฟ็”จๆˆทๅฏไปฅๅœจๅ„็ง็‰ฉไฝ“ๆฃ€ๆต‹ๅœบๆ™ฏไธญๅ……ๅˆ†ๅˆฉ็”จYOLOv6ๆจกๅž‹็š„่ƒฝๅŠ›ใ€‚ + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๆˆ‘ไปฌ่ฆๆ„Ÿ่ฐข่ฟ™ไบ›ไฝœ่€…ๅœจๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹้ข†ๅŸŸ็š„้‡่ฆ่ดก็Œฎ๏ผš + +!!! ๅผ•ๆ–‡ "" + + === "BibTeX" + + ```bibtex + @misc{li2023yolov6, + title={YOLOv6 v3.0: A Full-Scale Reloading}, + author={Chuyi Li and Lulu Li and Yifei Geng and Hongliang Jiang and Meng Cheng and Bo Zhang and Zaidan Ke and Xiaoming Xu and Xiangxiang Chu}, + year={2023}, + eprint={2301.05586}, + archivePrefix={arXiv}, + primaryClass={cs.CV} + } + ``` + + ๅŽŸๅง‹็š„YOLOv6่ฎบๆ–‡ๅฏไปฅๅœจ[arXiv](https://arxiv.org/abs/2301.05586)ไธŠๆ‰พๅˆฐใ€‚ไฝœ่€…ๅทฒ็ปๅฐ†ไป–ไปฌ็š„ไฝœๅ“ๅ…ฌๅผ€๏ผŒๅนถไธ”ไปฃ็ ๅฏไปฅๅœจ[GitHub](https://github.com/meituan/YOLOv6)ไธŠ่ฎฟ้—ฎใ€‚ๆˆ‘ไปฌๅฏนไป–ไปฌๅœจๆŽจๅŠจ่ฏฅ้ข†ๅŸŸ็š„ๅŠชๅŠ›ไปฅๅŠไฝฟไป–ไปฌ็š„ๅทฅไฝœไธบๆ›ดๅนฟๆณ›็š„็คพๅŒบๆ‰€ๆŽฅ่งฆๅˆฐ็š„ๅŠชๅŠ›่กจ็คบๆ„Ÿ่ฐขใ€‚ diff --git a/ultralytics/docs/zh/models/yolov6.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov6.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov6.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov7.md b/ultralytics/docs/zh/models/yolov7.md new file mode 100755 index 0000000..46b30b1 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov7.md @@ -0,0 +1,65 @@ +--- +comments: true +description: ๆŽข็ดขYOLOv7๏ผŒไธ€ไธชๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จใ€‚ไบ†่งฃๅ…ถๅ“่ถŠ็š„้€Ÿๅบฆ๏ผŒไปคไบบๅฐ่ฑกๆทฑๅˆป็š„็ฒพ็กฎๅบฆๅ’Œ็‹ฌ็‰น็š„ๅฏ่ฎญ็ปƒๆ— ้œ€ไป˜่ดนไผ˜ๅŒ–่š็„ฆ็‚นใ€‚ +keywords: YOLOv7๏ผŒๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ๏ผŒState-of-the-art๏ผŒUltralytics๏ผŒMS COCOๆ•ฐๆฎ้›†๏ผŒๆจกๅž‹้‡ๆ–ฐๅ‚ๆ•ฐๅŒ–๏ผŒๅŠจๆ€ๆ ‡็ญพๅˆ†้…๏ผŒๆ‰ฉๅฑ•็ผฉๆ”พ๏ผŒๅคๅˆ็ผฉๆ”พ +--- + +# YOLOv7๏ผšๅฏ่ฎญ็ปƒๆ— ้œ€ไป˜่ดน + +YOLOv7ๆ˜ฏไธ€็งๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ็š„ๆœ€ๆ–ฐๆŠ€ๆœฏ๏ผŒๅ…ถ้€Ÿๅบฆๅ’Œๅ‡†็กฎๅบฆ่ถ…่ฟ‡ไบ†็›ฎๅ‰ๅทฒ็Ÿฅ็š„ๆ‰€ๆœ‰็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ๏ผŒ้€Ÿๅบฆ่Œƒๅ›ดๅœจ5 FPSๅˆฐ160 FPSไน‹้—ดใ€‚ๅœจGPU V100ไธŠ๏ผŒๅฎƒๅœจๆ‰€ๆœ‰ๅทฒ็Ÿฅๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จไธญๅ…ทๆœ‰ๆœ€้ซ˜็š„ๅ‡†็กฎๅบฆ๏ผˆ56.8๏ผ…AP๏ผ‰๏ผŒไธ”ๅธง็އ่พพๅˆฐ30 FPSๆˆ–ๆ›ด้ซ˜ใ€‚ๆญคๅค–๏ผŒYOLOv7ๅœจ้€Ÿๅบฆๅ’Œๅ‡†็กฎๅบฆๆ–น้ขไนŸไผ˜ไบŽๅ…ถไป–็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ๏ผŒๅฆ‚YOLOR๏ผŒYOLOX๏ผŒ็ผฉๆ”พๅŽ็š„YOLOv4๏ผŒYOLOv5็ญ‰็ญ‰ใ€‚่ฏฅๆจกๅž‹ๆ˜ฏไปŽๅคดๅผ€ๅง‹ไฝฟ็”จMS COCOๆ•ฐๆฎ้›†่ฟ›่กŒ่ฎญ็ปƒ็š„๏ผŒ่€Œๆฒกๆœ‰ไฝฟ็”จๅ…ถไป–ๆ•ฐๆฎ้›†ๆˆ–้ข„่ฎญ็ปƒๆƒ้‡ใ€‚YOLOv7็š„ๆบไปฃ็ ๅฏๅœจGitHubไธŠ่Žทๅพ—ใ€‚ + +![YOLOv7ไธŽSOTA็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ็š„ๆฏ”่พƒ](https://github.com/ultralytics/ultralytics/assets/26833433/5e1e0420-8122-4c79-b8d0-2860aa79af92) +**ๆœ€ๅ…ˆ่ฟ›็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ็š„ๆฏ”่พƒ**ใ€‚ไปŽ่กจ2็š„็ป“ๆžœๅฏไปฅ็œ‹ๅ‡บ๏ผŒๆ‰€ๆๅ‡บ็š„ๆ–นๆณ•ๅœจ้€Ÿๅบฆๅ’Œๅ‡†็กฎๅบฆ็š„ๅ‡่กกไธŠๆœ€ไฝณใ€‚ๅฐ†YOLOv7-tiny-SiLUไธŽYOLOv5-N๏ผˆr6.1๏ผ‰่ฟ›่กŒๆฏ”่พƒ๏ผŒๆˆ‘ไปฌ็š„ๆ–นๆณ•ๅœจAPไธŠๅฟซไบ†127 FPS๏ผŒๅ‡†็กฎๅบฆๆ้ซ˜ไบ†10.7๏ผ…ใ€‚ๆญคๅค–๏ผŒYOLOv7ๅœจ161 FPS็š„ๅธง็އไธ‹ๅ…ทๆœ‰51.4๏ผ…็š„AP๏ผŒ่€Œๅ…ทๆœ‰็›ธๅŒAP็š„PPYOLOE-Lไป…ๅ…ทๆœ‰78 FPS็š„ๅธง็އใ€‚ๅœจๅ‚ๆ•ฐไฝฟ็”จๆ–น้ข๏ผŒYOLOv7ๆฏ”PPYOLOE-Lๅฐ‘ไบ†41๏ผ…ใ€‚ๅฐ†YOLOv7-XไธŽ114 FPS็š„ๆŽจ็†้€ŸๅบฆไธŽYOLOv5-L๏ผˆr6.1๏ผ‰็š„99 FPS็š„ๆŽจ็†้€Ÿๅบฆ่ฟ›่กŒๆฏ”่พƒ๏ผŒYOLOv7-Xๅฏไปฅๆ้ซ˜3.9๏ผ…็š„APใ€‚ๅฆ‚ๆžœๅฐ†YOLOv7-XไธŽ็ฑปไผผ่ง„ๆจก็š„YOLOv5-X๏ผˆr6.1๏ผ‰่ฟ›่กŒๆฏ”่พƒ๏ผŒYOLOv7-X็š„ๆŽจ็†้€Ÿๅบฆๆฏ”YOLOv5-Xๅฟซ31 
FPSใ€‚ๆญคๅค–๏ผŒๅฐฑๅ‚ๆ•ฐๅ’Œ่ฎก็ฎ—้‡่€Œ่จ€๏ผŒไธŽYOLOv5-X๏ผˆr6.1๏ผ‰็›ธๆฏ”๏ผŒYOLOv7-Xๅ‡ๅฐ‘ไบ†22๏ผ…็š„ๅ‚ๆ•ฐๅ’Œ8๏ผ…็š„่ฎก็ฎ—้‡๏ผŒไฝ†APๆ้ซ˜ไบ†2.2๏ผ…๏ผˆ[ๆฅๆบ](https://arxiv.org/pdf/2207.02696.pdf)๏ผ‰ใ€‚ + +## ๆฆ‚่ฟฐ + +ๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๆ˜ฏ่ฎธๅคš่ฎก็ฎ—ๆœบ่ง†่ง‰็ณป็ปŸ็š„้‡่ฆ็ป„ไปถ๏ผŒๅŒ…ๆ‹ฌๅคš็›ฎๆ ‡่ทŸ่ธช๏ผŒ่‡ชๅŠจ้ฉพ้ฉถ๏ผŒๆœบๅ™จไบบๆŠ€ๆœฏๅ’ŒๅŒปๅญฆๅ›พๅƒๅˆ†ๆž็ญ‰ใ€‚่ฟ‘ๅนดๆฅ๏ผŒๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹็š„ๅ‘ๅฑ•ไธ€็›ด่‡ดๅŠ›ไบŽ่ฎพ่ฎก้ซ˜ๆ•ˆ็š„ๆžถๆž„๏ผŒๅนถๆ้ซ˜ๅ„็งCPU๏ผŒGPUๅ’Œ็ฅž็ปๅค„็†ๅ•ๅ…ƒ๏ผˆNPU๏ผ‰็š„ๆŽจ็†้€Ÿๅบฆใ€‚YOLOv7ๆ”ฏๆŒ็งปๅŠจGPUๅ’ŒGPU่ฎพๅค‡๏ผŒไปŽ่พน็ผ˜ๅˆฐไบ‘็ซฏใ€‚ + +ไธŽไผ ็ปŸ็š„ๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จไพง้‡ไบŽๆžถๆž„ไผ˜ๅŒ–ไธๅŒ๏ผŒYOLOv7ๅผ•ๅ…ฅไบ†ๅฏน่ฎญ็ปƒ่ฟ‡็จ‹ไผ˜ๅŒ–็š„ๅ…ณๆณจใ€‚่ฟ™ๅŒ…ๆ‹ฌๆจกๅ—ๅ’Œไผ˜ๅŒ–ๆ–นๆณ•๏ผŒๆ—จๅœจๆ้ซ˜็›ฎๆ ‡ๆฃ€ๆต‹็š„ๅ‡†็กฎๆ€ง่€ŒไธๅขžๅŠ ๆŽจ็†ๆˆๆœฌ๏ผŒ่ฟ™ไธชๆฆ‚ๅฟต่ขซ็งฐไธบโ€œๅฏ่ฎญ็ปƒๆ— ้œ€ไป˜่ดนโ€ใ€‚ + +## ไธป่ฆ็‰นๆ€ง + +YOLOv7ๅผ•ๅ…ฅไบ†ๅ‡ ไธชๅ…ณ้”ฎ็‰นๆ€ง๏ผš + +1. **ๆจกๅž‹้‡ๆ–ฐๅ‚ๆ•ฐๅŒ–**๏ผšYOLOv7ๆๅ‡บไบ†ไธ€็ง่ฎกๅˆ’ๅฅฝ็š„้‡ๆ–ฐๅ‚ๆ•ฐๅŒ–ๆจกๅž‹๏ผŒๅฎƒๆ˜ฏไธ€็ง้€‚็”จไบŽไธๅŒ็ฝ‘็ปœไธญ็š„ๅฑ‚็š„็ญ–็•ฅ๏ผŒๅ…ทๆœ‰ๆขฏๅบฆไผ ๆ’ญ่ทฏๅพ„็š„ๆฆ‚ๅฟตใ€‚ + +2. **ๅŠจๆ€ๆ ‡็ญพๅˆ†้…**๏ผšๅฏนๅคšไธช่พ“ๅ‡บๅฑ‚็š„ๆจกๅž‹่ฟ›่กŒ่ฎญ็ปƒไผš้‡ๅˆฐไธ€ไธชๆ–ฐ้—ฎ้ข˜๏ผšโ€œๅฆ‚ไฝ•ไธบไธๅŒๅˆ†ๆ”ฏ็š„่พ“ๅ‡บๅˆ†้…ๅŠจๆ€็›ฎๆ ‡๏ผŸโ€ไธบไบ†่งฃๅ†ณ่ฟ™ไธช้—ฎ้ข˜๏ผŒYOLOv7ๅผ•ๅ…ฅไบ†ไธ€็งๆ–ฐ็š„ๆ ‡็ญพๅˆ†้…ๆ–นๆณ•๏ผŒ็งฐไธบ็ฒ—ๅˆฐ็ป†็š„ๅผ•ๅฏผๅผๆ ‡็ญพๅˆ†้…ใ€‚ + +3. **ๆ‰ฉๅฑ•ๅ’Œๅคๅˆ็ผฉๆ”พ**๏ผšYOLOv7ๆๅ‡บไบ†้€‚็”จไบŽๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ็š„โ€œๆ‰ฉๅฑ•โ€ๅ’Œโ€œๅคๅˆ็ผฉๆ”พโ€ๆ–นๆณ•๏ผŒๅฏไปฅๆœ‰ๆ•ˆๅˆฉ็”จๅ‚ๆ•ฐๅ’Œ่ฎก็ฎ—ใ€‚ + +4. **ๆ•ˆ็އ**๏ผšYOLOv7ๆๅ‡บ็š„ๆ–นๆณ•ๅฏไปฅๆœ‰ๆ•ˆๅœฐๅ‡ๅฐ‘ๆœ€ๅ…ˆ่ฟ›ๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅ™จ็š„็บฆ40๏ผ…็š„ๅ‚ๆ•ฐๅ’Œ50๏ผ…็š„่ฎก็ฎ—้‡๏ผŒๅนถๅ…ทๆœ‰ๆ›ดๅฟซ็š„ๆŽจ็†้€Ÿๅบฆๅ’Œๆ›ด้ซ˜็š„ๆฃ€ๆต‹ๅ‡†็กฎๅบฆใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ๆˆช่‡ณๆ’ฐๅ†™ๆœฌๆ–‡ๆ—ถ๏ผŒUltralyticsๅฝ“ๅ‰ไธๆ”ฏๆŒYOLOv7ๆจกๅž‹ใ€‚ๅ› ๆญค๏ผŒไปปไฝ•ๅธŒๆœ›ไฝฟ็”จYOLOv7็š„็”จๆˆท้ƒฝ้œ€่ฆ็›ดๆŽฅๅ‚่€ƒYOLOv7 GitHubๅญ˜ๅ‚จๅบ“ไธญ็š„ๅฎ‰่ฃ…ๅ’Œไฝฟ็”จ่ฏดๆ˜Žใ€‚ + +่ฟ™ๆ˜ฏๆ‚จๅฏ่ƒฝ้‡‡ๅ–็š„ไฝฟ็”จYOLOv7็š„ๅ…ธๅž‹ๆญฅ้ชค็š„็ฎ€่ฆๆฆ‚่ฟฐ๏ผš + +1. ่ฎฟ้—ฎYOLOv7 GitHubๅญ˜ๅ‚จๅบ“๏ผš[https://github.com/WongKinYiu/yolov7](https://github.com/WongKinYiu/yolov7)ใ€‚ + +2. ๆŒ‰็…งREADMEๆ–‡ไปถไธญๆไพ›็š„่ฏดๆ˜Ž่ฟ›่กŒๅฎ‰่ฃ…ใ€‚่ฟ™้€šๅธธๆถ‰ๅŠๅ…‹้š†ๅญ˜ๅ‚จๅบ“๏ผŒๅฎ‰่ฃ…ๅฟ…่ฆ็š„ไพ่ต–้กน๏ผŒๅนถ่ฎพ็ฝฎไปปไฝ•ๅฟ…่ฆ็š„็Žฏๅขƒๅ˜้‡ใ€‚ + +3. ๅฎ‰่ฃ…ๅฎŒๆˆๅŽ๏ผŒๆ‚จๅฏไปฅๆ นๆฎๅญ˜ๅ‚จๅบ“ไธญๆไพ›็š„ไฝฟ็”จ่ฏดๆ˜Ž่ฎญ็ปƒๅ’Œไฝฟ็”จๆจกๅž‹ใ€‚่ฟ™้€šๅธธๆถ‰ๅŠๅ‡†ๅค‡ๆ•ฐๆฎ้›†๏ผŒ้…็ฝฎๆจกๅž‹ๅ‚ๆ•ฐ๏ผŒ่ฎญ็ปƒๆจกๅž‹๏ผŒ็„ถๅŽไฝฟ็”จ่ฎญ็ปƒๅฅฝ็š„ๆจกๅž‹ๆ‰ง่กŒ็‰ฉไฝ“ๆฃ€ๆต‹ใ€‚ + +่ฏทๆณจๆ„๏ผŒๅ…ทไฝ“็š„ๆญฅ้ชคๅฏ่ƒฝๅ› ๆ‚จ็š„็‰นๅฎš็”จไพ‹ๅ’ŒYOLOv7ๅญ˜ๅ‚จๅบ“็š„ๅฝ“ๅ‰็Šถๆ€่€Œๆœ‰ๆ‰€ไธๅŒใ€‚ๅ› ๆญค๏ผŒๅผบ็ƒˆๅปบ่ฎฎ็›ดๆŽฅๅ‚่€ƒYOLOv7 GitHubๅญ˜ๅ‚จๅบ“ไธญๆไพ›็š„่ฏดๆ˜Žใ€‚ + +ๆˆ‘ไปฌๅฏน่ฟ™ๅฏ่ƒฝ้€ ๆˆ็š„ไปปไฝ•ไธไพฟ่กจ็คบๆญ‰ๆ„๏ผŒๅนถๅฐ†ๅŠชๅŠ›ๆ›ดๆ–ฐๆญคๆ–‡ๆกฃไปฅๆไพ›้’ˆๅฏนUltralytics็š„YOLOv7ๆ”ฏๆŒ็š„ไฝฟ็”จ็คบไพ‹ใ€‚ + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๆˆ‘ไปฌ่ฆๆ„Ÿ่ฐขYOLOv7็š„ไฝœ่€…ๅœจๅฎžๆ—ถ็‰ฉไฝ“ๆฃ€ๆต‹้ข†ๅŸŸๅšๅ‡บ็š„้‡ๅคง่ดก็Œฎ๏ผš + +!!! 
Quote "" + + === "BibTeX" + + ```bibtex + @article{wang2022yolov7, + title={{YOLOv7}: Trainable bag-of-freebies sets new state-of-the-art for real-time object detectors}, + author={Wang, Chien-Yao and Bochkovskiy, Alexey and Liao, Hong-Yuan Mark}, + journal={arXiv preprint arXiv:2207.02696}, + year={2022} + } + ``` + +YOLOv7็š„ๅŽŸๅง‹่ฎบๆ–‡ๅฏไปฅๅœจ[arXiv](https://arxiv.org/pdf/2207.02696.pdf)ไธŠๆ‰พๅˆฐใ€‚ไฝœ่€…ๅทฒๅฐ†ๅ…ถๅทฅไฝœๅ…ฌๅผ€๏ผŒๅนถไธ”ไปฃ็ ๅบ“ๅฏๅœจ[GitHub](https://github.com/WongKinYiu/yolov7)ไธญ่ฎฟ้—ฎใ€‚ๆˆ‘ไปฌๆ„Ÿ่ฐขไป–ไปฌๅœจๆŽจๅŠจ่ฏฅ้ข†ๅŸŸๅ‘ๅฑ•ๅนถไฝฟๅ…ถๅทฅไฝœๅฏนๅนฟๅคง็คพๅŒบๅฏ่ฎฟ้—ฎ็š„ๅŠชๅŠ›ใ€‚ diff --git a/ultralytics/docs/zh/models/yolov7.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov7.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov7.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/models/yolov8.md b/ultralytics/docs/zh/models/yolov8.md new file mode 100755 index 0000000..a9844e1 --- /dev/null +++ b/ultralytics/docs/zh/models/yolov8.md @@ -0,0 +1,162 @@ +--- +comments: true +description: ๆŽข็ดขYOLOv8็š„ๆฟ€ๅŠจไบบๅฟƒๅŠŸ่ƒฝ๏ผŒ่ฟ™ๆ˜ฏๆˆ‘ไปฌๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ็š„ๆœ€ๆ–ฐ็‰ˆๆœฌ๏ผไบ†่งฃ้ซ˜็บงๆžถๆž„ใ€้ข„่ฎญ็ปƒๆจกๅž‹ๅ’Œ็ฒพ็กฎๅบฆไธŽ้€Ÿๅบฆ็š„ๆœ€ไฝณๅนณ่กกๅฆ‚ไฝ•ไฝฟYOLOv8ๆˆไธบๆ‚จ่ฟ›่กŒ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠก็š„็†ๆƒณ้€‰ๆ‹ฉใ€‚ +keywords: YOLOv8๏ผŒUltralytics๏ผŒๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ๏ผŒ้ข„่ฎญ็ปƒๆจกๅž‹๏ผŒๆ–‡ๆกฃ๏ผŒ็›ฎๆ ‡ๆฃ€ๆต‹๏ผŒYOLO็ณปๅˆ—๏ผŒ้ซ˜็บงๆžถๆž„๏ผŒ็ฒพ็กฎๅบฆ๏ผŒ้€Ÿๅบฆ +--- + +# YOLOv8 + +## ๆฆ‚่ฟฐ + +YOLOv8ๆ˜ฏYOLO็ณปๅˆ—ๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ๅ™จ็š„ๆœ€ๆ–ฐ็‰ˆๆœฌ๏ผŒไปฅๅ…ถๅœจๅ‡†็กฎๅบฆๅ’Œ้€Ÿๅบฆๆ–น้ข็š„ๅ“่ถŠๆ€ง่ƒฝ่€Œ้—ปๅใ€‚ๅœจๆž„ๅปบๅœจไน‹ๅ‰YOLO็‰ˆๆœฌ็š„ๅŸบ็ก€ไธŠ๏ผŒYOLOv8ๅผ•ๅ…ฅไบ†ๆ–ฐๅŠŸ่ƒฝๅ’Œไผ˜ๅŒ–๏ผŒไฝฟๅ…ถๆˆไธบๅ„็งๅบ”็”จ้ข†ๅŸŸไธญๅ„็ง็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠก็š„็†ๆƒณ้€‰ๆ‹ฉใ€‚ + +![Ultralytics YOLOv8](https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png) + +## ไธป่ฆๅŠŸ่ƒฝ + +- **ๅ…ˆ่ฟ›็š„ไธปๅนฒๅ’Œไธญ้—ดๆžถๆž„๏ผš** YOLOv8้‡‡็”จๆœ€ๅ…ˆ่ฟ›็š„ไธปๅนฒๅ’Œไธญ้—ดๆžถๆž„๏ผŒๆไพ›ไบ†ๆ›ดๅฅฝ็š„็‰นๅพๆๅ–ๅ’Œ็›ฎๆ ‡ๆฃ€ๆต‹ๆ€ง่ƒฝใ€‚ +- **ๆ— ้”šๅˆ†ๅ‰ฒUltralyticsๅคด๏ผš** YOLOv8้‡‡็”จๆ— ้”šๅˆ†ๅ‰ฒ็š„Ultralyticsๅคด๏ผŒ็›ธๆฏ”ไบŽๅŸบไบŽ้”š็‚น็š„ๆ–นๆณ•๏ผŒๅฏไปฅๆไพ›ๆ›ด้ซ˜็š„ๅ‡†็กฎๆ€งๅ’Œๆ›ด้ซ˜ๆ•ˆ็š„ๆฃ€ๆต‹่ฟ‡็จ‹ใ€‚ +- **ไผ˜ๅŒ–็š„ๅ‡†็กฎๅบฆๅ’Œ้€Ÿๅบฆๅนณ่กก๏ผš** YOLOv8ไธ“ๆณจไบŽๅœจๅ‡†็กฎๅบฆๅ’Œ้€Ÿๅบฆไน‹้—ด็ปดๆŒๆœ€ไฝณๅนณ่กก๏ผŒ้€‚็”จไบŽๅ„็งๅฎžๆ—ถ็›ฎๆ ‡ๆฃ€ๆต‹ไปปๅŠกใ€‚ +- **ๅคš็ง้ข„่ฎญ็ปƒๆจกๅž‹๏ผš** YOLOv8ๆไพ›ไบ†ไธ€็ณปๅˆ—้ข„่ฎญ็ปƒๆจกๅž‹๏ผŒไปฅๆปก่ถณๅ„็งไปปๅŠกๅ’Œๆ€ง่ƒฝ่ฆๆฑ‚๏ผŒๆ›ดๅฎนๆ˜“ๆ‰พๅˆฐ้€‚ๅˆ็‰นๅฎš็”จไพ‹็š„ๆจกๅž‹ใ€‚ + +## ๆ”ฏๆŒ็š„ไปปๅŠกๅ’Œๆจกๅผ + +YOLOv8็ณปๅˆ—ๆไพ›ไบ†ๅคš็งๆจกๅž‹๏ผŒๆฏไธชๆจกๅž‹ไธ“้—จ็”จไบŽ่ฎก็ฎ—ๆœบ่ง†่ง‰ไธญ็š„็‰นๅฎšไปปๅŠกใ€‚่ฟ™ไบ›ๆจกๅž‹ๆ—จๅœจๆปก่ถณๅ„็ง่ฆๆฑ‚๏ผŒไปŽ็›ฎๆ ‡ๆฃ€ๆต‹ๅˆฐๆ›ดๅคๆ‚็š„ไปปๅŠก๏ผŒๅฆ‚ๅฎžไพ‹ๅˆ†ๅ‰ฒใ€ๅงฟๆ€/ๅ…ณ้”ฎ็‚นๆฃ€ๆต‹ๅ’Œๅˆ†็ฑปใ€‚ + +YOLOv8็ณปๅˆ—็š„ๆฏไธชๅ˜ไฝ“้ƒฝ้’ˆๅฏนๅ…ถ็›ธๅบ”็š„ไปปๅŠก่ฟ›่กŒไบ†ไผ˜ๅŒ–๏ผŒ็กฎไฟ้ซ˜ๆ€ง่ƒฝๅ’Œๅ‡†็กฎๆ€งใ€‚ๆญคๅค–๏ผŒ่ฟ™ไบ›ๆจกๅž‹ไธŽๅ„็งๆ“ไฝœๆจกๅผๅ…ผๅฎน๏ผŒๅŒ…ๆ‹ฌ[ๆŽจ็†](../modes/predict.md)ใ€[้ชŒ่ฏ](../modes/val.md)ใ€[่ฎญ็ปƒ](../modes/train.md)ๅ’Œ[ๅฏผๅ‡บ](../modes/export.md)๏ผŒไพฟไบŽๅœจ้ƒจ็ฝฒๅ’Œๅผ€ๅ‘็š„ไธๅŒ้˜ถๆฎตไฝฟ็”จใ€‚ + +| ๆจกๅž‹ | ๆ–‡ไปถๅ | ไปปๅŠก | ๆŽจ็† | ้ชŒ่ฏ | ่ฎญ็ปƒ | ๅฏผๅ‡บ | +|-------------|----------------------------------------------------------------------------------------------------------------|-----------------------------|----|----|----|----| +| YOLOv8 | `yolov8n.pt` `yolov8s.pt` `yolov8m.pt` `yolov8l.pt` `yolov8x.pt` | [ๆฃ€ๆต‹](../tasks/detect.md) | โœ… | โœ… | โœ… | โœ… 
| +| YOLOv8-seg | `yolov8n-seg.pt` `yolov8s-seg.pt` `yolov8m-seg.pt` `yolov8l-seg.pt` `yolov8x-seg.pt` | [ๅฎžไพ‹ๅˆ†ๅ‰ฒ](../tasks/segment.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-pose | `yolov8n-pose.pt` `yolov8s-pose.pt` `yolov8m-pose.pt` `yolov8l-pose.pt` `yolov8x-pose.pt` `yolov8x-pose-p6.pt` | [ๅงฟๆ€/ๅ…ณ้”ฎ็‚น](../tasks/pose.md) | โœ… | โœ… | โœ… | โœ… | +| YOLOv8-cls | `yolov8n-cls.pt` `yolov8s-cls.pt` `yolov8m-cls.pt` `yolov8l-cls.pt` `yolov8x-cls.pt` | [ๅˆ†็ฑป](../tasks/classify.md) | โœ… | โœ… | โœ… | โœ… | + +่ฟ™ไธช่กจๆ ผๆไพ›ไบ†YOLOv8ๆจกๅž‹ๅ˜็ง็š„ๆฆ‚่งˆ๏ผŒ็ชๅ‡บไบ†ๅฎƒไปฌๅœจ็‰นๅฎšไปปๅŠกไธญ็š„้€‚็”จๆ€ง๏ผŒไปฅๅŠๅฎƒไปฌไธŽๅ„็งๆ“ไฝœๆจกๅผ๏ผˆๅฆ‚ๆŽจ็†ใ€้ชŒ่ฏใ€่ฎญ็ปƒๅ’Œๅฏผๅ‡บ๏ผ‰็š„ๅ…ผๅฎนๆ€งใ€‚ๅฎƒๅฑ•็คบไบ†YOLOv8็ณปๅˆ—็š„ๅคšๅŠŸ่ƒฝๆ€งๅ’Œ้ฒๆฃ’ๆ€ง๏ผŒไฝฟๅฎƒไปฌ้€‚็”จไบŽ่ฎก็ฎ—ๆœบ่ง†่ง‰ไธญๅ„็งๅบ”็”จใ€‚ + +## ๆ€ง่ƒฝๆŒ‡ๆ ‡ + +!!! Performance + + === "ๆฃ€ๆต‹๏ผˆCOCO๏ผ‰" + + ๆœ‰ๅ…ณๅœจ[COCO](https://docs.ultralytics.com/datasets/detect/coco/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒ่ฏทๅ‚่ง[Detection Docs](https://docs.ultralytics.com/tasks/detect/)๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ80ไธช้ข„่ฎญ็ปƒ็š„็ฑปๅˆซใ€‚ + + | ๆจกๅž‹ | ๅคงๅฐ
(pixels) | mAPval<br>50-95 | ้€Ÿๅบฆ<br>CPU ONNX<br>(ms) | ้€Ÿๅบฆ<br>A100 TensorRT<br>(ms) | ๅ‚ๆ•ฐ<br>(M) | FLOPs<br>
(B) | + | ---------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------------------- | ---------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + + === "ๆฃ€ๆต‹๏ผˆOpen Images V7๏ผ‰" + + ๆœ‰ๅ…ณๅœจ[Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒ่ฏทๅ‚่ง[Detection Docs](https://docs.ultralytics.com/tasks/detect/)๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ600ไธช้ข„่ฎญ็ปƒ็š„็ฑปๅˆซใ€‚ + + | ๆจกๅž‹ | ๅคงๅฐ
(pixels) | mAPval<br>50-95 | ้€Ÿๅบฆ<br>CPU ONNX<br>(ms) | ้€Ÿๅบฆ<br>A100 TensorRT<br>(ms) | ๅ‚ๆ•ฐ<br>(M) | FLOPs<br>
(B) | + | ------------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ---------------------------------- | ------------------ | ----------------- | + | [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 | + | [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 | + | [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 | + | [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 | + | [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 | + + === "ๅˆ†ๅ‰ฒ๏ผˆCOCO๏ผ‰" + + ๆœ‰ๅ…ณๅœจ[COCO](https://docs.ultralytics.com/datasets/segment/coco/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒ่ฏทๅ‚่ง[Segmentation Docs](https://docs.ultralytics.com/tasks/segment/)๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ80ไธช้ข„่ฎญ็ปƒ็š„็ฑปๅˆซใ€‚ + + | ๆจกๅž‹ | ๅคงๅฐ
(pixels) | mAPbox<br>50-95 | mAPmask<br>50-95 | ้€Ÿๅบฆ<br>CPU ONNX<br>(ms) | ้€Ÿๅบฆ<br>A100 TensorRT<br>(ms) | ๅ‚ๆ•ฐ<br>(M) | FLOPs<br>
(B) | + | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ---------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | + | [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | + | [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | + | [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | + | [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + + === "ๅˆ†็ฑป๏ผˆImageNet๏ผ‰" + + ๆœ‰ๅ…ณๅœจ[ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒ่ฏทๅ‚่ง[Classification Docs](https://docs.ultralytics.com/tasks/classify/)๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ1000ไธช้ข„่ฎญ็ปƒ็š„็ฑปๅˆซใ€‚ + + | ๆจกๅž‹ | ๅคงๅฐ
(pixels) | ๅ‡†็กฎ็އ<br>top1 | ๅ‡†็กฎ็އ<br>top5 | ้€Ÿๅบฆ<br>CPU ONNX<br>(ms) | ้€Ÿๅบฆ<br>A100 TensorRT<br>(ms) | ๅ‚ๆ•ฐ<br>(M) | FLOPs<br>
(B) at 640 | + | ---------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------ | ------------------------------ | ---------------------------------- | ------------------ | ------------------------ | + | [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | + | [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | + | [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | + | [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | + | [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + + === "ๅงฟๆ€๏ผˆCOCO๏ผ‰" + + ๆœ‰ๅ…ณๅœจ[COCO](https://docs.ultralytics.com/datasets/pose/coco/)ไธŠ่ฎญ็ปƒ็š„่ฟ™ไบ›ๆจกๅž‹็š„็”จๆณ•็คบไพ‹๏ผŒ่ฏทๅ‚่ง[Pose Estimation Docs](https://docs.ultralytics.com/tasks/segment/)๏ผŒๅ…ถไธญๅŒ…ๆ‹ฌ1ไธช้ข„่ฎญ็ปƒ็š„็ฑปๅˆซ๏ผŒ'person'ใ€‚ + + | ๆจกๅž‹ | ๅคงๅฐ
(pixels) | mAPpose<br>50-95 | mAPpose<br>50 | ้€Ÿๅบฆ<br>CPU ONNX<br>(ms) | ้€Ÿๅบฆ<br>A100 TensorRT<br>(ms) | ๅ‚ๆ•ฐ<br>(M) | FLOPs<br>
(B) | + | ---------------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ---------------------------------- | ------------------ | ----------------- | + | [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | + | [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | + | [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | + | [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | + | [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | + | [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +## ็”จๆณ•็คบไพ‹ + +่ฟ™ไธช็คบไพ‹ๆไพ›ไบ†ๅ…ณไบŽYOLOv8่ฎญ็ปƒๅ’ŒๆŽจ็†็š„็ฎ€ๅ•็คบไพ‹ใ€‚ๆœ‰ๅ…ณ่ฟ™ไบ›ๅ’Œๅ…ถไป–[ๆจกๅผ](../modes/index.md)็š„ๅฎŒๆ•ดๆ–‡ๆกฃ๏ผŒ่ฏทๅ‚่ง[Predict](../modes/predict.md)๏ผŒ[Train](../modes/train.md)๏ผŒ[Val](../modes/val.md)ๅ’Œ[Export](../modes/export.md)ๆ–‡ๆกฃ้กต้ขใ€‚ + +่ฏทๆณจๆ„๏ผŒไปฅไธ‹็คบไพ‹ๆ˜ฏ้’ˆๅฏน็”จไบŽ็›ฎๆ ‡ๆฃ€ๆต‹็š„YOLOv8 [Detect](../tasks/detect.md)ๆจกๅž‹ใ€‚ๆœ‰ๅ…ณๅ…ถไป–ๆ”ฏๆŒ็š„ไปปๅŠก๏ผŒ่ฏทๅ‚่ง[Segment](../tasks/segment.md)ใ€[Classify](../tasks/classify.md)ๅ’Œ[Pose](../tasks/pose.md)ๆ–‡ๆกฃใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ๅฏไปฅๅฐ†PyTorch้ข„่ฎญ็ปƒ็š„`*.pt`ๆจกๅž‹ๅ’Œ้…็ฝฎ`*.yaml`ๆ–‡ไปถไผ ้€’็ป™`YOLO()`็ฑป๏ผŒๅœจpythonไธญๅˆ›ๅปบไธ€ไธชๆจกๅž‹ๅฎžไพ‹๏ผš + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCO้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๆ˜พ็คบๆจกๅž‹ไฟกๆฏ๏ผˆๅฏ้€‰๏ผ‰ + model.info() + + # ไฝฟ็”จCOCO8็คบไพ‹ๆ•ฐๆฎ้›†่ฎญ็ปƒๆจกๅž‹100ไธชepoch + results = model.train(data='coco8.yaml', epochs=100, imgsz=640) + + # ไฝฟ็”จYOLOv8nๆจกๅž‹ๅœจ'bus.jpg'ๅ›พ็‰‡ไธŠ่ฟ่กŒๆŽจ็† + results = model('path/to/bus.jpg') + ``` + + === "CLI" + + ๅฏไปฅไฝฟ็”จCLIๅ‘ฝไปค็›ดๆŽฅ่ฟ่กŒๆจกๅž‹๏ผš + + ```bash + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCO้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹๏ผŒๅนถๅœจCOCO8็คบไพ‹ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒ100ไธชepoch + yolo train model=yolov8n.pt data=coco8.yaml epochs=100 imgsz=640 + + # ๅŠ ่ฝฝไธ€ไธชๅœจCOCO้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹๏ผŒๅนถๅœจ'bus.jpg'ๅ›พ็‰‡ไธŠ่ฟ่กŒๆŽจ็† + yolo predict model=yolov8n.pt source=path/to/bus.jpg + ``` + +## ๅผ•็”จๅ’Œ่‡ด่ฐข + +ๅฆ‚ๆžœๆ‚จๅœจๅทฅไฝœไธญไฝฟ็”จYOLOv8ๆจกๅž‹ๆˆ–ๆญคๅญ˜ๅ‚จๅบ“ไธญ็š„ๅ…ถไป–่ฝฏไปถ๏ผŒ่ฏทไฝฟ็”จไปฅไธ‹ๆ ผๅผ่ฟ›่กŒๅผ•็”จ๏ผš + +!!! 
Quote "ๅผ•็”จ" + + === "BibTeX" + + ```bibtex + @software{yolov8_ultralytics, + author = {Glenn Jocher and Ayush Chaurasia and Jing Qiu}, + title = {Ultralytics YOLOv8}, + version = {8.0.0}, + year = {2023}, + url = {https://github.com/ultralytics/ultralytics}, + orcid = {0000-0001-5950-6979, 0000-0002-7603-6750, 0000-0003-3783-7069}, + license = {AGPL-3.0} + } + ``` + +่ฏทๆณจๆ„๏ผŒDOIๆญฃๅœจ็ญ‰ๅพ…ไธญ๏ผŒDOIๅฐ†ๅœจๅฏ็”จๆ—ถๆทปๅŠ ๅˆฐๅผ•็”จไธญใ€‚YOLOv8ๆจกๅž‹ๆ นๆฎ[AGPL-3.0](https://github.com/ultralytics/ultralytics/blob/main/LICENSE)ๅ’Œ[ไผไธš่ฎธๅฏ่ฏ](https://ultralytics.com/license)ๆไพ›ใ€‚ diff --git a/ultralytics/docs/zh/models/yolov8.md:Zone.Identifier b/ultralytics/docs/zh/models/yolov8.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/models/yolov8.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/benchmark.md b/ultralytics/docs/zh/modes/benchmark.md new file mode 100755 index 0000000..53a14e5 --- /dev/null +++ b/ultralytics/docs/zh/modes/benchmark.md @@ -0,0 +1,94 @@ +--- +comments: ็œŸ +description: ไบ†่งฃๅฆ‚ไฝ•่ฏ„ไผฐYOLOv8ๅœจๅ„็งๅฏผๅ‡บๆ ผๅผไธ‹็š„้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง๏ผŒ่Žทๅ–mAP50-95ใ€accuracy_top5็ญ‰ๆŒ‡ๆ ‡็š„ๆดžๅฏŸใ€‚ +keywords: Ultralytics, YOLOv8, ๅŸบๅ‡†ๆต‹่ฏ•, ้€Ÿๅบฆๅˆ†ๆž, ๅ‡†็กฎๆ€งๅˆ†ๆž, mAP50-95, accuracy_top5, ONNX, OpenVINO, TensorRT, YOLOๅฏผๅ‡บๆ ผๅผ +--- + +# ไฝฟ็”จUltralytics YOLO่ฟ›่กŒๆจกๅž‹ๅŸบๅ‡†ๆต‹่ฏ• + +Ultralytics YOLO็”Ÿๆ€็ณป็ปŸๅ’Œ้›†ๆˆ + +## ไป‹็ป + +ไธ€ๆ—ฆๆ‚จ็š„ๆจกๅž‹็ป่ฟ‡่ฎญ็ปƒๅ’Œ้ชŒ่ฏ๏ผŒไธ‹ไธ€ไธชๅˆไนŽ้€ป่พ‘็š„ๆญฅ้ชคๆ˜ฏ่ฏ„ไผฐๅฎƒๅœจๅ„็งๅฎž้™…ๅœบๆ™ฏไธญ็š„ๆ€ง่ƒฝใ€‚Ultralytics YOLOv8็š„ๅŸบๅ‡†ๆจกๅผ้€š่ฟ‡ๆไพ›ไธ€ไธชๅฅๅฃฎ็š„ๆก†ๆžถๆฅ่ฏ„ไผฐๆจกๅž‹ๅœจไธ€็ณปๅˆ—ๅฏผๅ‡บๆ ผๅผไธญ็š„้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง๏ผŒไธบๆญค็›ฎ็š„ๆœๅŠกใ€‚ + +## ไธบไป€ไนˆๅŸบๅ‡†ๆต‹่ฏ•่‡ณๅ…ณ้‡่ฆ๏ผŸ + +- **ๆ˜Žๆ™บ็š„ๅ†ณ็ญ–๏ผš** ๆดžๅฏŸ้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งไน‹้—ด็š„ๆƒ่กกใ€‚ +- **่ต„ๆบๅˆ†้…๏ผš** ็†่งฃไธๅŒ็š„ๅฏผๅ‡บๆ ผๅผๅœจไธๅŒ็กฌไปถไธŠ็š„ๆ€ง่ƒฝ่กจ็Žฐใ€‚ +- **ไผ˜ๅŒ–๏ผš** ไบ†่งฃๅ“ช็งๅฏผๅ‡บๆ ผๅผไธบๆ‚จ็š„็‰นๅฎš็”จไพ‹ๆไพ›ๆœ€ไฝณๆ€ง่ƒฝใ€‚ +- **ๆˆๆœฌๆ•ˆ็›Š๏ผš** ๆ นๆฎๅŸบๅ‡†ๆต‹่ฏ•็ป“ๆžœ๏ผŒๆ›ดๆœ‰ๆ•ˆๅœฐๅˆฉ็”จ็กฌไปถ่ต„ๆบใ€‚ + +### ๅŸบๅ‡†ๆจกๅผ็š„ๅ…ณ้”ฎๆŒ‡ๆ ‡ + +- **mAP50-95๏ผš** ็”จไบŽ็‰ฉไฝ“ๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒๅ’Œๅงฟๆ€ไผฐ่ฎกใ€‚ +- **accuracy_top5๏ผš** ็”จไบŽๅ›พๅƒๅˆ†็ฑปใ€‚ +- **ๆŽจๆ–ญๆ—ถ้—ด๏ผš** ๅค„็†ๆฏๅผ ๅ›พ็‰‡็š„ๆ—ถ้—ด๏ผˆๆฏซ็ง’๏ผ‰ใ€‚ + +### ๆ”ฏๆŒ็š„ๅฏผๅ‡บๆ ผๅผ + +- **ONNX๏ผš** ไธบไบ†ๆœ€ไฝณ็š„CPUๆ€ง่ƒฝ +- **TensorRT๏ผš** ไธบไบ†ๆœ€ๅคงๅŒ–็š„GPUๆ•ˆ็އ +- **OpenVINO๏ผš** ้’ˆๅฏนIntel็กฌไปถ็š„ไผ˜ๅŒ– +- **CoreMLใ€TensorFlow SavedModel ็ญ‰๏ผš** ๆปก่ถณๅคšๆ ทๅŒ–้ƒจ็ฝฒ้œ€ๆฑ‚ใ€‚ + +!!! ๆŠ€ๅทง "ๆ็คบ" + + * ๅฏผๅ‡บๅˆฐONNXๆˆ–OpenVINOๅฏๅฎž็Žฐ้ซ˜่พพ3ๅ€CPU้€Ÿๅบฆๆๅ‡ใ€‚ + * ๅฏผๅ‡บๅˆฐTensorRTๅฏๅฎž็Žฐ้ซ˜่พพ5ๅ€GPU้€Ÿๅบฆๆๅ‡ใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ๅœจๆ‰€ๆœ‰ๆ”ฏๆŒ็š„ๅฏผๅ‡บๆ ผๅผไธŠ่ฟ่กŒYOLOv8nๅŸบๅ‡†ๆต‹่ฏ•๏ผŒๅŒ…ๆ‹ฌONNXใ€TensorRT็ญ‰ใ€‚ๆ›ดๅคšๅฏผๅ‡บๅ‚ๆ•ฐ็š„ๅฎŒๆ•ดๅˆ—่กจ่ฏท่งไธ‹ๆ–น็š„ๅ‚ๆ•ฐ้ƒจๅˆ†ใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics.utils.benchmarks import benchmark + + # ๅœจGPUไธŠ่ฟ›่กŒๅŸบๅ‡†ๆต‹่ฏ• + benchmark(model='yolov8n.pt', data='coco8.yaml', imgsz=640, half=False, device=0) + ``` + === "CLI" + + ```bash + yolo benchmark model=yolov8n.pt data='coco8.yaml' imgsz=640 half=False device=0 + ``` + +## ๅ‚ๆ•ฐ + +ๅ‚ๆ•ฐๅฆ‚ `model`ใ€`data`ใ€`imgsz`ใ€`half`ใ€`device` ๅ’Œ `verbose` ็ญ‰๏ผŒไธบ็”จๆˆทๆไพ›ไบ†็ตๆดปๆ€ง๏ผŒไปฅไพฟๆ นๆฎๅ…ทไฝ“้œ€ๆฑ‚ๅพฎ่ฐƒๅŸบๅ‡†ๆต‹่ฏ•๏ผŒๅนถ่ฝปๆพๆฏ”่พƒไธๅŒๅฏผๅ‡บๆ ผๅผ็š„ๆ€ง่ƒฝใ€‚ + +| ้”ฎ | ๅ€ผ | ๆ่ฟฐ | +|-----------|---------|----------------------------------------------------| +| `model` | `None` | ๆจกๅž‹ๆ–‡ไปถ่ทฏๅพ„๏ผŒๅฆ‚ yolov8n.pt, yolov8n.yaml | +| `data` | `None` | ๅผ•็”จๅŸบๅ‡†ๆต‹่ฏ•ๆ•ฐๆฎ้›†็š„YAML่ทฏๅพ„๏ผˆๆ ‡่ฎฐไธบ `val`๏ผ‰ | +| `imgsz` | `640` | ๅ›พๅƒๅคงๅฐไฝœไธบๆ ‡้‡ๆˆ–๏ผˆh, w๏ผ‰ๅˆ—่กจ๏ผŒๅฆ‚ (640, 480) | +| `half` | `False` | FP16้‡ๅŒ– | +| `int8` | `False` | INT8้‡ๅŒ– | +| `device` | `None` | ่ฟ่กŒ่ฎพๅค‡๏ผŒๅฆ‚ cuda device=0 ๆˆ– device=0,1,2,3 ๆˆ– device=cpu | +| `verbose` | `False` | ้”™่ฏฏๆ—ถไธ็ปง็ปญ๏ผˆๅธƒๅฐ”ๅ€ผ๏ผ‰๏ผŒๆˆ–้ชŒ่ฏ้˜ˆๅ€ผไธ‹้™๏ผˆๆตฎ็‚นๆ•ฐ๏ผ‰ | + +## ๅฏผๅ‡บๆ ผๅผ + +ๅŸบๅ‡†ๆต‹่ฏ•ๅฐ†ๅฐ่ฏ•ๅœจไธ‹ๆ–นๅˆ—ๅ‡บ็š„ๆ‰€ๆœ‰ๅฏ่ƒฝ็š„ๅฏผๅ‡บๆ ผๅผไธŠ่‡ชๅŠจ่ฟ่กŒใ€‚ + +| ๆ ผๅผ | `format` ๅ‚ๆ•ฐ | ๆจกๅž‹ | ๅ…ƒๆ•ฐๆฎ | ๅ‚ๆ•ฐ | +|--------------------------------------------------------------------|---------------|---------------------------|-----|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | + +ๅœจ[ๅฏผๅ‡บ](https://docs.ultralytics.com/modes/export/)้กต้ขๆŸฅ็œ‹ๅฎŒๆ•ด็š„ `export` ่ฏฆๆƒ…ใ€‚ diff --git a/ultralytics/docs/zh/modes/benchmark.md:Zone.Identifier b/ultralytics/docs/zh/modes/benchmark.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/benchmark.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/export.md b/ultralytics/docs/zh/modes/export.md new file mode 100755 index 0000000..dadb1f1 --- /dev/null +++ b/ultralytics/docs/zh/modes/export.md @@ 
-0,0 +1,108 @@ +--- +comments: true +description: ๅฆ‚ไฝ•้€ๆญฅๆŒ‡ๅฏผๆ‚จๅฐ† YOLOv8 ๆจกๅž‹ๅฏผๅ‡บๅˆฐๅ„็งๆ ผๅผ๏ผŒๅฆ‚ ONNXใ€TensorRTใ€CoreML ็ญ‰ไปฅ่ฟ›่กŒ้ƒจ็ฝฒใ€‚็ŽฐๅœจๅฐฑๆŽข็ดข๏ผ +keywords: YOLO, YOLOv8, Ultralytics, ๆจกๅž‹ๅฏผๅ‡บ, ONNX, TensorRT, CoreML, TensorFlow SavedModel, OpenVINO, PyTorch, ๅฏผๅ‡บๆจกๅž‹ +--- + +# Ultralytics YOLO ็š„ๆจกๅž‹ๅฏผๅ‡บ + +Ultralytics YOLO ็”Ÿๆ€็ณป็ปŸๅ’Œ้›†ๆˆ + +## ๅผ•่จ€ + +่ฎญ็ปƒๆจกๅž‹็š„ๆœ€็ปˆ็›ฎๆ ‡ๆ˜ฏๅฐ†ๅ…ถ้ƒจ็ฝฒๅˆฐ็Žฐๅฎžไธ–็•Œ็š„ๅบ”็”จไธญใ€‚Ultralytics YOLOv8 ็š„ๅฏผๅ‡บๆจกๅผๆไพ›ไบ†ๅคš็ง้€‰้กน๏ผŒ็”จไบŽๅฐ†ๆ‚จ่ฎญ็ปƒๅฅฝ็š„ๆจกๅž‹ๅฏผๅ‡บๅˆฐไธๅŒ็š„ๆ ผๅผ๏ผŒไปŽ่€Œๅฏไปฅๅœจๅ„็งๅนณๅฐๅ’Œ่ฎพๅค‡ไธŠ้ƒจ็ฝฒใ€‚ๆœฌ็ปผๅˆๆŒ‡ๅ—ๆ—จๅœจๅธฆๆ‚จ้€ๆญฅไบ†่งฃๆจกๅž‹ๅฏผๅ‡บ็š„็ป†่Š‚๏ผŒๅฑ•็คบๅฆ‚ไฝ•ๅฎž็Žฐๆœ€ๅคง็š„ๅ…ผๅฎนๆ€งๅ’Œๆ€ง่ƒฝใ€‚ + +

+**่ง‚็œ‹๏ผš** ๅฆ‚ไฝ•ๅฏผๅ‡บ่‡ชๅฎšไน‰่ฎญ็ปƒ็š„ Ultralytics YOLOv8 ๆจกๅž‹ๅนถๅœจ็ฝ‘็ปœๆ‘„ๅƒๅคดไธŠๅฎžๆ—ถๆŽจ็†ใ€‚
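
ๅœจไป‹็ปๅ„้กน่ฎพ็ฝฎไน‹ๅ‰๏ผŒไธ‹้ขๅ…ˆ็ป™ๅ‡บไธ€ไธช็ฎ€็Ÿญ็š„็คบๆ„ไปฃ็ ๏ผŒๅฑ•็คบโ€œๅฏผๅ‡บๅŽ็›ดๆŽฅๅŠ ่ฝฝๅฏผๅ‡บๆ–‡ไปถ่ฟ›่กŒๆŽจ็†โ€็š„ๅ…ธๅž‹ๆต็จ‹ใ€‚่ฟ™ๅชๆ˜ฏไธ€ไธชๆœ€ๅฐ่‰ๅ›พ๏ผšๅ‡่ฎพๅทฒๅฎ‰่ฃ… `ultralytics` ๅŒ…๏ผŒไธ”ๅ›พๅƒ่ทฏๅพ„ `bus.jpg` ไป…ไธบ็คบไพ‹ๅ‡่ฎพ๏ผŒๅฎž้™…ไฝฟ็”จๆ—ถ่ฏทๆ›ฟๆขไธบๆœฌๅœฐๆ–‡ไปถ๏ผš

```python
from ultralytics import YOLO

# ๅŠ ่ฝฝๅฎ˜ๆ–น้ข„่ฎญ็ปƒๆจกๅž‹ๅนถๅฏผๅ‡บไธบ ONNX
model = YOLO('yolov8n.pt')
onnx_path = model.export(format='onnx')  # ้€šๅธธ่ฟ”ๅ›žๅฏผๅ‡บๆ–‡ไปถ็š„่ทฏๅพ„๏ผŒไพ‹ๅฆ‚ 'yolov8n.onnx'

# ๅฏผๅ‡บๅพ—ๅˆฐ็š„ ONNX ๆจกๅž‹ๅฏไปฅ็›ดๆŽฅ็”ฑ YOLO() ๅŠ ่ฝฝ๏ผŒ็”จไบŽๆŽจ็† 
onnx_model = YOLO(onnx_path)
results = onnx_model('bus.jpg')  # ็คบไพ‹ๅ›พๅƒ่ทฏๅพ„๏ผŒไป…ไธบๅ‡่ฎพ
```

ๅฏนไบŽไธ‹ๆ–‡โ€œๅฏผๅ‡บๆ ผๅผโ€่กจๆ ผไธญๆ ‡ๆณจๆ”ฏๆŒๆŽจ็†็š„ๅ…ถไป–ๆ ผๅผ๏ผŒๆต็จ‹ไธŽๆญค็ฑปไผผใ€‚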

+ +## ไธบไป€ไนˆ้€‰ๆ‹ฉ YOLOv8 ็š„ๅฏผๅ‡บๆจกๅผ๏ผŸ + +- **ๅคšๅŠŸ่ƒฝๆ€ง๏ผš** ๆ”ฏๆŒๅฏผๅ‡บๅˆฐๅคš็งๆ ผๅผ๏ผŒๅŒ…ๆ‹ฌ ONNXใ€TensorRTใ€CoreML ็ญ‰ใ€‚ +- **ๆ€ง่ƒฝ๏ผš** ไฝฟ็”จ TensorRT ๅฏๅฎž็Žฐ้ซ˜่พพ 5 ๅ€ GPU ๅŠ ้€Ÿ๏ผŒไฝฟ็”จ ONNX ๆˆ– OpenVINO ๅฏๅฎž็Žฐ้ซ˜่พพ 3 ๅ€ CPU ๅŠ ้€Ÿใ€‚ +- **ๅ…ผๅฎนๆ€ง๏ผš** ไฝฟๆ‚จ็š„ๆจกๅž‹ๅฏไปฅๅœจไผ—ๅคš็กฌไปถๅ’Œ่ฝฏไปถ็Žฏๅขƒไธญๅนฟๆณ›้ƒจ็ฝฒใ€‚ +- **ๆ˜“็”จๆ€ง๏ผš** ็ฎ€ๅ•็š„ CLI ๅ’Œ Python API๏ผŒๅฟซ้€Ÿ็›ดๆŽฅๅœฐ่ฟ›่กŒๆจกๅž‹ๅฏผๅ‡บใ€‚ + +### ๅฏผๅ‡บๆจกๅผ็š„ๅ…ณ้”ฎ็‰นๆ€ง + +ไปฅไธ‹ๆ˜ฏไธ€ไบ›็ชๅ‡บ็š„ๅŠŸ่ƒฝ๏ผš + +- **ไธ€้”ฎๅฏผๅ‡บ๏ผš** ็”จไบŽๅฏผๅ‡บๅˆฐไธๅŒๆ ผๅผ็š„็ฎ€ๅ•ๅ‘ฝไปคใ€‚ +- **ๆ‰น้‡ๅฏผๅ‡บ๏ผš** ๆ”ฏๆŒๆ‰นๆŽจ็†่ƒฝๅŠ›็š„ๆจกๅž‹ๅฏผๅ‡บใ€‚ +- **ไผ˜ๅŒ–ๆŽจ็†๏ผš** ๅฏผๅ‡บ็š„ๆจกๅž‹้’ˆๅฏนๆ›ดๅฟซ็š„ๆŽจ็†ๆ—ถ้—ด่ฟ›่กŒไผ˜ๅŒ–ใ€‚ +- **ๆ•™ๅญฆ่ง†้ข‘๏ผš** ๆไพ›ๆทฑๅ…ฅๆŒ‡ๅฏผๅ’Œๆ•™ๅญฆ๏ผŒ็กฎไฟๆต็•…็š„ๅฏผๅ‡บไฝ“้ชŒใ€‚ + +!!! Tip "ๆ็คบ" + + * ๅฏผๅ‡บๅˆฐ ONNX ๆˆ– OpenVINO๏ผŒไปฅๅฎž็Žฐ้ซ˜่พพ 3 ๅ€็š„ CPU ๅŠ ้€Ÿใ€‚ + * ๅฏผๅ‡บๅˆฐ TensorRT๏ผŒไปฅๅฎž็Žฐ้ซ˜่พพ 5 ๅ€็š„ GPU ๅŠ ้€Ÿใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ๅฐ† YOLOv8n ๆจกๅž‹ๅฏผๅ‡บไธบ ONNX ๆˆ– TensorRT ็ญ‰ไธๅŒๆ ผๅผใ€‚ๆŸฅ็œ‹ไธ‹้ข็š„ๅ‚ๆ•ฐ้ƒจๅˆ†๏ผŒไบ†่งฃๅฎŒๆ•ด็š„ๅฏผๅ‡บๅ‚ๆ•ฐๅˆ—่กจใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰่ฎญ็ปƒ็š„ๆจกๅž‹ + + # ๅฏผๅ‡บๆจกๅž‹ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ๅฏผๅ‡บๅฎ˜ๆ–นๆจกๅž‹ + yolo export model=path/to/best.pt format=onnx # ๅฏผๅ‡บ่‡ชๅฎšไน‰่ฎญ็ปƒ็š„ๆจกๅž‹ + ``` + +## ๅ‚ๆ•ฐ + +YOLO ๆจกๅž‹็š„ๅฏผๅ‡บ่ฎพ็ฝฎๆ˜ฏๆŒ‡็”จไบŽๅœจๅ…ถไป–็Žฏๅขƒๆˆ–ๅนณๅฐไธญไฝฟ็”จๆจกๅž‹ๆ—ถไฟๅญ˜ๆˆ–ๅฏผๅ‡บๆจกๅž‹็š„ๅ„็ง้…็ฝฎๅ’Œ้€‰้กนใ€‚่ฟ™ไบ›่ฎพ็ฝฎไผšๅฝฑๅ“ๆจกๅž‹็š„ๆ€ง่ƒฝใ€ๅคงๅฐๅ’ŒไธŽไธๅŒ็ณป็ปŸ็š„ๅ…ผๅฎนๆ€งใ€‚ไธ€ไบ›ๅธธ่ง็š„ YOLO ๅฏผๅ‡บ่ฎพ็ฝฎๅŒ…ๆ‹ฌๅฏผๅ‡บ็š„ๆจกๅž‹ๆ–‡ไปถๆ ผๅผ๏ผˆไพ‹ๅฆ‚ ONNXใ€TensorFlow SavedModel๏ผ‰ใ€ๆจกๅž‹ๅฐ†ๅœจๅ“ชไธช่ฎพๅค‡ไธŠ่ฟ่กŒ๏ผˆไพ‹ๅฆ‚ CPUใ€GPU๏ผ‰ไปฅๅŠๆ˜ฏๅฆๅŒ…ๅซ้™„ๅŠ ๅŠŸ่ƒฝ๏ผŒๅฆ‚้ฎ็ฝฉๆˆ–ๆฏไธชๆก†ๅคšไธชๆ ‡็ญพใ€‚ๅ…ถไป–ๅฏ่ƒฝๅฝฑๅ“ๅฏผๅ‡บ่ฟ‡็จ‹็š„ๅ› ็ด ๅŒ…ๆ‹ฌๆจกๅž‹็”จ้€”็š„ๅ…ทไฝ“็ป†่Š‚ไปฅๅŠ็›ฎๆ ‡็Žฏๅขƒๆˆ–ๅนณๅฐ็š„่ฆๆฑ‚ๆˆ–้™ๅˆถใ€‚้‡่ฆ็š„ๆ˜ฏ่ฆไป”็ป†่€ƒ่™‘ๅ’Œ้…็ฝฎ่ฟ™ไบ›่ฎพ็ฝฎ๏ผŒไปฅ็กฎไฟๅฏผๅ‡บ็š„ๆจกๅž‹้’ˆๅฏน้ข„ๆœŸ็”จไพ‹็ป่ฟ‡ไผ˜ๅŒ–๏ผŒๅนถไธ”ๅฏไปฅๅœจ็›ฎๆ ‡็Žฏๅขƒไธญๆœ‰ๆ•ˆไฝฟ็”จใ€‚ + +| ้”ฎ | ๅ€ผ | ๆ่ฟฐ | +|-------------|-----------------|-------------------------------------| +| `format` | `'torchscript'` | ๅฏผๅ‡บ็š„ๆ ผๅผ | +| `imgsz` | `640` | ๅ›พๅƒๅฐบๅฏธ๏ผŒๅฏไปฅๆ˜ฏๆ ‡้‡ๆˆ– (h, w) ๅˆ—่กจ๏ผŒๆฏ”ๅฆ‚ (640, 480) | +| `keras` | `False` | ไฝฟ็”จ Keras ๅฏผๅ‡บ TF SavedModel | +| `optimize` | `False` | TorchScript๏ผšไธบ็งปๅŠจ่ฎพๅค‡ไผ˜ๅŒ– | +| `half` | `False` | FP16 ้‡ๅŒ– | +| `int8` | `False` | INT8 ้‡ๅŒ– | +| `dynamic` | `False` | ONNX/TensorRT๏ผšๅŠจๆ€่ฝด | +| `simplify` | `False` | ONNX/TensorRT๏ผš็ฎ€ๅŒ–ๆจกๅž‹ | +| `opset` | `None` | ONNX๏ผšopset ็‰ˆๆœฌ๏ผˆๅฏ้€‰๏ผŒ้ป˜่ฎคไธบๆœ€ๆ–ฐ็‰ˆๆœฌ๏ผ‰ | +| `workspace` | `4` | TensorRT๏ผšๅทฅไฝœๅŒบๅคงๅฐ๏ผˆGB๏ผ‰ | +| `nms` | `False` | CoreML๏ผšๆทปๅŠ  NMS | + +## ๅฏผๅ‡บๆ ผๅผ + +ไธ‹่กจไธญๆไพ›ไบ†ๅฏ็”จ็š„ YOLOv8 ๅฏผๅ‡บๆ ผๅผใ€‚ๆ‚จๅฏไปฅไฝฟ็”จ `format` ๅ‚ๆ•ฐๅฏผๅ‡บไปปไฝ•ๆ ผๅผ็š„ๆจกๅž‹๏ผŒๆฏ”ๅฆ‚ `format='onnx'` ๆˆ– `format='engine'`ใ€‚ + +| ๆ ผๅผ | `format` ๅ‚ๆ•ฐ | ๆจกๅž‹ | ๅ…ƒๆ•ฐๆฎ | ๅ‚ๆ•ฐ | +|--------------------------------------------------------------------|---------------|---------------------------|-----|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| 
[TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`, `half` | diff --git a/ultralytics/docs/zh/modes/export.md:Zone.Identifier b/ultralytics/docs/zh/modes/export.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/export.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/index.md b/ultralytics/docs/zh/modes/index.md new file mode 100755 index 0000000..b82c43c --- /dev/null +++ b/ultralytics/docs/zh/modes/index.md @@ -0,0 +1,73 @@ +--- +comments: true +description: ไปŽ่ฎญ็ปƒๅˆฐ่ทŸ่ธช๏ผŒๅ……ๅˆ†ๅˆฉ็”จUltralytics็š„YOLOv8ใ€‚่Žทๅ–ๆ”ฏๆŒ็š„ๆฏ็งๆจกๅผ็š„่ง่งฃๅ’Œ็คบไพ‹๏ผŒๅŒ…ๆ‹ฌ้ชŒ่ฏใ€ๅฏผๅ‡บๅ’ŒๅŸบๅ‡†ๆต‹่ฏ•ใ€‚ +keywords: Ultralytics, YOLOv8, ๆœบๅ™จๅญฆไน , ็›ฎๆ ‡ๆฃ€ๆต‹, ่ฎญ็ปƒ, ้ชŒ่ฏ, ้ข„ๆต‹, ๅฏผๅ‡บ, ่ทŸ่ธช, ๅŸบๅ‡†ๆต‹่ฏ• +--- + +# Ultralytics YOLOv8 ๆจกๅผ + +Ultralytics YOLO็”Ÿๆ€็ณป็ปŸๅŠๆ•ดๅˆ + +## ็ฎ€ไป‹ + +Ultralytics YOLOv8ไธไป…ไป…ๆ˜ฏๅฆไธ€ไธช็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹๏ผ›ๅฎƒๆ˜ฏไธ€ไธชๅคšๅŠŸ่ƒฝๆก†ๆžถ๏ผŒๆ—จๅœจๆถต็›–ๆœบๅ™จๅญฆไน ๆจกๅž‹็š„ๆ•ดไธช็”Ÿๅ‘ฝๅ‘จๆœŸโ€”โ€”ไปŽๆ•ฐๆฎๆ‘„ๅ–ๅ’Œๆจกๅž‹่ฎญ็ปƒๅˆฐ้ชŒ่ฏใ€้ƒจ็ฝฒๅ’Œๅฎž้™…่ทŸ่ธชใ€‚ๆฏ็งๆจกๅผ้ƒฝๆœๅŠกไบŽไธ€ไธช็‰นๅฎš็š„็›ฎ็š„๏ผŒๅนถ่ฎพ่ฎกไธบๆไพ›ๆ‚จๅœจไธๅŒไปปๅŠกๅ’Œ็”จไพ‹ไธญๆ‰€้œ€็š„็ตๆดปๆ€งๅ’Œๆ•ˆ็އใ€‚ + +

+ ่ง‚็œ‹๏ผšUltralyticsๆจกๅผๆ•™็จ‹๏ผš่ฎญ็ปƒใ€้ชŒ่ฏใ€้ข„ๆต‹ใ€ๅฏผๅ‡บๅ’ŒๅŸบๅ‡†ๆต‹่ฏ•ใ€‚

+ +### ๆจกๅผๆฆ‚่งˆ + +็†่งฃUltralytics YOLOv8ๆ‰€ๆ”ฏๆŒ็š„ไธๅŒ**ๆจกๅผ**ๅฏนไบŽๅ……ๅˆ†ๅˆฉ็”จๆ‚จ็š„ๆจกๅž‹่‡ณๅ…ณ้‡่ฆ๏ผš + +- **่ฎญ็ปƒ๏ผˆTrain๏ผ‰**ๆจกๅผ๏ผšๅœจ่‡ชๅฎšไน‰ๆˆ–้ข„ๅŠ ่ฝฝ็š„ๆ•ฐๆฎ้›†ไธŠๅพฎ่ฐƒๆ‚จ็š„ๆจกๅž‹ใ€‚ +- **้ชŒ่ฏ๏ผˆVal๏ผ‰**ๆจกๅผ๏ผš่ฎญ็ปƒๅŽ่ฟ›่กŒๆ ก้ชŒ๏ผŒไปฅ้ชŒ่ฏๆจกๅž‹ๆ€ง่ƒฝใ€‚ +- **้ข„ๆต‹๏ผˆPredict๏ผ‰**ๆจกๅผ๏ผšๅœจ็œŸๅฎžไธ–็•Œๆ•ฐๆฎไธŠ้‡Šๆ”พๆจกๅž‹็š„้ข„ๆต‹่ƒฝๅŠ›ใ€‚ +- **ๅฏผๅ‡บ๏ผˆExport๏ผ‰**ๆจกๅผ๏ผšไปฅๅ„็งๆ ผๅผไฝฟๆจกๅž‹ๅ‡†ๅค‡ๅฐฑ็ปช๏ผŒ้ƒจ็ฝฒ่‡ณ็”Ÿไบง็Žฏๅขƒใ€‚ +- **่ทŸ่ธช๏ผˆTrack๏ผ‰**ๆจกๅผ๏ผšๅฐ†ๆ‚จ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹ๆ‰ฉๅฑ•ๅˆฐๅฎžๆ—ถ่ทŸ่ธชๅบ”็”จไธญใ€‚ +- **ๅŸบๅ‡†๏ผˆBenchmark๏ผ‰**ๆจกๅผ๏ผšๅœจไธๅŒ้ƒจ็ฝฒ็Žฏๅขƒไธญๅˆ†ๆžๆจกๅž‹็š„้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งใ€‚ + +ๆœฌ็ปผๅˆๆŒ‡ๅ—ๆ—จๅœจไธบๆ‚จๆไพ›ๆฏ็งๆจกๅผ็š„ๆฆ‚่งˆๅ’Œๅฎž็”จ่ง่งฃ๏ผŒๅธฎๅŠฉๆ‚จๅ……ๅˆ†ๅ‘ๆŒฅYOLOv8็š„ๅ…จ้ƒจๆฝœๅŠ›ใ€‚ + +## [่ฎญ็ปƒ](train.md) + +่ฎญ็ปƒๆจกๅผ็”จไบŽๅœจ่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒYOLOv8ๆจกๅž‹ใ€‚ๅœจๆญคๆจกๅผไธ‹๏ผŒๆจกๅž‹ๅฐ†ไฝฟ็”จๆŒ‡ๅฎš็š„ๆ•ฐๆฎ้›†ๅ’Œ่ถ…ๅ‚ๆ•ฐ่ฟ›่กŒ่ฎญ็ปƒใ€‚่ฎญ็ปƒ่ฟ‡็จ‹ๅŒ…ๆ‹ฌไผ˜ๅŒ–ๆจกๅž‹็š„ๅ‚ๆ•ฐ๏ผŒไฝฟๅ…ถ่ƒฝๅคŸๅ‡†็กฎ้ข„ๆต‹ๅ›พๅƒไธญๅฏน่ฑก็š„็ฑปๅˆซๅ’Œไฝ็ฝฎใ€‚ + +[่ฎญ็ปƒ็คบไพ‹](train.md){ .md-button } + +## [้ชŒ่ฏ](val.md) + +้ชŒ่ฏๆจกๅผ็”จไบŽ่ฎญ็ปƒYOLOv8ๆจกๅž‹ๅŽ่ฟ›่กŒ้ชŒ่ฏใ€‚ๅœจๆญคๆจกๅผไธ‹๏ผŒๆจกๅž‹ๅœจ้ชŒ่ฏ้›†ไธŠ่ฟ›่กŒ่ฏ„ไผฐ๏ผŒไปฅ่กก้‡ๅ…ถๅ‡†็กฎๆ€งๅ’Œๆณ›ๅŒ–่ƒฝๅŠ›ใ€‚ๆญคๆจกๅผๅฏไปฅ็”จๆฅ่ฐƒๆ•ดๆจกๅž‹็š„่ถ…ๅ‚ๆ•ฐ๏ผŒไปฅๆ”นๅ–„ๅ…ถๆ€ง่ƒฝใ€‚ + +[้ชŒ่ฏ็คบไพ‹](val.md){ .md-button } + +## [้ข„ๆต‹](predict.md) + +้ข„ๆต‹ๆจกๅผ็”จไบŽไฝฟ็”จ่ฎญ็ปƒๅฅฝ็š„YOLOv8ๆจกๅž‹ๅœจๆ–ฐๅ›พๅƒๆˆ–่ง†้ข‘ไธŠ่ฟ›่กŒ้ข„ๆต‹ใ€‚ๅœจๆญคๆจกๅผไธ‹๏ผŒๆจกๅž‹ไปŽๆฃ€ๆŸฅ็‚นๆ–‡ไปถๅŠ ่ฝฝ๏ผŒ็”จๆˆทๅฏไปฅๆไพ›ๅ›พๅƒๆˆ–่ง†้ข‘ไปฅๆ‰ง่กŒๆŽจ็†ใ€‚ๆจกๅž‹้ข„ๆต‹่พ“ๅ…ฅๅ›พๅƒๆˆ–่ง†้ข‘ไธญๅฏน่ฑก็š„็ฑปๅˆซๅ’Œไฝ็ฝฎใ€‚ + +[้ข„ๆต‹็คบไพ‹](predict.md){ .md-button } + +## [ๅฏผๅ‡บ](export.md) + +ๅฏผๅ‡บๆจกๅผ็”จไบŽๅฐ†YOLOv8ๆจกๅž‹ๅฏผๅ‡บไธบๅฏ็”จไบŽ้ƒจ็ฝฒ็š„ๆ ผๅผใ€‚ๅœจๆญคๆจกๅผไธ‹๏ผŒๆจกๅž‹่ขซ่ฝฌๆขไธบๅ…ถไป–่ฝฏไปถๅบ”็”จๆˆ–็กฌไปถ่ฎพๅค‡ๅฏไปฅไฝฟ็”จ็š„ๆ ผๅผใ€‚ๅฝ“ๆจกๅž‹้ƒจ็ฝฒๅˆฐ็”Ÿไบง็Žฏๅขƒๆ—ถ๏ผŒๆญคๆจกๅผๅๅˆ†ๆœ‰็”จใ€‚ + +[ๅฏผๅ‡บ็คบไพ‹](export.md){ .md-button } + +## [่ทŸ่ธช](track.md) + +่ทŸ่ธชๆจกๅผ็”จไบŽไฝฟ็”จYOLOv8ๆจกๅž‹ๅฎžๆ—ถ่ทŸ่ธชๅฏน่ฑกใ€‚ๅœจๆญคๆจกๅผไธ‹๏ผŒๆจกๅž‹ไปŽๆฃ€ๆŸฅ็‚นๆ–‡ไปถๅŠ ่ฝฝ๏ผŒ็”จๆˆทๅฏไปฅๆไพ›ๅฎžๆ—ถ่ง†้ข‘ๆตไปฅๆ‰ง่กŒๅฎžๆ—ถๅฏน่ฑก่ทŸ่ธชใ€‚ๆญคๆจกๅผ้€‚็”จไบŽ็›‘ๆŽง็ณป็ปŸๆˆ–่‡ชๅŠจ้ฉพ้ฉถๆฑฝ่ฝฆ็ญ‰ๅบ”็”จใ€‚ + +[่ทŸ่ธช็คบไพ‹](track.md){ .md-button } + +## [ๅŸบๅ‡†](benchmark.md) + +ๅŸบๅ‡†ๆจกๅผ็”จไบŽๅฏนYOLOv8็š„ๅ„็งๅฏผๅ‡บๆ ผๅผ็š„้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง่ฟ›่กŒ่ฏ„ไผฐใ€‚ๅŸบๅ‡†ๆไพ›ไบ†ๆœ‰ๅ…ณๅฏผๅ‡บๆ ผๅผๅคงๅฐใ€ๅ…ถ้’ˆๅฏน็›ฎๆ ‡ๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒๅ’Œๅงฟๆ€็š„`mAP50-95`ๆŒ‡ๆ ‡๏ผŒๆˆ–้’ˆๅฏนๅˆ†็ฑป็š„`accuracy_top5`ๆŒ‡ๆ ‡๏ผŒไปฅๅŠๆฏๅผ ๅ›พๅƒ่ทจๅ„็งๅฏผๅ‡บๆ ผๅผ๏ผˆๅฆ‚ONNXใ€OpenVINOใ€TensorRT็ญ‰๏ผ‰็š„ๆŽจ็†ๆ—ถ้—ด๏ผˆไปฅๆฏซ็ง’ไธบๅ•ไฝ๏ผ‰็š„ไฟกๆฏใ€‚ๆญคไฟกๆฏๅฏไปฅๅธฎๅŠฉ็”จๆˆทๆ นๆฎๅฏน้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€ง็š„ๅ…ทไฝ“้œ€ๆฑ‚๏ผŒ้€‰ๆ‹ฉๆœ€ไฝณ็š„ๅฏผๅ‡บๆ ผๅผใ€‚ + +[ๅŸบๅ‡†็คบไพ‹](benchmark.md){ .md-button } diff --git a/ultralytics/docs/zh/modes/index.md:Zone.Identifier b/ultralytics/docs/zh/modes/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/predict.md b/ultralytics/docs/zh/modes/predict.md new file mode 100755 index 0000000..9defcd5 --- /dev/null +++ b/ultralytics/docs/zh/modes/predict.md @@ -0,0 +1,714 @@ +--- +comments: true +description: ไบ†่งฃๅฆ‚ไฝ•ไฝฟ็”จ YOLOv8 ้ข„ๆต‹ๆจกๅผ่ฟ›่กŒๅ„็งไปปๅŠกใ€‚ๅญฆไน ๅ…ณไบŽไธๅŒๆŽจ็†ๆบๅฆ‚ๅ›พๅƒ๏ผŒ่ง†้ข‘ๅ’Œๆ•ฐๆฎๆ ผๅผ็š„ๅ†…ๅฎนใ€‚ +keywords: Ultralytics, YOLOv8, ้ข„ๆต‹ๆจกๅผ, 
ๆŽจ็†ๆบ, ้ข„ๆต‹ไปปๅŠก, ๆตๅผๆจกๅผ, ๅ›พๅƒๅค„็†, ่ง†้ข‘ๅค„็†, ๆœบๅ™จๅญฆไน , ไบบๅทฅๆ™บ่ƒฝ +--- + +# ไฝฟ็”จ Ultralytics YOLO ่ฟ›่กŒๆจกๅž‹้ข„ๆต‹ + +Ultralytics YOLO ็”Ÿๆ€็ณป็ปŸๅ’Œ้›†ๆˆ + +## ๅผ•่จ€ + +ๅœจๆœบๅ™จๅญฆไน ๅ’Œ่ฎก็ฎ—ๆœบ่ง†่ง‰้ข†ๅŸŸ๏ผŒๅฐ†่ง†่ง‰ๆ•ฐๆฎ่ฝฌๅŒ–ไธบๆœ‰็”จไฟกๆฏ็š„่ฟ‡็จ‹่ขซ็งฐไธบ'ๆŽจ็†'ๆˆ–'้ข„ๆต‹'ใ€‚Ultralytics YOLOv8 ๆไพ›ไบ†ไธ€ไธชๅผบๅคง็š„ๅŠŸ่ƒฝ๏ผŒ็งฐไธบ **้ข„ๆต‹ๆจกๅผ**๏ผŒๅฎƒไธ“ไธบๅ„็งๆ•ฐๆฎๆฅๆบ็š„้ซ˜ๆ€ง่ƒฝๅฎžๆ—ถๆŽจ็†่€Œ่ฎพ่ฎกใ€‚ + +

+ ่ง‚็œ‹๏ผšๅฆ‚ไฝ•ไปŽ Ultralytics YOLOv8 ๆจกๅž‹ไธญๆๅ–่พ“ๅ‡บ๏ผŒ็”จไบŽ่‡ชๅฎšไน‰้กน็›ฎใ€‚

+ +## ๅฎž้™…ๅบ”็”จ้ข†ๅŸŸ + +| ๅˆถ้€ ไธš | ไฝ“่‚ฒ | ๅฎ‰ๅ…จ | +|:-------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------:| +| ![่ฝฆ่พ†้›ถ้ƒจไปถๆฃ€ๆต‹](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a0f802a8-0776-44cf-8f17-93974a4a28a1) | ![่ถณ็ƒ่ฟๅŠจๅ‘˜ๆฃ€ๆต‹](https://github.com/RizwanMunawar/ultralytics/assets/62513924/7d320e1f-fc57-4d7f-a691-78ee579c3442) | ![ไบบๅ‘˜ๆ‘”ๅ€’ๆฃ€ๆต‹](https://github.com/RizwanMunawar/ultralytics/assets/62513924/86437c4a-3227-4eee-90ef-9efb697bdb43) | +| ่ฝฆ่พ†้›ถ้ƒจไปถๆฃ€ๆต‹ | ่ถณ็ƒ่ฟๅŠจๅ‘˜ๆฃ€ๆต‹ | ไบบๅ‘˜ๆ‘”ๅ€’ๆฃ€ๆต‹ | + +## ไธบไฝ•ไฝฟ็”จ Ultralytics YOLO ่ฟ›่กŒๆŽจ็†๏ผŸ + +ไปฅไธ‹ๆ˜ฏ่€ƒ่™‘ไฝฟ็”จ YOLOv8 ็š„้ข„ๆต‹ๆจกๅผๆปก่ถณๆ‚จ็š„ๅ„็งๆŽจ็†้œ€ๆฑ‚็š„ๅ‡ ไธชๅŽŸๅ› ๏ผš + +- **ๅคšๅŠŸ่ƒฝๆ€ง๏ผš** ่ƒฝๅคŸๅฏนๅ›พๅƒใ€่ง†้ข‘ไนƒ่‡ณๅฎžๆ—ถๆต่ฟ›่กŒๆŽจ็†ใ€‚ +- **ๆ€ง่ƒฝ๏ผš** ๅทฅ็จ‹ๅŒ–ไธบๅฎžๆ—ถใ€้ซ˜้€Ÿๅค„็†่€Œ่ฎพ่ฎก๏ผŒไธ็‰บ็‰ฒๅ‡†็กฎๆ€งใ€‚ +- **ๆ˜“็”จๆ€ง๏ผš** ็›ด่ง‚็š„ Python ๅ’Œ CLI ๆŽฅๅฃ๏ผŒไพฟไบŽๅฟซ้€Ÿ้ƒจ็ฝฒๅ’Œๆต‹่ฏ•ใ€‚ +- **้ซ˜ๅบฆๅฏๅฎšๅˆถๆ€ง๏ผš** ๅคš็ง่ฎพ็ฝฎๅ’Œๅ‚ๆ•ฐๅฏ่ฐƒ๏ผŒไพๆฎๆ‚จ็š„ๅ…ทไฝ“้œ€ๆฑ‚่ฐƒๆ•ดๆจกๅž‹็š„ๆŽจ็†่กŒไธบใ€‚ + +### ้ข„ๆต‹ๆจกๅผ็š„ๅ…ณ้”ฎ็‰นๆ€ง + +YOLOv8 ็š„้ข„ๆต‹ๆจกๅผ่ขซ่ฎพ่ฎกไธบๅผบๅคงไธ”ๅคšๅŠŸ่ƒฝ๏ผŒๅŒ…ๆ‹ฌไปฅไธ‹็‰นๆ€ง๏ผš + +- **ๅ…ผๅฎนๅคšไธชๆ•ฐๆฎๆฅๆบ๏ผš** ๆ— ่ฎบๆ‚จ็š„ๆ•ฐๆฎๆ˜ฏๅ•็‹ฌๅ›พ็‰‡๏ผŒๅ›พ็‰‡้›†ๅˆ๏ผŒ่ง†้ข‘ๆ–‡ไปถ๏ผŒ่ฟ˜ๆ˜ฏๅฎžๆ—ถ่ง†้ข‘ๆต๏ผŒ้ข„ๆต‹ๆจกๅผ้ƒฝ่ƒฝ่ƒœไปปใ€‚ +- **ๆตๅผๆจกๅผ๏ผš** ไฝฟ็”จๆตๅผๅŠŸ่ƒฝ็”Ÿๆˆไธ€ไธชๅ†…ๅญ˜้ซ˜ๆ•ˆ็š„ `Results` ๅฏน่ฑก็”Ÿๆˆๅ™จใ€‚ๅœจ่ฐƒ็”จ้ข„ๆต‹ๅ™จๆ—ถ๏ผŒ้€š่ฟ‡่ฎพ็ฝฎ `stream=True` ๆฅๅฏ็”จๆญคๅŠŸ่ƒฝใ€‚ +- **ๆ‰นๅค„็†๏ผš** ่ƒฝๅคŸๅœจๅ•ไธชๆ‰นๆฌกไธญๅค„็†ๅคšไธชๅ›พ็‰‡ๆˆ–่ง†้ข‘ๅธง๏ผŒ่ฟ›ไธ€ๆญฅๅŠ ๅฟซๆŽจ็†ๆ—ถ้—ดใ€‚ +- **ๆ˜“ไบŽ้›†ๆˆ๏ผš** ็”ฑไบŽๅ…ถ็ตๆดป็š„ API๏ผŒๆ˜“ไบŽไธŽ็Žฐๆœ‰ๆ•ฐๆฎ็ฎก้“ๅ’Œๅ…ถไป–่ฝฏไปถ็ป„ไปถ้›†ๆˆใ€‚ + +Ultralytics YOLO ๆจกๅž‹ๅœจ่ฟ›่กŒๆŽจ็†ๆ—ถ่ฟ”ๅ›žไธ€ไธช Python `Results` ๅฏน่ฑกๅˆ—่กจ๏ผŒๆˆ–่€…ๅฝ“ไผ ๅ…ฅ `stream=True` ๆ—ถ๏ผŒ่ฟ”ๅ›žไธ€ไธชๅ†…ๅญ˜้ซ˜ๆ•ˆ็š„ Python `Results` ๅฏน่ฑก็”Ÿๆˆๅ™จ๏ผš + +!!! 
Example "้ข„ๆต‹" + + === "ไฝฟ็”จ `stream=False` ่ฟ”ๅ›žๅˆ—่กจ" + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + + # ๅœจๅ›พ็‰‡ๅˆ—่กจไธŠ่ฟ่กŒๆ‰น้‡ๆŽจ็† + results = model(['im1.jpg', 'im2.jpg']) # ่ฟ”ๅ›ž Results ๅฏน่ฑกๅˆ—่กจ + + # ๅค„็†็ป“ๆžœๅˆ—่กจ + for result in results: + boxes = result.boxes # ่พน็•Œๆก†่พ“ๅ‡บ็š„ Boxes ๅฏน่ฑก + masks = result.masks # ๅˆ†ๅ‰ฒๆŽฉ็ ่พ“ๅ‡บ็š„ Masks ๅฏน่ฑก + keypoints = result.keypoints # ๅงฟๆ€่พ“ๅ‡บ็š„ Keypoints ๅฏน่ฑก + probs = result.probs # ๅˆ†็ฑป่พ“ๅ‡บ็š„ Probs ๅฏน่ฑก + ``` + + === "ไฝฟ็”จ `stream=True` ่ฟ”ๅ›ž็”Ÿๆˆๅ™จ" + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + + # ๅœจๅ›พ็‰‡ๅˆ—่กจไธŠ่ฟ่กŒๆ‰น้‡ๆŽจ็† + results = model(['im1.jpg', 'im2.jpg'], stream=True) # ่ฟ”ๅ›ž Results ๅฏน่ฑก็”Ÿๆˆๅ™จ + + # ๅค„็†็ป“ๆžœ็”Ÿๆˆๅ™จ + for result in results: + boxes = result.boxes # ่พน็•Œๆก†่พ“ๅ‡บ็š„ Boxes ๅฏน่ฑก + masks = result.masks # ๅˆ†ๅ‰ฒๆŽฉ็ ่พ“ๅ‡บ็š„ Masks ๅฏน่ฑก + keypoints = result.keypoints # ๅงฟๆ€่พ“ๅ‡บ็š„ Keypoints ๅฏน่ฑก + probs = result.probs # ๅˆ†็ฑป่พ“ๅ‡บ็š„ Probs ๅฏน่ฑก + ``` + +## ๆŽจ็†ๆฅๆบ + +YOLOv8 ๅฏไปฅๅค„็†ๆŽจ็†่พ“ๅ…ฅ็š„ไธๅŒ็ฑปๅž‹๏ผŒๅฆ‚ไธ‹่กจๆ‰€็คบใ€‚ๆฅๆบๅŒ…ๆ‹ฌ้™ๆ€ๅ›พๅƒใ€่ง†้ข‘ๆตๅ’Œๅ„็งๆ•ฐๆฎๆ ผๅผใ€‚่กจๆ ผ่ฟ˜่กจ็คบไบ†ๆฏ็งๆฅๆบๆ˜ฏๅฆๅฏไปฅๅœจๆตๅผๆจกๅผไธ‹ไฝฟ็”จ๏ผŒไฝฟ็”จๅ‚ๆ•ฐ `stream=True` โœ…ใ€‚ๆตๅผๆจกๅผๅฏนไบŽๅค„็†่ง†้ข‘ๆˆ–ๅฎžๆ—ถๆต้žๅธธๆœ‰ๅˆฉ๏ผŒๅ› ไธบๅฎƒๅˆ›ๅปบไบ†็ป“ๆžœ็š„็”Ÿๆˆๅ™จ๏ผŒ่€Œไธๆ˜ฏๅฐ†ๆ‰€ๆœ‰ๅธงๅŠ ่ฝฝๅˆฐๅ†…ๅญ˜ใ€‚ + +!!! Tip "ๆ็คบ" + + ไฝฟ็”จ `stream=True` ๅค„็†้•ฟ่ง†้ข‘ๆˆ–ๅคงๅž‹ๆ•ฐๆฎ้›†ๆฅ้ซ˜ๆ•ˆๅœฐ็ฎก็†ๅ†…ๅญ˜ใ€‚ๅฝ“ `stream=False` ๆ—ถ๏ผŒๆ‰€ๆœ‰ๅธงๆˆ–ๆ•ฐๆฎ็‚น็š„็ป“ๆžœ้ƒฝๅฐ†ๅญ˜ๅ‚จๅœจๅ†…ๅญ˜ไธญ๏ผŒ่ฟ™ๅฏ่ƒฝๅพˆๅฟซๅฏผ่‡ดๅ†…ๅญ˜ไธ่ถณ้”™่ฏฏใ€‚็›ธๅฏนๅœฐ๏ผŒ`stream=True` ไฝฟ็”จ็”Ÿๆˆๅ™จ๏ผŒๅชไฟ็•™ๅฝ“ๅ‰ๅธงๆˆ–ๆ•ฐๆฎ็‚น็š„็ป“ๆžœๅœจๅ†…ๅญ˜ไธญ๏ผŒๆ˜พ่‘—ๅ‡ๅฐ‘ไบ†ๅ†…ๅญ˜ๆถˆ่€—๏ผŒ้˜ฒๆญขๅ†…ๅญ˜ไธ่ถณ้—ฎ้ข˜ใ€‚ + +| ๆฅๆบ | ๅ‚ๆ•ฐ | ็ฑปๅž‹ | ๅค‡ๆณจ | +|-----------|--------------------------------------------|----------------|----------------------------------------------------| +| ๅ›พๅƒ | `'image.jpg'` | `str` ๆˆ– `Path` | ๅ•ไธชๅ›พๅƒๆ–‡ไปถใ€‚ | +| URL | `'https://ultralytics.com/images/bus.jpg'` | `str` | ๅ›พๅƒ็š„ URL ๅœฐๅ€ใ€‚ | +| ๆˆชๅฑ | `'screen'` | `str` | ๆˆชๅ–ๅฑๅน•ๅ›พๅƒใ€‚ | +| PIL | `Image.open('im.jpg')` | `PIL.Image` | RGB ้€š้“็š„ HWC ๆ ผๅผๅ›พๅƒใ€‚ | +| OpenCV | `cv2.imread('im.jpg')` | `np.ndarray` | BGR ้€š้“็š„ HWC ๆ ผๅผๅ›พๅƒ `uint8 (0-255)`ใ€‚ | +| numpy | `np.zeros((640,1280,3))` | `np.ndarray` | BGR ้€š้“็š„ HWC ๆ ผๅผๅ›พๅƒ `uint8 (0-255)`ใ€‚ | +| torch | `torch.zeros(16,3,320,640)` | `torch.Tensor` | RGB ้€š้“็š„ BCHW ๆ ผๅผๅ›พๅƒ `float32 (0.0-1.0)`ใ€‚ | +| CSV | `'sources.csv'` | `str` ๆˆ– `Path` | ๅŒ…ๅซๅ›พๅƒใ€่ง†้ข‘ๆˆ–็›ฎๅฝ•่ทฏๅพ„็š„ CSV ๆ–‡ไปถใ€‚ | +| ่ง†้ข‘ โœ… | `'video.mp4'` | `str` ๆˆ– `Path` | ๅฆ‚ MP4, AVI ็ญ‰ๆ ผๅผ็š„่ง†้ข‘ๆ–‡ไปถใ€‚ | +| ็›ฎๅฝ• โœ… | `'path/'` | `str` ๆˆ– `Path` | ๅŒ…ๅซๅ›พๅƒๆˆ–่ง†้ข‘ๆ–‡ไปถ็š„็›ฎๅฝ•่ทฏๅพ„ใ€‚ | +| ้€š้…็ฌฆ โœ… | `'path/*.jpg'` | `str` | ๅŒน้…ๅคšไธชๆ–‡ไปถ็š„้€š้…็ฌฆๆจกๅผใ€‚ไฝฟ็”จ `*` ๅญ—็ฌฆไฝœไธบ้€š้…็ฌฆใ€‚ | +| YouTube โœ… | `'https://youtu.be/LNwODJXcvt4'` | `str` | YouTube ่ง†้ข‘็š„ URL ๅœฐๅ€ใ€‚ | +| ๆตๅช’ไฝ“ โœ… | `'rtsp://example.com/media.mp4'` | `str` | RTSP, RTMP, TCP ๆˆ– IP ๅœฐๅ€็ญ‰ๆตๅ่ฎฎ็š„ URL ๅœฐๅ€ใ€‚ | +| ๅคšๆตๅช’ไฝ“ โœ… | `'list.streams'` | `str` ๆˆ– `Path` | ไธ€ไธชๆต URL ๆฏ่กŒ็š„ `*.streams` ๆ–‡ๆœฌๆ–‡ไปถ๏ผŒไพ‹ๅฆ‚ 8 ไธชๆตๅฐ†ไปฅ 8 ็š„ๆ‰นๅค„็†ๅคงๅฐ่ฟ่กŒใ€‚ | + 
+ไธ‹้ขไธบๆฏ็งๆฅๆบ็ฑปๅž‹ไฝฟ็”จไปฃ็ ็š„็คบไพ‹๏ผš + +!!! Example "้ข„ๆต‹ๆฅๆบ" + + === "ๅ›พๅƒ" + ๅฏนๅ›พๅƒๆ–‡ไปถ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ๅ›พๅƒๆ–‡ไปถ็š„่ทฏๅพ„ + source = 'path/to/image.jpg' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "ๆˆชๅฑ" + ๅฏนๅฝ“ๅ‰ๅฑๅน•ๅ†…ๅฎนไฝœไธบๆˆชๅฑ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ๅฝ“ๅ‰ๆˆชๅฑไธบๆฅๆบ + source = 'screen' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "URL" + ๅฏน้€š่ฟ‡ URL ่ฟœ็จ‹ๆ‰˜็ฎก็š„ๅ›พๅƒๆˆ–่ง†้ข‘่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰่ฟœ็จ‹ๅ›พๅƒๆˆ–่ง†้ข‘ URL + source = 'https://ultralytics.com/images/bus.jpg' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "PIL" + ๅฏนไฝฟ็”จ Python Imaging Library (PIL) ๆ‰“ๅผ€็š„ๅ›พๅƒ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + from PIL import Image + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ไฝฟ็”จ PIL ๆ‰“ๅผ€ๅ›พๅƒ + source = Image.open('path/to/image.jpg') + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "OpenCV" + ๅฏนไฝฟ็”จ OpenCV ่ฏปๅ–็š„ๅ›พๅƒ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + import cv2 + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ไฝฟ็”จ OpenCV ่ฏปๅ–ๅ›พๅƒ + source = cv2.imread('path/to/image.jpg') + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "numpy" + ๅฏน่กจ็คบไธบ numpy ๆ•ฐ็ป„็š„ๅ›พๅƒ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + import numpy as np + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅˆ›ๅปบไธ€ไธช HWC ๅฝข็Šถ (640, 640, 3) ็š„้šๆœบ numpy ๆ•ฐ็ป„๏ผŒๆ•ฐๅ€ผ่Œƒๅ›ด [0, 255] ็ฑปๅž‹ไธบ uint8 + source = np.random.randint(low=0, high=255, size=(640, 640, 3), dtype='uint8') + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "torch" + ๅฏน่กจ็คบไธบ PyTorch ๅผ ้‡็š„ๅ›พๅƒ่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + import torch + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅˆ›ๅปบไธ€ไธช BCHW ๅฝข็Šถ (1, 3, 640, 640) ็š„้šๆœบ torch ๅผ ้‡๏ผŒๆ•ฐๅ€ผ่Œƒๅ›ด [0, 1] ็ฑปๅž‹ไธบ float32 + source = torch.rand(1, 3, 640, 640, dtype=torch.float32) + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "CSV" + ๅฏน CSV ๆ–‡ไปถไธญๅˆ—ๅ‡บ็š„ๅ›พๅƒใ€URLsใ€่ง†้ข‘ๅ’Œ็›ฎๅฝ•่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + import torch + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ไธ€ไธชๅŒ…ๅซๅ›พๅƒใ€URLsใ€่ง†้ข‘ๅ’Œ็›ฎๅฝ•่ทฏๅพ„็š„ CSV ๆ–‡ไปถ่ทฏๅพ„ + source = 'path/to/file.csv' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source) # Results ๅฏน่ฑกๅˆ—่กจ + ``` + + === "่ง†้ข‘" + ๅฏน่ง†้ข‘ๆ–‡ไปถ่ฟ›่กŒๆŽจ็†ใ€‚ไฝฟ็”จ `stream=True` ๆ—ถ๏ผŒๅฏไปฅๅˆ›ๅปบไธ€ไธช Results ๅฏน่ฑก็š„็”Ÿๆˆๅ™จ๏ผŒๅ‡ๅฐ‘ๅ†…ๅญ˜ไฝฟ็”จใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰่ง†้ข‘ๆ–‡ไปถ่ทฏๅพ„ + source = 'path/to/video.mp4' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source, stream=True) # Results ๅฏน่ฑก็š„็”Ÿๆˆๅ™จ + ``` 
+ + === "็›ฎๅฝ•" + ๅฏน็›ฎๅฝ•ไธญ็š„ๆ‰€ๆœ‰ๅ›พๅƒๅ’Œ่ง†้ข‘่ฟ›่กŒๆŽจ็†ใ€‚่ฆๅŒ…ๅซๅญ็›ฎๅฝ•ไธญ็š„ๅ›พๅƒๅ’Œ่ง†้ข‘๏ผŒไฝฟ็”จ้€š้…็ฌฆๆจกๅผ๏ผŒไพ‹ๅฆ‚ `path/to/dir/**/*`ใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ๅŒ…ๅซๅ›พๅƒๅ’Œ่ง†้ข‘ๆ–‡ไปถ็”จไบŽๆŽจ็†็š„็›ฎๅฝ•่ทฏๅพ„ + source = 'path/to/dir' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source, stream=True) # Results ๅฏน่ฑก็š„็”Ÿๆˆๅ™จ + ``` + + === "้€š้…็ฌฆ" + ๅฏนไธŽ `*` ๅญ—็ฌฆๅŒน้…็š„ๆ‰€ๆœ‰ๅ›พๅƒๅ’Œ่ง†้ข‘่ฟ›่กŒๆŽจ็†ใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„ YOLOv8n ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ไธ€ไธช็›ฎๅฝ•ไธ‹ๆ‰€ๆœ‰ JPG ๆ–‡ไปถ็š„้€š้…็ฌฆๆœ็ดข + source = 'path/to/dir/*.jpg' + + # ๆˆ–ๅฎšไน‰ไธ€ไธชๅŒ…ๆ‹ฌๅญ็›ฎๅฝ•็š„ๆ‰€ๆœ‰ JPG ๆ–‡ไปถ็š„้€’ๅฝ’้€š้…็ฌฆๆœ็ดข + source = 'path/to/dir/**/*.jpg' + + # ๅฏนๆฅๆบ่ฟ›่กŒๆŽจ็† + results = model(source, stream=True) # Results ๅฏน่ฑก็š„็”Ÿๆˆๅ™จ + ``` + + === "YouTube" + ๅœจYouTube่ง†้ข‘ไธŠ่ฟ่กŒๆŽจ็†ใ€‚้€š่ฟ‡ไฝฟ็”จ`stream=True`๏ผŒๆ‚จๅฏไปฅๅˆ›ๅปบไธ€ไธชResultsๅฏน่ฑก็š„็”Ÿๆˆๅ™จ๏ผŒไปฅๅ‡ๅฐ‘้•ฟ่ง†้ข‘็š„ๅ†…ๅญ˜ไฝฟ็”จใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅฎšไน‰ๆบไธบYouTube่ง†้ข‘URL + source = 'https://youtu.be/LNwODJXcvt4' + + # ๅœจๆบไธŠ่ฟ่กŒๆŽจ็† + results = model(source, stream=True) # Resultsๅฏน่ฑก็š„็”Ÿๆˆๅ™จ + ``` + + === "Streams" + ไฝฟ็”จRTSPใ€RTMPใ€TCPๅ’ŒIPๅœฐๅ€ๅ่ฎฎๅœจ่ฟœ็จ‹ๆตๅช’ไฝ“ๆบไธŠ่ฟ่กŒๆŽจ็†ใ€‚ๅฆ‚ๆžœๅœจ`*.streams`ๆ–‡ๆœฌๆ–‡ไปถไธญๆไพ›ไบ†ๅคšไธชๆต๏ผŒๅˆ™ๅฐ†่ฟ่กŒๆ‰น้‡ๆŽจ็†๏ผŒไพ‹ๅฆ‚๏ผŒ8ไธชๆตๅฐ†ไปฅๆ‰นๅคงๅฐ8่ฟ่กŒ๏ผŒๅฆๅˆ™ๅ•ไธชๆตๅฐ†ไปฅๆ‰นๅคงๅฐ1่ฟ่กŒใ€‚ + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅ•ๆตๅช’ไฝ“ๆบๆ‰นๅคงๅฐ1ๆŽจ็† + source = 'rtsp://example.com/media.mp4' # RTSPใ€RTMPใ€TCPๆˆ–IPๆตๅช’ไฝ“ๅœฐๅ€ + + # ๅคšไธชๆตๅช’ไฝ“ๆบ็š„ๆ‰น้‡ๆŽจ็†๏ผˆไพ‹ๅฆ‚๏ผŒ8ไธชๆต็š„ๆ‰นๅคงๅฐไธบ8๏ผ‰ + source = 'path/to/list.streams' # *.streamsๆ–‡ๆœฌๆ–‡ไปถ๏ผŒๆฏ่กŒไธ€ไธชๆตๅช’ไฝ“ๅœฐๅ€ + + # ๅœจๆบไธŠ่ฟ่กŒๆŽจ็† + results = model(source, stream=True) # Resultsๅฏน่ฑก็š„็”Ÿๆˆๅ™จ + ``` + +## ๆŽจ็†ๅ‚ๆ•ฐ + +`model.predict()` ๅœจๆŽจ็†ๆ—ถๆŽฅๅ—ๅคšไธชๅ‚ๆ•ฐ๏ผŒๅฏไปฅ็”จๆฅ่ฆ†็›–้ป˜่ฎคๅ€ผ๏ผš + +!!! 
Example "็คบไพ‹" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅœจ'bus.jpg'ไธŠ่ฟ่กŒๆŽจ็†๏ผŒๅนถ้™„ๅŠ ๅ‚ๆ•ฐ + model.predict('bus.jpg', save=True, imgsz=320, conf=0.5) + ``` + +ๆ”ฏๆŒ็š„ๆ‰€ๆœ‰ๅ‚ๆ•ฐ๏ผš + +| ๅ็งฐ | ็ฑปๅž‹ | ้ป˜่ฎคๅ€ผ | ๆ่ฟฐ | +|-----------------|----------------|------------------------|------------------------------------------| +| `source` | `str` | `'ultralytics/assets'` | ๅ›พๅƒๆˆ–่ง†้ข‘็š„ๆบ็›ฎๅฝ• | +| `conf` | `float` | `0.25` | ๆฃ€ๆต‹ๅฏน่ฑก็š„็ฝฎไฟกๅบฆ้˜ˆๅ€ผ | +| `iou` | `float` | `0.7` | ็”จไบŽNMS็š„ไบคๅนถๆฏ”๏ผˆIoU๏ผ‰้˜ˆๅ€ผ | +| `imgsz` | `int or tuple` | `640` | ๅ›พๅƒๅคงๅฐ๏ผŒๅฏไปฅๆ˜ฏๆ ‡้‡ๆˆ–๏ผˆh, w๏ผ‰ๅˆ—่กจ๏ผŒไพ‹ๅฆ‚๏ผˆ640, 480๏ผ‰ | +| `half` | `bool` | `False` | ไฝฟ็”จๅŠ็ฒพๅบฆ๏ผˆFP16๏ผ‰ | +| `device` | `None or str` | `None` | ่ฟ่กŒ่ฎพๅค‡๏ผŒไพ‹ๅฆ‚ cuda device=0/1/2/3 ๆˆ– device=cpu | +| `show` | `bool` | `False` | ๅฆ‚ๆžœๅฏ่ƒฝ๏ผŒๆ˜พ็คบ็ป“ๆžœ | +| `save` | `bool` | `False` | ไฟๅญ˜ๅธฆๆœ‰็ป“ๆžœ็š„ๅ›พๅƒ | +| `save_txt` | `bool` | `False` | ๅฐ†็ป“ๆžœไฟๅญ˜ไธบ.txtๆ–‡ไปถ | +| `save_conf` | `bool` | `False` | ไฟๅญ˜ๅธฆๆœ‰็ฝฎไฟกๅบฆๅˆ†ๆ•ฐ็š„็ป“ๆžœ | +| `save_crop` | `bool` | `False` | ไฟๅญ˜ๅธฆๆœ‰็ป“ๆžœ็š„่ฃๅ‰ชๅ›พๅƒ | +| `show_labels` | `bool` | `True` | ้š่—ๆ ‡็ญพ | +| `show_conf` | `bool` | `True` | ้š่—็ฝฎไฟกๅบฆๅˆ†ๆ•ฐ | +| `max_det` | `int` | `300` | ๆฏๅผ ๅ›พๅƒ็š„ๆœ€ๅคงๆฃ€ๆต‹ๆ•ฐ้‡ | +| `vid_stride` | `bool` | `False` | ่ง†้ข‘ๅธง้€Ÿ็އ่ทณ่ทƒ | +| `stream_buffer` | `bool` | `False` | ็ผ“ๅ†ฒๆ‰€ๆœ‰ๆตๅช’ไฝ“ๅธง๏ผˆTrue๏ผ‰ๆˆ–่ฟ”ๅ›žๆœ€ๆ–ฐๅธง๏ผˆFalse๏ผ‰ | +| `line_width` | `None or int` | `None` | ่พนๆก†็บฟๅฎฝๅบฆใ€‚ๅฆ‚ๆžœไธบNone๏ผŒๅˆ™ๆŒ‰ๅ›พๅƒๅคงๅฐ็ผฉๆ”พใ€‚ | +| `visualize` | `bool` | `False` | ๅฏ่ง†ๅŒ–ๆจกๅž‹็‰นๅพ | +| `augment` | `bool` | `False` | ๅบ”็”จๅ›พๅƒๅขžๅผบๅˆฐ้ข„ๆต‹ๆบ | +| `agnostic_nms` | `bool` | `False` | ็ฑปๅˆซไธๆ•ๆ„Ÿ็š„NMS | +| `retina_masks` | `bool` | `False` | ไฝฟ็”จ้ซ˜ๅˆ†่พจ็އๅˆ†ๅ‰ฒๆŽฉ็  | +| `classes` | `None or list` | `None` | ๆŒ‰็ฑปๅˆซ่ฟ‡ๆปค็ป“ๆžœ๏ผŒไพ‹ๅฆ‚ classes=0๏ผŒๆˆ– classes=[0,2,3] | +| `boxes` | `bool` | `True` | ๅœจๅˆ†ๅ‰ฒ้ข„ๆต‹ไธญๆ˜พ็คบๆก† | + +## ๅ›พๅƒๅ’Œ่ง†้ข‘ๆ ผๅผ + +YOLOv8ๆ”ฏๆŒๅคš็งๅ›พๅƒๅ’Œ่ง†้ข‘ๆ ผๅผ๏ผŒๅฆ‚[data/utils.py](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/data/utils.py)ๆ‰€ๆŒ‡ๅฎšใ€‚่ฏทๅ‚้˜…ไธ‹่กจไบ†่งฃๆœ‰ๆ•ˆ็š„ๅŽ็ผ€ๅๅ’Œ็คบไพ‹้ข„ๆต‹ๅ‘ฝไปคใ€‚ + +### ๅ›พๅƒ + +ไธ‹่กจๅŒ…ๅซไบ†Ultralyticsๆ”ฏๆŒ็š„ๆœ‰ๆ•ˆๅ›พๅƒๆ ผๅผใ€‚ + +| ๅ›พๅƒๅŽ็ผ€ | ็คบไพ‹้ข„ๆต‹ๅ‘ฝไปค | ๅ‚่€ƒ้“พๆŽฅ | +|-------|----------------------------------|-------------------------------------------------------------------------------| +| .bmp | `yolo predict source=image.bmp` | [Microsoft BMPๆ–‡ไปถๆ ผๅผ](https://en.wikipedia.org/wiki/BMP_file_format) | +| .dng | `yolo predict source=image.dng` | [Adobe DNG](https://www.adobe.com/products/photoshop/extend.displayTab2.html) | +| .jpeg | `yolo predict source=image.jpeg` | [JPEG](https://en.wikipedia.org/wiki/JPEG) | +| .jpg | `yolo predict source=image.jpg` | [JPEG](https://en.wikipedia.org/wiki/JPEG) | +| .mpo | `yolo predict source=image.mpo` | [ๅคšๅ›พๅƒๅฏน่ฑก](https://fileinfo.com/extension/mpo) | +| .png | `yolo predict source=image.png` | [ไพฟๆบๅผ็ฝ‘็ปœๅ›พๅฝข](https://en.wikipedia.org/wiki/PNG) | +| .tif | `yolo predict source=image.tif` | [ๆ ‡็ญพๅ›พๅƒๆ–‡ไปถๆ ผๅผ](https://en.wikipedia.org/wiki/TIFF) | +| .tiff | `yolo predict source=image.tiff` | [ๆ ‡็ญพๅ›พๅƒๆ–‡ไปถๆ ผๅผ](https://en.wikipedia.org/wiki/TIFF) | +| .webp | `yolo predict source=image.webp` | 
[WebP](https://en.wikipedia.org/wiki/WebP) | +| .pfm | `yolo predict source=image.pfm` | [ไพฟๆบๅผๆตฎ็‚นๆ˜ ๅฐ„](https://en.wikipedia.org/wiki/Netpbm#File_formats) | + +### ่ง†้ข‘ + +ไปฅไธ‹่กจๆ ผๅŒ…ๅซๆœ‰ๆ•ˆ็š„Ultralytics่ง†้ข‘ๆ ผๅผใ€‚ + +| ่ง†้ข‘ๅŽ็ผ€ๅ | ็คบไพ‹้ข„ๆต‹ๅ‘ฝไปค | ๅ‚่€ƒ้“พๆŽฅ | +|-------|----------------------------------|----------------------------------------------------------------------| +| .asf | `yolo predict source=video.asf` | [้ซ˜็บง็ณป็ปŸๆ ผๅผ](https://en.wikipedia.org/wiki/Advanced_Systems_Format) | +| .avi | `yolo predict source=video.avi` | [้Ÿณ่ง†้ข‘ไบค้”™](https://en.wikipedia.org/wiki/Audio_Video_Interleave) | +| .gif | `yolo predict source=video.gif` | [ๅ›พๅฝขไบคๆขๆ ผๅผ](https://en.wikipedia.org/wiki/GIF) | +| .m4v | `yolo predict source=video.m4v` | [MPEG-4็ฌฌ14้ƒจๅˆ†](https://en.wikipedia.org/wiki/M4V) | +| .mkv | `yolo predict source=video.mkv` | [Matroska](https://en.wikipedia.org/wiki/Matroska) | +| .mov | `yolo predict source=video.mov` | [QuickTimeๆ–‡ไปถๆ ผๅผ](https://en.wikipedia.org/wiki/QuickTime_File_Format) | +| .mp4 | `yolo predict source=video.mp4` | [MPEG-4็ฌฌ14้ƒจๅˆ†](https://en.wikipedia.org/wiki/MPEG-4_Part_14) | +| .mpeg | `yolo predict source=video.mpeg` | [MPEG-1็ฌฌ2้ƒจๅˆ†](https://en.wikipedia.org/wiki/MPEG-1) | +| .mpg | `yolo predict source=video.mpg` | [MPEG-1็ฌฌ2้ƒจๅˆ†](https://en.wikipedia.org/wiki/MPEG-1) | +| .ts | `yolo predict source=video.ts` | [MPEGไผ ่พ“ๆต](https://en.wikipedia.org/wiki/MPEG_transport_stream) | +| .wmv | `yolo predict source=video.wmv` | [Windowsๅช’ไฝ“่ง†้ข‘](https://en.wikipedia.org/wiki/Windows_Media_Video) | +| .webm | `yolo predict source=video.webm` | [WebM้กน็›ฎ](https://en.wikipedia.org/wiki/WebM) | + +## ๅค„็†็ป“ๆžœ + +ๆ‰€ๆœ‰Ultralytics็š„`predict()`่ฐƒ็”จ้ƒฝๅฐ†่ฟ”ๅ›žไธ€ไธช`Results`ๅฏน่ฑกๅˆ—่กจ๏ผš + +!!! 
Example "็ป“ๆžœ" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅœจๅ›พ็‰‡ไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') # 1ไธชResultsๅฏน่ฑก็š„ๅˆ—่กจ + results = model(['bus.jpg', 'zidane.jpg']) # 2ไธชResultsๅฏน่ฑก็š„ๅˆ—่กจ + ``` + +`Results`ๅฏน่ฑกๅ…ทๆœ‰ไปฅไธ‹ๅฑžๆ€ง๏ผš + +| ๅฑžๆ€ง | ็ฑปๅž‹ | ๆ่ฟฐ | +|--------------|-----------------|------------------------------| +| `orig_img` | `numpy.ndarray` | ๅŽŸๅง‹ๅ›พๅƒ็š„numpyๆ•ฐ็ป„ใ€‚ | +| `orig_shape` | `tuple` | ๅŽŸๅง‹ๅ›พๅƒ็š„ๅฝข็Šถ๏ผŒๆ ผๅผไธบ๏ผˆ้ซ˜ๅบฆ๏ผŒๅฎฝๅบฆ๏ผ‰ใ€‚ | +| `boxes` | `Boxes, ๅฏ้€‰` | ๅŒ…ๅซๆฃ€ๆต‹่พน็•Œๆก†็š„Boxesๅฏน่ฑกใ€‚ | +| `masks` | `Masks, ๅฏ้€‰` | ๅŒ…ๅซๆฃ€ๆต‹ๆŽฉ็ ็š„Masksๅฏน่ฑกใ€‚ | +| `probs` | `Probs, ๅฏ้€‰` | ๅŒ…ๅซๆฏไธช็ฑปๅˆซ็š„ๆฆ‚็އ็š„Probsๅฏน่ฑก๏ผŒ็”จไบŽๅˆ†็ฑปไปปๅŠกใ€‚ | +| `keypoints` | `Keypoints, ๅฏ้€‰` | ๅŒ…ๅซๆฏไธชๅฏน่ฑกๆฃ€ๆต‹ๅˆฐ็š„ๅ…ณ้”ฎ็‚น็š„Keypointsๅฏน่ฑกใ€‚ | +| `speed` | `dict` | ไปฅๆฏซ็ง’ไธบๅ•ไฝ็š„ๆฏๅผ ๅ›พ็‰‡็š„้ข„ๅค„็†ใ€ๆŽจ็†ๅ’ŒๅŽๅค„็†้€Ÿๅบฆ็š„ๅญ—ๅ…ธใ€‚ | +| `names` | `dict` | ็ฑปๅˆซๅ็งฐ็š„ๅญ—ๅ…ธใ€‚ | +| `path` | `str` | ๅ›พๅƒๆ–‡ไปถ็š„่ทฏๅพ„ใ€‚ | + +`Results`ๅฏน่ฑกๅ…ทๆœ‰ไปฅไธ‹ๆ–นๆณ•๏ผš + +| ๆ–นๆณ• | ่ฟ”ๅ›ž็ฑปๅž‹ | ๆ่ฟฐ | +|-----------------|-----------------|----------------------------------------| +| `__getitem__()` | `Results` | ่ฟ”ๅ›žๆŒ‡ๅฎš็ดขๅผ•็š„Resultsๅฏน่ฑกใ€‚ | +| `__len__()` | `int` | ่ฟ”ๅ›žResultsๅฏน่ฑกไธญ็š„ๆฃ€ๆต‹ๆ•ฐ้‡ใ€‚ | +| `update()` | `None` | ๆ›ดๆ–ฐResultsๅฏน่ฑก็š„boxes, masksๅ’Œprobsๅฑžๆ€งใ€‚ | +| `cpu()` | `Results` | ๅฐ†ๆ‰€ๆœ‰ๅผ ้‡็งปๅŠจๅˆฐCPUๅ†…ๅญ˜ไธŠ็š„Resultsๅฏน่ฑก็š„ๅ‰ฏๆœฌใ€‚ | +| `numpy()` | `Results` | ๅฐ†ๆ‰€ๆœ‰ๅผ ้‡่ฝฌๆขไธบnumpyๆ•ฐ็ป„็š„Resultsๅฏน่ฑก็š„ๅ‰ฏๆœฌใ€‚ | +| `cuda()` | `Results` | ๅฐ†ๆ‰€ๆœ‰ๅผ ้‡็งปๅŠจๅˆฐGPUๅ†…ๅญ˜ไธŠ็š„Resultsๅฏน่ฑก็š„ๅ‰ฏๆœฌใ€‚ | +| `to()` | `Results` | ่ฟ”ๅ›žๅฐ†ๅผ ้‡็งปๅŠจๅˆฐๆŒ‡ๅฎš่ฎพๅค‡ๅ’Œdtype็š„Resultsๅฏน่ฑก็š„ๅ‰ฏๆœฌใ€‚ | +| `new()` | `Results` | ่ฟ”ๅ›žไธ€ไธชๅธฆๆœ‰็›ธๅŒๅ›พๅƒใ€่ทฏๅพ„ๅ’Œๅ็งฐ็š„ๆ–ฐResultsๅฏน่ฑกใ€‚ | +| `keys()` | `List[str]` | ่ฟ”ๅ›ž้ž็ฉบๅฑžๆ€งๅ็งฐ็š„ๅˆ—่กจใ€‚ | +| `plot()` | `numpy.ndarray` | ็ป˜ๅˆถๆฃ€ๆต‹็ป“ๆžœใ€‚่ฟ”ๅ›žๅธฆๆœ‰ๆณจ้‡Š็š„ๅ›พๅƒ็š„numpyๆ•ฐ็ป„ใ€‚ | +| `verbose()` | `str` | ่ฟ”ๅ›žๆฏไธชไปปๅŠก็š„ๆ—ฅๅฟ—ๅญ—็ฌฆไธฒใ€‚ | +| `save_txt()` | `None` | ๅฐ†้ข„ๆต‹ไฟๅญ˜ๅˆฐtxtๆ–‡ไปถไธญใ€‚ | +| `save_crop()` | `None` | ๅฐ†่ฃๅ‰ช็š„้ข„ๆต‹ไฟๅญ˜ๅˆฐ`save_dir/cls/file_name.jpg`ใ€‚ | +| `tojson()` | `None` | ๅฐ†ๅฏน่ฑก่ฝฌๆขไธบJSONๆ ผๅผใ€‚ | + +ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜…`Results`็ฑป็š„[ๆ–‡ๆกฃ](/../reference/engine/results.md)ใ€‚ + +### ่พน็•Œๆก†๏ผˆBoxes๏ผ‰ + +`Boxes`ๅฏน่ฑกๅฏ็”จไบŽ็ดขๅผ•ใ€ๆ“ไฝœๅ’Œ่ฝฌๆข่พน็•Œๆก†ๅˆฐไธๅŒๆ ผๅผใ€‚ + +!!! 
Example "่พน็•Œๆก†๏ผˆBoxes๏ผ‰" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅœจๅ›พ็‰‡ไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') + + # ๆŸฅ็œ‹็ป“ๆžœ + for r in results: + print(r.boxes) # ๆ‰“ๅฐๅŒ…ๅซๆฃ€ๆต‹่พน็•Œๆก†็š„Boxesๅฏน่ฑก + ``` + +ไปฅไธ‹ๆ˜ฏ`Boxes`็ฑปๆ–นๆณ•ๅ’Œๅฑžๆ€ง็š„่กจๆ ผ๏ผŒๅŒ…ๆ‹ฌๅฎƒไปฌ็š„ๅ็งฐใ€็ฑปๅž‹ๅ’Œdescription: + +| ๅ็งฐ | ็ฑปๅž‹ | ๆ่ฟฐ | +|-----------|---------------------|-------------------------| +| `cpu()` | ๆ–นๆณ• | ๅฐ†ๅฏน่ฑก็งปๅŠจๅˆฐCPUๅ†…ๅญ˜ใ€‚ | +| `numpy()` | ๆ–นๆณ• | ๅฐ†ๅฏน่ฑก่ฝฌๆขไธบnumpyๆ•ฐ็ป„ใ€‚ | +| `cuda()` | ๆ–นๆณ• | ๅฐ†ๅฏน่ฑก็งปๅŠจๅˆฐCUDAๅ†…ๅญ˜ใ€‚ | +| `to()` | ๆ–นๆณ• | ๅฐ†ๅฏน่ฑก็งปๅŠจๅˆฐๆŒ‡ๅฎš็š„่ฎพๅค‡ใ€‚ | +| `xyxy` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅxyxyๆ ผๅผ่ฟ”ๅ›ž่พน็•Œๆก†ใ€‚ | +| `conf` | ๅฑžๆ€ง (`torch.Tensor`) | ่ฟ”ๅ›ž่พน็•Œๆก†็š„็ฝฎไฟกๅบฆๅ€ผใ€‚ | +| `cls` | ๅฑžๆ€ง (`torch.Tensor`) | ่ฟ”ๅ›ž่พน็•Œๆก†็š„็ฑปๅˆซๅ€ผใ€‚ | +| `id` | ๅฑžๆ€ง (`torch.Tensor`) | ่ฟ”ๅ›ž่พน็•Œๆก†็š„่ทŸ่ธชID๏ผˆๅฆ‚ๆžœๅฏ็”จ๏ผ‰ใ€‚ | +| `xywh` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅxywhๆ ผๅผ่ฟ”ๅ›ž่พน็•Œๆก†ใ€‚ | +| `xyxyn` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅๅŽŸๅง‹ๅ›พๅƒๅคงๅฐๅฝ’ไธ€ๅŒ–็š„xyxyๆ ผๅผ่ฟ”ๅ›ž่พน็•Œๆก†ใ€‚ | +| `xywhn` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅๅŽŸๅง‹ๅ›พๅƒๅคงๅฐๅฝ’ไธ€ๅŒ–็š„xywhๆ ผๅผ่ฟ”ๅ›ž่พน็•Œๆก†ใ€‚ | + +ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜…`Boxes`็ฑป็š„[ๆ–‡ๆกฃ](/../reference/engine/results.md)ใ€‚ + +### ๆŽฉ็ ๏ผˆMasks๏ผ‰ + +`Masks`ๅฏน่ฑกๅฏ็”จไบŽ็ดขๅผ•ใ€ๆ“ไฝœๅ’Œๅฐ†ๆŽฉ็ ่ฝฌๆขไธบๅˆ†ๆฎตใ€‚ + +!!! Example "ๆŽฉ็ ๏ผˆMasks๏ผ‰" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8n-segๅˆ†ๅ‰ฒๆจกๅž‹ + model = YOLO('yolov8n-seg.pt') + + # ๅœจๅ›พ็‰‡ไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') # resultsๅˆ—่กจ + + # ๆŸฅ็œ‹็ป“ๆžœ + for r in results: + print(r.masks) # ๆ‰“ๅฐๅŒ…ๅซๆฃ€ๆต‹ๅˆฐ็š„ๅฎžไพ‹ๆŽฉ็ ็š„Masksๅฏน่ฑก + ``` + +ไปฅไธ‹ๆ˜ฏ`Masks`็ฑปๆ–นๆณ•ๅ’Œๅฑžๆ€ง็š„่กจๆ ผ๏ผŒๅŒ…ๆ‹ฌๅฎƒไปฌ็š„ๅ็งฐใ€็ฑปๅž‹ๅ’Œdescription: + +| ๅ็งฐ | ็ฑปๅž‹ | ๆ่ฟฐ | +|-----------|---------------------|----------------------| +| `cpu()` | ๆ–นๆณ• | ๅฐ†ๆŽฉ็ ๅผ ้‡่ฟ”ๅ›žๅˆฐCPUๅ†…ๅญ˜ใ€‚ | +| `numpy()` | ๆ–นๆณ• | ๅฐ†ๆŽฉ็ ๅผ ้‡่ฝฌๆขไธบnumpyๆ•ฐ็ป„ใ€‚ | +| `cuda()` | ๆ–นๆณ• | ๅฐ†ๆŽฉ็ ๅผ ้‡่ฟ”ๅ›žๅˆฐGPUๅ†…ๅญ˜ใ€‚ | +| `to()` | ๆ–นๆณ• | ๅฐ†ๆŽฉ็ ๅผ ้‡ๅธฆๆœ‰ๆŒ‡ๅฎš่ฎพๅค‡ๅ’Œdtype่ฟ”ๅ›žใ€‚ | +| `xyn` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅๅผ ้‡่กจ็คบ็š„ๅฝ’ไธ€ๅŒ–ๅˆ†ๆฎต็š„ๅˆ—่กจใ€‚ | +| `xy` | ๅฑžๆ€ง (`torch.Tensor`) | ไปฅๅƒ็ด ๅๆ ‡่กจ็คบ็š„ๅˆ†ๆฎต็š„ๅผ ้‡ๅˆ—่กจใ€‚ | + +ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜…`Masks`็ฑป็š„[ๆ–‡ๆกฃ](/../reference/engine/results.md)ใ€‚ + +### ๅ…ณ้”ฎ็‚น (Keypoints) + +`Keypoints` ๅฏน่ฑกๅฏไปฅ็”จไบŽ็ดขๅผ•ใ€ๆ“ไฝœๅ’Œ่ง„่ŒƒๅŒ–ๅๆ ‡ใ€‚ + +!!! 
Example "ๅ…ณ้”ฎ็‚น" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8n-pose ๅงฟๆ€ๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') + + # ๅœจๅ›พๅƒไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') # ็ป“ๆžœๅˆ—่กจ + + # ๆŸฅ็œ‹็ป“ๆžœ + for r in results: + print(r.keypoints) # ๆ‰“ๅฐๅŒ…ๅซๆฃ€ๆต‹ๅˆฐ็š„ๅ…ณ้”ฎ็‚น็š„Keypointsๅฏน่ฑก + ``` + +ไปฅไธ‹ๆ˜ฏ`Keypoints`็ฑปๆ–นๆณ•ๅ’Œๅฑžๆ€ง็š„่กจๆ ผ๏ผŒๅŒ…ๆ‹ฌๅฎƒไปฌ็š„ๅ็งฐใ€็ฑปๅž‹ๅ’Œdescription: + +| ๅ็งฐ | ็ฑปๅž‹ | ๆ่ฟฐ | +|-----------|--------------------|---------------------------| +| `cpu()` | ๆ–นๆณ• | ่ฟ”ๅ›žCPUๅ†…ๅญ˜ไธŠ็š„ๅ…ณ้”ฎ็‚นๅผ ้‡ใ€‚ | +| `numpy()` | ๆ–นๆณ• | ่ฟ”ๅ›žไฝœไธบnumpyๆ•ฐ็ป„็š„ๅ…ณ้”ฎ็‚นๅผ ้‡ใ€‚ | +| `cuda()` | ๆ–นๆณ• | ่ฟ”ๅ›žGPUๅ†…ๅญ˜ไธŠ็š„ๅ…ณ้”ฎ็‚นๅผ ้‡ใ€‚ | +| `to()` | ๆ–นๆณ• | ่ฟ”ๅ›žๆŒ‡ๅฎš่ฎพๅค‡ๅ’Œdtype็š„ๅ…ณ้”ฎ็‚นๅผ ้‡ใ€‚ | +| `xyn` | ๅฑžๆ€ง๏ผˆ`torch.Tensor`๏ผ‰ | ่ง„่ŒƒๅŒ–ๅ…ณ้”ฎ็‚น็š„ๅˆ—่กจ๏ผŒ่กจ็คบไธบๅผ ้‡ใ€‚ | +| `xy` | ๅฑžๆ€ง๏ผˆ`torch.Tensor`๏ผ‰ | ไปฅๅƒ็ด ๅๆ ‡่กจ็คบ็š„ๅ…ณ้”ฎ็‚นๅˆ—่กจ๏ผŒ่กจ็คบไธบๅผ ้‡ใ€‚ | +| `conf` | ๅฑžๆ€ง๏ผˆ`torch.Tensor`๏ผ‰ | ่ฟ”ๅ›žๅ…ณ้”ฎ็‚น็š„็ฝฎไฟกๅบฆๅ€ผ๏ผˆๅฆ‚ๆžœๆœ‰๏ผ‰๏ผŒๅฆๅˆ™่ฟ”ๅ›žNoneใ€‚ | + +ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜…`Keypoints`็ฑป[ๆ–‡ๆกฃ](/../reference/engine/results.md)ใ€‚ + +### ๆฆ‚็އ (Probs) + +`Probs` ๅฏน่ฑกๅฏไปฅ็”จไบŽ็ดขๅผ•๏ผŒ่Žทๅ–ๅˆ†็ฑป็š„ `top1` ๅ’Œ `top5` ็ดขๅผ•ๅ’Œๅˆ†ๆ•ฐใ€‚ + +!!! Example "ๆฆ‚็އ" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8n-cls ๅˆ†็ฑปๆจกๅž‹ + model = YOLO('yolov8n-cls.pt') + + # ๅœจๅ›พๅƒไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') # ็ป“ๆžœๅˆ—่กจ + + # ๆŸฅ็œ‹็ป“ๆžœ + for r in results: + print(r.probs) # ๆ‰“ๅฐๅŒ…ๅซๆฃ€ๆต‹ๅˆฐ็š„็ฑปๅˆซๆฆ‚็އ็š„Probsๅฏน่ฑก + ``` + +ไปฅไธ‹ๆ˜ฏ`Probs`็ฑป็š„ๆ–นๆณ•ๅ’Œๅฑžๆ€ง็š„่กจๆ ผๆ€ป็ป“๏ผš + +| ๅ็งฐ | ็ฑปๅž‹ | ๆ่ฟฐ | +|------------|--------------------|-------------------------| +| `cpu()` | ๆ–นๆณ• | ่ฟ”ๅ›žCPUๅ†…ๅญ˜ไธŠ็š„ๆฆ‚็އๅผ ้‡็š„ๅ‰ฏๆœฌใ€‚ | +| `numpy()` | ๆ–นๆณ• | ่ฟ”ๅ›žๆฆ‚็އๅผ ้‡็š„ๅ‰ฏๆœฌไฝœไธบnumpyๆ•ฐ็ป„ใ€‚ | +| `cuda()` | ๆ–นๆณ• | ่ฟ”ๅ›žGPUๅ†…ๅญ˜ไธŠ็š„ๆฆ‚็އๅผ ้‡็š„ๅ‰ฏๆœฌใ€‚ | +| `to()` | ๆ–นๆณ• | ่ฟ”ๅ›žๅธฆๆœ‰ๆŒ‡ๅฎš่ฎพๅค‡ๅ’Œdtype็š„ๆฆ‚็އๅผ ้‡็š„ๅ‰ฏๆœฌใ€‚ | +| `top1` | ๅฑžๆ€ง๏ผˆ`int`๏ผ‰ | ็ฌฌ1็ฑป็š„็ดขๅผ•ใ€‚ | +| `top5` | ๅฑžๆ€ง๏ผˆ`list[int]`๏ผ‰ | ๅ‰5็ฑป็š„็ดขๅผ•ใ€‚ | +| `top1conf` | ๅฑžๆ€ง๏ผˆ`torch.Tensor`๏ผ‰ | ็ฌฌ1็ฑป็š„็ฝฎไฟกๅบฆใ€‚ | +| `top5conf` | ๅฑžๆ€ง๏ผˆ`torch.Tensor`๏ผ‰ | ๅ‰5็ฑป็š„็ฝฎไฟกๅบฆใ€‚ | + +ๆœ‰ๅ…ณๆ›ดๅคš่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚้˜…`Probs`็ฑป[ๆ–‡ๆกฃ](/../reference/engine/results.md)ใ€‚ + +## ็ป˜ๅˆถ็ป“ๆžœ + +ๆ‚จๅฏไปฅไฝฟ็”จ`Result`ๅฏน่ฑก็š„`plot()`ๆ–นๆณ•ๆฅๅฏ่ง†ๅŒ–้ข„ๆต‹็ป“ๆžœใ€‚ๅฎƒไผšๅฐ†`Results`ๅฏน่ฑกไธญๅŒ…ๅซ็š„ๆ‰€ๆœ‰้ข„ๆต‹็ฑปๅž‹๏ผˆๆก†ใ€ๆŽฉ็ ใ€ๅ…ณ้”ฎ็‚นใ€ๆฆ‚็އ็ญ‰๏ผ‰็ป˜ๅˆถๅˆฐไธ€ไธชnumpyๆ•ฐ็ป„ไธŠ๏ผŒ็„ถๅŽๅฏไปฅๆ˜พ็คบๆˆ–ไฟๅญ˜ใ€‚ + +!!! 
Example "็ป˜ๅˆถ" + + ```python + from PIL import Image + from ultralytics import YOLO + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOv8nๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๅœจ'bus.jpg'ไธŠ่ฟ่กŒๆŽจ็† + results = model('bus.jpg') # ็ป“ๆžœๅˆ—่กจ + + # ๅฑ•็คบ็ป“ๆžœ + for r in results: + im_array = r.plot() # ็ป˜ๅˆถๅŒ…ๅซ้ข„ๆต‹็ป“ๆžœ็š„BGR numpyๆ•ฐ็ป„ + im = Image.fromarray(im_array[..., ::-1]) # RGB PILๅ›พๅƒ + im.show() # ๆ˜พ็คบๅ›พๅƒ + im.save('results.jpg') # ไฟๅญ˜ๅ›พๅƒ + ``` + + `plot()`ๆ–นๆณ•ๆ”ฏๆŒไปฅไธ‹ๅ‚ๆ•ฐ๏ผš + + | ๅ‚ๆ•ฐ | ็ฑปๅž‹ | ๆ่ฟฐ | ้ป˜่ฎคๅ€ผ | + |---------------|-----------------|------------------------------------------------------------------------|---------------| + | `conf` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถๆฃ€ๆต‹็ฝฎไฟกๅบฆๅˆ†ๆ•ฐใ€‚ | `True` | + | `line_width` | `float` | ่พนๆก†็บฟๅฎฝๅบฆใ€‚ๅฆ‚ๆžœไธบNone๏ผŒๅˆ™ๆŒ‰ๅ›พๅƒๅคงๅฐ็ผฉๆ”พใ€‚ | `None` | + | `font_size` | `float` | ๆ–‡ๆœฌๅญ—ไฝ“ๅคงๅฐใ€‚ๅฆ‚ๆžœไธบNone๏ผŒๅˆ™ๆŒ‰ๅ›พๅƒๅคงๅฐ็ผฉๆ”พใ€‚ | `None` | + | `font` | `str` | ๆ–‡ๆœฌๅญ—ไฝ“ใ€‚ | `'Arial.ttf'` | + | `pil` | `bool` | ๆ˜ฏๅฆๅฐ†ๅ›พๅƒ่ฟ”ๅ›žไธบPILๅ›พๅƒใ€‚ | `False` | + | `img` | `numpy.ndarray` | ็ป˜ๅˆถๅˆฐๅฆไธ€ไธชๅ›พๅƒไธŠใ€‚ๅฆ‚ๆžœๆฒกๆœ‰๏ผŒๅˆ™็ป˜ๅˆถๅˆฐๅŽŸๅง‹ๅ›พๅƒไธŠใ€‚ | `None` | + | `im_gpu` | `torch.Tensor` | ๅฝข็Šถไธบ(1, 3, 640, 640)็š„่ง„่ŒƒๅŒ–GPUๅ›พๅƒ๏ผŒ็”จไบŽๆ›ดๅฟซๅœฐ็ป˜ๅˆถๆŽฉ็ ใ€‚ | `None` | + | `kpt_radius` | `int` | ็ป˜ๅˆถๅ…ณ้”ฎ็‚น็š„ๅŠๅพ„ใ€‚้ป˜่ฎคไธบ5ใ€‚ | `5` | + | `kpt_line` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถ่ฟžๆŽฅๅ…ณ้”ฎ็‚น็š„็บฟๆกใ€‚ | `True` | + | `labels` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถ่พนๆก†ๆ ‡็ญพใ€‚ | `True` | + | `boxes` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถ่พนๆก†ใ€‚ | `True` | + | `masks` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถๆŽฉ็ ใ€‚ | `True` | + | `probs` | `bool` | ๆ˜ฏๅฆ็ป˜ๅˆถๅˆ†็ฑปๆฆ‚็އ | `True` | + +## ็บฟ็จ‹ๅฎ‰ๅ…จๆŽจ็† + +ๅœจๅคš็บฟ็จ‹ไธญๅนถ่กŒ่ฟ่กŒๅคšไธชYOLOๆจกๅž‹ๆ—ถ๏ผŒ็กฎไฟๆŽจ็†่ฟ‡็จ‹็š„็บฟ็จ‹ๅฎ‰ๅ…จๆ€ง่‡ณๅ…ณ้‡่ฆใ€‚็บฟ็จ‹ๅฎ‰ๅ…จ็š„ๆŽจ็†ไฟ่ฏไบ†ๆฏไธช็บฟ็จ‹็š„้ข„ๆต‹็ป“ๆžœๆ˜ฏ้š”็ฆป็š„๏ผŒไธไผš็›ธไบ’ๅนฒๆ‰ฐ๏ผŒ้ฟๅ…็ซžๆ€ๆกไปถ๏ผŒ็กฎไฟ่พ“ๅ‡บ็š„ไธ€่‡ดๆ€งๅ’Œๅฏ้ ๆ€งใ€‚ + +ๅœจๅคš็บฟ็จ‹ๅบ”็”จไธญไฝฟ็”จYOLOๆจกๅž‹ๆ—ถ๏ผŒ้‡่ฆ็š„ๆ˜ฏไธบๆฏไธช็บฟ็จ‹ๅฎžไพ‹ๅŒ–ๅ•็‹ฌ็š„ๆจกๅž‹ๅฏน่ฑก๏ผŒๆˆ–ไฝฟ็”จ็บฟ็จ‹ๆœฌๅœฐๅญ˜ๅ‚จๆฅ้˜ฒๆญขๅ†ฒ็ช๏ผš + +!!! Example "็บฟ็จ‹ๅฎ‰ๅ…จๆŽจ็†" + + ๅœจๆฏไธช็บฟ็จ‹ๅ†…ๅฎžไพ‹ๅŒ–ๅ•ไธชๆจกๅž‹ไปฅๅฎž็Žฐ็บฟ็จ‹ๅฎ‰ๅ…จ็š„ๆŽจ็†๏ผš + ```python + from ultralytics import YOLO + from threading import Thread + + def thread_safe_predict(image_path): + # ๅœจ็บฟ็จ‹ๅ†…ๅฎžไพ‹ๅŒ–ๆ–ฐๆจกๅž‹ + local_model = YOLO("yolov8n.pt") + results = local_model.predict(image_path) + # ๅค„็†็ป“ๆžœ + + # ๅฏๅŠจๆ‹ฅๆœ‰ๅ„่‡ชๆจกๅž‹ๅฎžไพ‹็š„็บฟ็จ‹ + Thread(target=thread_safe_predict, args=("image1.jpg",)).start() + Thread(target=thread_safe_predict, args=("image2.jpg",)).start() + ``` + +ๆœ‰ๅ…ณYOLOๆจกๅž‹็บฟ็จ‹ๅฎ‰ๅ…จๆŽจ็†็š„ๆทฑๅ…ฅ่ฎจ่ฎบๅ’Œ้€ๆญฅๆŒ‡ๅฏผ๏ผŒ่ฏทๅ‚้˜…ๆˆ‘ไปฌ็š„[YOLO็บฟ็จ‹ๅฎ‰ๅ…จๆŽจ็†ๆŒ‡ๅ—](/../guides/yolo-thread-safe-inference.md)ใ€‚่ฏฅๆŒ‡ๅ—ๅฐ†ไธบๆ‚จๆไพ›้ฟๅ…ๅธธ่ง้™ท้˜ฑๅนถ็กฎไฟๅคš็บฟ็จ‹ๆŽจ็†้กบๅˆฉ่ฟ›่กŒๆ‰€้œ€็š„ๆ‰€ๆœ‰ๅฟ…่ฆไฟกๆฏใ€‚ + +## ๆตๅช’ไฝ“ๆบ`for`ๅพช็Žฏ + +ไปฅไธ‹ๆ˜ฏไฝฟ็”จOpenCV๏ผˆ`cv2`๏ผ‰ๅ’ŒYOLOv8ๅœจ่ง†้ข‘ๅธงไธŠ่ฟ่กŒๆŽจ็†็š„Python่„šๆœฌใ€‚ๆญค่„šๆœฌๅ‡่ฎพๆ‚จๅทฒ็ปๅฎ‰่ฃ…ไบ†ๅฟ…่ฆ็š„ๅŒ…๏ผˆ`opencv-python`ๅ’Œ`ultralytics`๏ผ‰ใ€‚ + +!!! 
Example "ๆตๅช’ไฝ“forๅพช็Žฏ" + + ```python + import cv2 + from ultralytics import YOLO + + # ๅŠ ่ฝฝYOLOv8ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๆ‰“ๅผ€่ง†้ข‘ๆ–‡ไปถ + video_path = "path/to/your/video/file.mp4" + cap = cv2.VideoCapture(video_path) + + # ้ๅކ่ง†้ข‘ๅธง + while cap.isOpened(): + # ไปŽ่ง†้ข‘ไธญ่ฏปๅ–ไธ€ๅธง + success, frame = cap.read() + + if success: + # ๅœจ่ฏฅๅธงไธŠ่ฟ่กŒYOLOv8ๆŽจ็† + results = model(frame) + + # ๅœจๅธงไธŠๅฏ่ง†ๅŒ–็ป“ๆžœ + annotated_frame = results[0].plot() + + # ๆ˜พ็คบๅธฆๆณจ้‡Š็š„ๅธง + cv2.imshow("YOLOv8ๆŽจ็†", annotated_frame) + + # ๅฆ‚ๆžœๆŒ‰ไธ‹'q'ๅˆ™ไธญๆ–ญๅพช็Žฏ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ๅฆ‚ๆžœ่ง†้ข‘็ป“ๆŸๅˆ™ไธญๆ–ญๅพช็Žฏ + break + + # ้‡Šๆ”พ่ง†้ข‘ๆ•่Žทๅฏน่ฑกๅนถๅ…ณ้—ญๆ˜พ็คบ็ช—ๅฃ + cap.release() + cv2.destroyAllWindows() + ``` + +ๆญค่„šๆœฌๅฐ†ๅฏน่ง†้ข‘็š„ๆฏไธ€ๅธง่ฟ›่กŒ้ข„ๆต‹๏ผŒๅฏ่ง†ๅŒ–็ป“ๆžœ๏ผŒๅนถๅœจ็ช—ๅฃไธญๆ˜พ็คบใ€‚ๆŒ‰ไธ‹'q'้”ฎๅฏไปฅ้€€ๅ‡บๅพช็Žฏใ€‚ diff --git a/ultralytics/docs/zh/modes/predict.md:Zone.Identifier b/ultralytics/docs/zh/modes/predict.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/predict.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/track.md b/ultralytics/docs/zh/modes/track.md new file mode 100755 index 0000000..95e6745 --- /dev/null +++ b/ultralytics/docs/zh/modes/track.md @@ -0,0 +1,273 @@ +--- +comments: true +description: ๅญฆไน ๅฆ‚ไฝ•ไฝฟ็”จUltralytics YOLO่ฟ›่กŒ่ง†้ข‘ๆตไธญ็š„็‰ฉไฝ“่ฟฝ่ธชใ€‚ๆŒ‡ๅ—ๅŒ…ๆ‹ฌไฝฟ็”จไธๅŒ็š„่ฟฝ่ธชๅ™จๅ’Œ่‡ชๅฎšไน‰่ฟฝ่ธชๅ™จ้…็ฝฎใ€‚ +keywords: Ultralytics, YOLO, ็‰ฉไฝ“่ฟฝ่ธช, ่ง†้ข‘ๆต, BoT-SORT, ByteTrack, Python ๆŒ‡ๅ—, CLI ๆŒ‡ๅ— +--- + +# ไฝฟ็”จUltralytics YOLO่ฟ›่กŒๅคš็‰ฉไฝ“่ฟฝ่ธช + +ๅคš็‰ฉไฝ“่ฟฝ่ธช็คบไพ‹ + +่ง†้ข‘ๅˆ†ๆž้ข†ๅŸŸ็š„็‰ฉไฝ“่ฟฝ่ธชๆ˜ฏไธ€้กนๅ…ณ้”ฎไปปๅŠก๏ผŒๅฎƒไธไป…่ƒฝๆ ‡่ฏ†ๅ‡บๅธงๅ†…็‰ฉไฝ“็š„ไฝ็ฝฎๅ’Œ็ฑปๅˆซ๏ผŒ่ฟ˜่ƒฝๅœจ่ง†้ข‘่ฟ›่กŒ่ฟ‡็จ‹ไธญไธบๆฏไธชๆฃ€ๆต‹ๅˆฐ็š„็‰ฉไฝ“ไฟๆŒไธ€ไธชๅ”ฏไธ€็š„IDใ€‚ๅบ”็”จๅœบๆ™ฏๆ— ้™ๅนฟ้˜”โ€”โ€”ไปŽ็›‘ๆŽงไธŽๅฎ‰ๅ…จๅˆฐๅฎžๆ—ถไฝ“่‚ฒๅˆ†ๆžใ€‚ + +## ไธบไป€ไนˆ้€‰ๆ‹ฉUltralytics YOLO่ฟ›่กŒ็‰ฉไฝ“่ฟฝ่ธช๏ผŸ + +Ultralytics ่ฟฝ่ธชๅ™จ็š„่พ“ๅ‡บไธŽๆ ‡ๅ‡†็š„็‰ฉไฝ“ๆฃ€ๆต‹็ป“ๆžœไธ€่‡ด๏ผŒไฝ†ๅขžๅŠ ไบ†็‰ฉไฝ“ID็š„้™„ๅŠ ๅ€ผใ€‚่ฟ™ไฝฟๅ…ถๆ˜“ไบŽ่ฟฝ่ธช่ง†้ข‘ๆตไธญ็š„็‰ฉไฝ“ๅนถ่ฟ›่กŒๅŽ็ปญๅˆ†ๆžใ€‚ไปฅไธ‹ๆ˜ฏๆ‚จๅบ”่€ƒ่™‘ไฝฟ็”จUltralytics YOLOๆฅๆปก่ถณๆ‚จ็‰ฉไฝ“่ฟฝ่ธช้œ€ๆฑ‚็š„ๅŽŸๅ› ๏ผš + +- **ๆ•ˆ็އ๏ผš** ๅฎžๆ—ถๅค„็†่ง†้ข‘ๆต๏ผŒๅŒๆ—ถไฟๆŒๅ‡†็กฎๆ€งใ€‚ +- **็ตๆดปๆ€ง๏ผš** ๆ”ฏๆŒๅคš็ง่ฟฝ่ธช็ฎ—ๆณ•ๅ’Œ้…็ฝฎใ€‚ +- **ๆ˜“็”จๆ€ง๏ผš** ็ฎ€ๅ•็š„Python APIๅ’ŒCLI้€‰้กน๏ผŒไพฟไบŽๅฟซ้€Ÿ้›†ๆˆๅ’Œ้ƒจ็ฝฒใ€‚ +- **ๅฏๅฎšๅˆถๆ€ง๏ผš** ๆ˜“ไบŽไฝฟ็”จ่‡ชๅฎšไน‰่ฎญ็ปƒ็š„YOLOๆจกๅž‹๏ผŒๅ…่ฎธ้›†ๆˆๅˆฐ็‰นๅฎš้ข†ๅŸŸ็š„ๅบ”็”จไธญใ€‚ + +

+ ่ง‚็œ‹๏ผšไฝฟ็”จUltralytics YOLOv8็š„็‰ฉไฝ“ๆฃ€ๆต‹ไธŽ่ฟฝ่ธชใ€‚

+ +## ๅฎž้™…ๅบ”็”จๅœบๆ™ฏ + +| ไบค้€š่ฟ่พ“ | ้›ถๅ”ฎ | ๆฐดไบงๅ…ปๆฎ– | +|:----------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------:| +| ![่ฝฆ่พ†่ฟฝ่ธช](https://github.com/RizwanMunawar/ultralytics/assets/62513924/ee6e6038-383b-4f21-ac29-b2a1c7d386ab) | ![ไบบๅ‘˜่ฟฝ่ธช](https://github.com/RizwanMunawar/ultralytics/assets/62513924/93bb4ee2-77a0-4e4e-8eb6-eb8f527f0527) | ![้ฑผ็ฑป่ฟฝ่ธช](https://github.com/RizwanMunawar/ultralytics/assets/62513924/a5146d0f-bfa8-4e0a-b7df-3c1446cd8142) | +| ่ฝฆ่พ†่ฟฝ่ธช | ไบบๅ‘˜่ฟฝ่ธช | ้ฑผ็ฑป่ฟฝ่ธช | + +## ไธ€็žฅ็‰น็‚น + +Ultralytics YOLOๆ‰ฉๅฑ•ไบ†ๅ…ถ็‰ฉไฝ“ๆฃ€ๆต‹ๅŠŸ่ƒฝ๏ผŒไปฅๆไพ›ๅผบๅคงไธ”ๅคšๅŠŸ่ƒฝ็š„็‰ฉไฝ“่ฟฝ่ธช๏ผš + +- **ๅฎžๆ—ถ่ฟฝ่ธช๏ผš** ๅœจ้ซ˜ๅธง็އ่ง†้ข‘ไธญๆ— ็ผ่ฟฝ่ธช็‰ฉไฝ“ใ€‚ +- **ๆ”ฏๆŒๅคšไธช่ฟฝ่ธชๅ™จ๏ผš** ไปŽๅคš็งๆˆ็†Ÿ็š„่ฟฝ่ธช็ฎ—ๆณ•ไธญ้€‰ๆ‹ฉใ€‚ +- **่‡ชๅฎšไน‰่ฟฝ่ธชๅ™จ้…็ฝฎ๏ผš** ้€š่ฟ‡่ฐƒๆ•ดๅ„็งๅ‚ๆ•ฐๆฅๅฎšๅˆถ่ฟฝ่ธช็ฎ—ๆณ•๏ผŒไปฅๆปก่ถณ็‰นๅฎš้œ€ๆฑ‚ใ€‚ + +## ๅฏ็”จ็š„่ฟฝ่ธชๅ™จ + +Ultralytics YOLOๆ”ฏๆŒไปฅไธ‹่ฟฝ่ธช็ฎ—ๆณ•ใ€‚ๅฏไปฅ้€š่ฟ‡ไผ ้€’็›ธๅ…ณ็š„YAML้…็ฝฎๆ–‡ไปถๅฆ‚`tracker=tracker_type.yaml`ๆฅๅฏ็”จ๏ผš + +* [BoT-SORT](https://github.com/NirAharon/BoT-SORT) - ไฝฟ็”จ `botsort.yaml` ๅฏ็”จๆญค่ฟฝ่ธชๅ™จใ€‚ +* [ByteTrack](https://github.com/ifzhang/ByteTrack) - ไฝฟ็”จ `bytetrack.yaml` ๅฏ็”จๆญค่ฟฝ่ธชๅ™จใ€‚ + +้ป˜่ฎค่ฟฝ่ธชๅ™จๆ˜ฏBoT-SORTใ€‚ + +## ่ฟฝ่ธช + +่ฆๅœจ่ง†้ข‘ๆตไธญ่ฟ่กŒ่ฟฝ่ธชๅ™จ๏ผŒ่ฏทไฝฟ็”จๅทฒ่ฎญ็ปƒ็š„ๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒๆˆ–ๅงฟๆ€ๆจกๅž‹๏ผŒไพ‹ๅฆ‚YOLOv8nใ€YOLOv8n-segๅ’ŒYOLOv8n-poseใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๅฎ˜ๆ–นๆˆ–่‡ชๅฎšไน‰ๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝไธ€ไธชๅฎ˜ๆ–น็š„ๆฃ€ๆต‹ๆจกๅž‹ + model = YOLO('yolov8n-seg.pt') # ๅŠ ่ฝฝไธ€ไธชๅฎ˜ๆ–น็š„ๅˆ†ๅ‰ฒๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') # ๅŠ ่ฝฝไธ€ไธชๅฎ˜ๆ–น็š„ๅงฟๆ€ๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝไธ€ไธช่‡ชๅฎšไน‰่ฎญ็ปƒ็š„ๆจกๅž‹ + + # ไฝฟ็”จๆจกๅž‹่ฟ›่กŒ่ฟฝ่ธช + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True) # ไฝฟ็”จ้ป˜่ฎค่ฟฝ่ธชๅ™จ่ฟ›่กŒ่ฟฝ่ธช + results = model.track(source="https://youtu.be/LNwODJXcvt4", show=True, tracker="bytetrack.yaml") # ไฝฟ็”จByteTrack่ฟฝ่ธชๅ™จ่ฟ›่กŒ่ฟฝ่ธช + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จๅ‘ฝไปค่กŒ็•Œ้ข่ฟ›่กŒๅ„็งๆจกๅž‹็š„่ฟฝ่ธช + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" # ๅฎ˜ๆ–นๆฃ€ๆต‹ๆจกๅž‹ + yolo track model=yolov8n-seg.pt source="https://youtu.be/LNwODJXcvt4" # ๅฎ˜ๆ–นๅˆ†ๅ‰ฒๆจกๅž‹ + yolo track model=yolov8n-pose.pt source="https://youtu.be/LNwODJXcvt4" # ๅฎ˜ๆ–นๅงฟๆ€ๆจกๅž‹ + yolo track model=path/to/best.pt source="https://youtu.be/LNwODJXcvt4" # ่‡ชๅฎšไน‰่ฎญ็ปƒๆจกๅž‹ + + # ไฝฟ็”จByteTrack่ฟฝ่ธชๅ™จ่ฟ›่กŒ่ฟฝ่ธช + yolo track model=path/to/best.pt tracker="bytetrack.yaml" + ``` + +ๅฆ‚ไธŠๆ‰€่ฟฐ๏ผŒDetectใ€Segmentๅ’ŒPoseๆจกๅž‹ๅœจ่ง†้ข‘ๆˆ–ๆตๅช’ไฝ“ๆบไธŠ่ฟ่กŒๆ—ถๅ‡ๅฏ่ฟ›่กŒ่ฟฝ่ธชใ€‚ + +## ้…็ฝฎ + +### ่ฟฝ่ธชๅ‚ๆ•ฐ + +่ฟฝ่ธช้…็ฝฎไธŽ้ข„ๆต‹ๆจกๅผๅ…ฑไบซไธ€ไบ›ๅฑžๆ€ง๏ผŒๅฆ‚`conf`ใ€`iou`ๅ’Œ`show`ใ€‚ๆœ‰ๅ…ณ่ฟ›ไธ€ๆญฅ้…็ฝฎ๏ผŒ่ฏทๅ‚่ง[้ข„ๆต‹](https://docs.ultralytics.com/modes/predict/)ๆจกๅž‹้กต้ขใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ้…็ฝฎ่ฟฝ่ธชๅ‚ๆ•ฐๅนถ่ฟ่กŒ่ฟฝ่ธชๅ™จ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", conf=0.3, iou=0.5, show=True) + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จๅ‘ฝไปค่กŒ็•Œ้ข้…็ฝฎ่ฟฝ่ธชๅ‚ๆ•ฐๅนถ่ฟ่กŒ่ฟฝ่ธชๅ™จ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" conf=0.3, iou=0.5 show + ``` + +### ้€‰ๆ‹ฉ่ฟฝ่ธชๅ™จ + +Ultralytics่ฟ˜ๅ…่ฎธๆ‚จไฝฟ็”จไฟฎๆ”นๅŽ็š„่ฟฝ่ธชๅ™จ้…็ฝฎๆ–‡ไปถใ€‚่ฆๆ‰ง่กŒๆญคๆ“ไฝœ๏ผŒๅช้œ€ไปŽ[ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)ไธญๅคๅˆถไธ€ไธช่ฟฝ่ธชๅ™จ้…็ฝฎๆ–‡ไปถ๏ผˆไพ‹ๅฆ‚๏ผŒ`custom_tracker.yaml`๏ผ‰ๅนถๆ นๆฎๆ‚จ็š„้œ€ๆฑ‚ไฟฎๆ”นไปปไฝ•้…็ฝฎ๏ผˆ้™คไบ†`tracker_type`๏ผ‰ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ๅนถไฝฟ็”จ่‡ชๅฎšไน‰้…็ฝฎๆ–‡ไปถ่ฟ่กŒ่ฟฝ่ธชๅ™จ + model = YOLO('yolov8n.pt') + results = model.track(source="https://youtu.be/LNwODJXcvt4", tracker='custom_tracker.yaml') + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จๅ‘ฝไปค่กŒ็•Œ้ขๅŠ ่ฝฝๆจกๅž‹ๅนถไฝฟ็”จ่‡ชๅฎšไน‰้…็ฝฎๆ–‡ไปถ่ฟ่กŒ่ฟฝ่ธชๅ™จ + yolo track model=yolov8n.pt source="https://youtu.be/LNwODJXcvt4" tracker='custom_tracker.yaml' + ``` + +ๆœ‰ๅ…ณ่ฟฝ่ธชๅ‚ๆ•ฐ็š„ๅ…จ้ขๅˆ—่กจ๏ผŒ่ฏทๅ‚่€ƒ[ultralytics/cfg/trackers](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/trackers)้กต้ขใ€‚ + +## Python็คบไพ‹ + +### ๆŒ็ปญ่ฟฝ่ธชๅพช็Žฏ + +่ฟ™ๆ˜ฏไธ€ไธชไฝฟ็”จOpenCV๏ผˆ`cv2`๏ผ‰ๅ’ŒYOLOv8ๅœจ่ง†้ข‘ๅธงไธŠ่ฟ่กŒ็‰ฉไฝ“่ฟฝ่ธช็š„Python่„šๆœฌใ€‚ๆญค่„šๆœฌๅ‡่ฎพๆ‚จๅทฒ็ปๅฎ‰่ฃ…ไบ†ๅฟ…่ฆ็š„ๅŒ…๏ผˆ`opencv-python`ๅ’Œ`ultralytics`๏ผ‰ใ€‚ๅ‚ๆ•ฐ`persist=True`ๅ‘Š่ฏ‰่ฟฝ่ธชๅ™จๅฝ“ๅ‰็š„ๅ›พๅƒๆˆ–ๅธงๆ˜ฏๅบๅˆ—ไธญ็š„ไธ‹ไธ€ไธช๏ผŒๅนถไธ”ๆœŸๆœ›ๅœจๅฝ“ๅ‰ๅ›พๅƒไธญไปŽไธŠไธ€ไธชๅ›พๅƒไธญ่Žทๅพ—่ฟฝ่ธช่ทฏๅพ„ใ€‚ + +!!! 
Example "ๅธฆ่ฟฝ่ธชๅŠŸ่ƒฝ็š„ๆตๅพช็Žฏ" + + ```python + import cv2 + from ultralytics import YOLO + + # ๅŠ ่ฝฝYOLOv8ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๆ‰“ๅผ€่ง†้ข‘ๆ–‡ไปถ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ๅพช็Žฏ้ๅކ่ง†้ข‘ๅธง + while cap.isOpened(): + # ไปŽ่ง†้ข‘่ฏปๅ–ไธ€ๅธง + success, frame = cap.read() + + if success: + # ๅœจๅธงไธŠ่ฟ่กŒYOLOv8่ฟฝ่ธช๏ผŒๆŒ็ปญ่ฟฝ่ธชๅธง้—ด็š„็‰ฉไฝ“ + results = model.track(frame, persist=True) + + # ๅœจๅธงไธŠๅฑ•็คบ็ป“ๆžœ + annotated_frame = results[0].plot() + + # ๅฑ•็คบๅธฆๆณจ้‡Š็š„ๅธง + cv2.imshow("YOLOv8 Tracking", annotated_frame) + + # ๅฆ‚ๆžœๆŒ‰ไธ‹'q'ๅˆ™้€€ๅ‡บๅพช็Žฏ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ๅฆ‚ๆžœ่ง†้ข‘็ป“ๆŸๅˆ™้€€ๅ‡บๅพช็Žฏ + break + + # ้‡Šๆ”พ่ง†้ข‘ๆ•่Žทๅฏน่ฑกๅนถๅ…ณ้—ญๆ˜พ็คบ็ช—ๅฃ + cap.release() + cv2.destroyAllWindows() + ``` + +่ฏทๆณจๆ„ไปŽ`model(frame)`ๆ›ดๆ”นไธบ`model.track(frame)`็š„ๅ˜ๅŒ–๏ผŒ่ฟ™ไฝฟ่ƒฝๅคŸๅฏ็”จ็‰ฉไฝ“่ฟฝ่ธช่€Œไธๅชๆ˜ฏ็ฎ€ๅ•็š„ๆฃ€ๆต‹ใ€‚่ฟ™ไธชไฟฎๆ”น็š„่„šๆœฌๅฐ†ๅœจ่ง†้ข‘็š„ๆฏไธ€ๅธงไธŠ่ฟ่กŒ่ฟฝ่ธชๅ™จ๏ผŒๅฏ่ง†ๅŒ–็ป“ๆžœ๏ผŒๅนถๅœจ็ช—ๅฃไธญๆ˜พ็คบๅฎƒไปฌใ€‚้€š่ฟ‡ๆŒ‰'q'ๅฏไปฅ้€€ๅ‡บๅพช็Žฏใ€‚ + +### ้šๆ—ถ้—ด็ป˜ๅˆถ่ฟฝ่ธช่ทฏๅพ„ + +ๅœจ่ฟž็ปญๅธงไธŠๅฏ่ง†ๅŒ–็‰ฉไฝ“่ฟฝ่ธช่ทฏๅพ„ๅฏไปฅๆไพ›ๆœ‰ๅ…ณ่ง†้ข‘ไธญๆฃ€ๆต‹ๅˆฐ็š„็‰ฉไฝ“็š„่ฟๅŠจๆจกๅผๅ’Œ่กŒไธบ็š„ๆœ‰ไปทๅ€ผ็š„ๆดž่งใ€‚ไฝฟ็”จUltralytics YOLOv8๏ผŒ็ป˜ๅˆถ่ฟ™ไบ›่ทฏๅพ„ๆ˜ฏไธ€ไธชๆ— ็ผไธ”้ซ˜ๆ•ˆ็š„่ฟ‡็จ‹ใ€‚ + +ๅœจไปฅไธ‹็คบไพ‹ไธญ๏ผŒๆˆ‘ไปฌๆผ”็คบไบ†ๅฆ‚ไฝ•ๅˆฉ็”จYOLOv8็š„่ฟฝ่ธชๅŠŸ่ƒฝๅœจๅคšไธช่ง†้ข‘ๅธงไธŠ็ป˜ๅˆถๆฃ€ๆต‹็‰ฉไฝ“็š„็งปๅŠจใ€‚่ฟ™ไธช่„šๆœฌๆถ‰ๅŠๆ‰“ๅผ€่ง†้ข‘ๆ–‡ไปถใ€้€ๅธง่ฏปๅ–๏ผŒๅนถไฝฟ็”จYOLOๆจกๅž‹่ฏ†ๅˆซๅนถ่ฟฝ่ธชๅ„็ง็‰ฉไฝ“ใ€‚้€š่ฟ‡ไฟ็•™ๆฃ€ๆต‹ๅˆฐ็š„่พน็•Œๆก†็š„ไธญๅฟƒ็‚นๅนถ่ฟžๆŽฅๅฎƒไปฌ๏ผŒๆˆ‘ไปฌๅฏไปฅ็ป˜ๅˆถ่กจ็คบ่ทŸ่ธช็‰ฉไฝ“่ทฏๅพ„็š„็บฟๆกใ€‚ + +!!! Example "ๅœจๅคšไธช่ง†้ข‘ๅธงไธŠ็ป˜ๅˆถ่ฟฝ่ธช่ทฏๅพ„" + + ```python + from collections import defaultdict + + import cv2 + import numpy as np + + from ultralytics import YOLO + + # ๅŠ ่ฝฝYOLOv8ๆจกๅž‹ + model = YOLO('yolov8n.pt') + + # ๆ‰“ๅผ€่ง†้ข‘ๆ–‡ไปถ + video_path = "path/to/video.mp4" + cap = cv2.VideoCapture(video_path) + + # ๅญ˜ๅ‚จ่ฟฝ่ธชๅކๅฒ + track_history = defaultdict(lambda: []) + + # ๅพช็Žฏ้ๅކ่ง†้ข‘ๅธง + while cap.isOpened(): + # ไปŽ่ง†้ข‘่ฏปๅ–ไธ€ๅธง + success, frame = cap.read() + + if success: + # ๅœจๅธงไธŠ่ฟ่กŒYOLOv8่ฟฝ่ธช๏ผŒๆŒ็ปญ่ฟฝ่ธชๅธง้—ด็š„็‰ฉไฝ“ + results = model.track(frame, persist=True) + + # ่Žทๅ–ๆก†ๅ’Œ่ฟฝ่ธชID + boxes = results[0].boxes.xywh.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + + # ๅœจๅธงไธŠๅฑ•็คบ็ป“ๆžœ + annotated_frame = results[0].plot() + + # ็ป˜ๅˆถ่ฟฝ่ธช่ทฏๅพ„ + for box, track_id in zip(boxes, track_ids): + x, y, w, h = box + track = track_history[track_id] + track.append((float(x), float(y))) # x, yไธญๅฟƒ็‚น + if len(track) > 30: # ๅœจ90ๅธงไธญไฟ็•™90ไธช่ฟฝ่ธช็‚น + track.pop(0) + + # ็ป˜ๅˆถ่ฟฝ่ธช็บฟ + points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) + cv2.polylines(annotated_frame, [points], isClosed=False, color=(230, 230, 230), thickness=10) + + # ๅฑ•็คบๅธฆๆณจ้‡Š็š„ๅธง + cv2.imshow("YOLOv8 Tracking", annotated_frame) + + # ๅฆ‚ๆžœๆŒ‰ไธ‹'q'ๅˆ™้€€ๅ‡บๅพช็Žฏ + if cv2.waitKey(1) & 0xFF == ord("q"): + break + else: + # ๅฆ‚ๆžœ่ง†้ข‘็ป“ๆŸๅˆ™้€€ๅ‡บๅพช็Žฏ + break + + # ้‡Šๆ”พ่ง†้ข‘ๆ•่Žทๅฏน่ฑกๅนถๅ…ณ้—ญๆ˜พ็คบ็ช—ๅฃ + cap.release() + cv2.destroyAllWindows() + ``` + +### ๅคš็บฟ็จ‹่ฟฝ่ธช + +ๅคš็บฟ็จ‹่ฟฝ่ธชๆไพ›ไบ†ๅŒๆ—ถๅœจๅคšไธช่ง†้ข‘ๆตไธŠ่ฟ่กŒ็‰ฉไฝ“่ฟฝ่ธช็š„่ƒฝๅŠ›ใ€‚ๅฝ“ๅค„็†ๅคšไธช่ง†้ข‘่พ“ๅ…ฅ๏ผŒไพ‹ๅฆ‚ๆฅ่‡ชๅคšไธช็›‘ๆŽงๆ‘„ๅƒๅคดๆ—ถ๏ผŒ่ฟ™ไธ€ๅŠŸ่ƒฝ็‰นๅˆซๆœ‰็”จ๏ผŒๅ…ถไธญๅนถๅ‘ๅค„็†ๅฏไปฅๅคงๅคงๆ้ซ˜ๆ•ˆ็އๅ’Œๆ€ง่ƒฝใ€‚ + 
+ๅœจๆไพ›็š„Python่„šๆœฌไธญ๏ผŒๆˆ‘ไปฌๅˆฉ็”จPython็š„`threading`ๆจกๅ—ๆฅๅŒๆ—ถ่ฟ่กŒๅคšไธช่ฟฝ่ธชๅ™จๅฎžไพ‹ใ€‚ๆฏไธช็บฟ็จ‹่ดŸ่ดฃๅœจไธ€ไธช่ง†้ข‘ๆ–‡ไปถไธŠ่ฟ่กŒ่ฟฝ่ธชๅ™จ๏ผŒๆ‰€ๆœ‰็บฟ็จ‹ๅœจๅŽๅฐๅŒๆ—ถ่ฟ่กŒใ€‚ + +ไธบไบ†็กฎไฟๆฏไธช็บฟ็จ‹ๆŽฅๆ”ถๅˆฐๆญฃ็กฎ็š„ๅ‚ๆ•ฐ๏ผˆ่ง†้ข‘ๆ–‡ไปถใ€่ฆไฝฟ็”จ็š„ๆจกๅž‹ๅ’Œๆ–‡ไปถ็ดขๅผ•๏ผ‰๏ผŒๆˆ‘ไปฌๅฎšไน‰ไบ†ไธ€ไธชๅ‡ฝๆ•ฐ`run_tracker_in_thread`๏ผŒๅฎƒๆŽฅๅ—่ฟ™ไบ›ๅ‚ๆ•ฐๅนถๅŒ…ๅซไธป่ฟฝ่ธชๅพช็Žฏใ€‚ๆญคๅ‡ฝๆ•ฐ้€ๅธง่ฏปๅ–่ง†้ข‘๏ผŒ่ฟ่กŒ่ฟฝ่ธชๅ™จ๏ผŒๅนถๆ˜พ็คบ็ป“ๆžœใ€‚ + +ๅœจ่ฟ™ไธชไพ‹ๅญไธญ๏ผŒไธคไธชไธๅŒ็š„ๆจกๅž‹่ขซไฝฟ็”จ๏ผš`yolov8n.pt`ๅ’Œ`yolov8n-seg.pt`๏ผŒๆฏไธชๆจกๅž‹้ƒฝๅœจไธๅŒ็š„่ง†้ข‘ๆ–‡ไปถไธญ่ฟฝ่ธช็‰ฉไฝ“ใ€‚่ง†้ข‘ๆ–‡ไปถๅˆ†ๅˆซๆŒ‡ๅฎšๅœจ`video_file1`ๅ’Œ`video_file2`ไธญใ€‚ + +ๅœจ`threading.Thread`ไธญๅ‚ๆ•ฐ`daemon=True`่กจ็คบ๏ผŒ่ฟ™ไบ›็บฟ็จ‹ไผšๅœจไธป็จ‹ๅบ็ป“ๆŸๆ—ถๅ…ณ้—ญใ€‚็„ถๅŽๆˆ‘ไปฌ็”จ`start()`ๆฅๅผ€ๅง‹็บฟ็จ‹๏ผŒๅนถไฝฟ็”จ`join()`ๆฅไฝฟไธป็บฟ็จ‹็ญ‰ๅพ…๏ผŒ็›ดๅˆฐไธคไธช่ฟฝ่ธช็บฟ็จ‹้ƒฝ็ป“ๆŸใ€‚ + +ๆœ€ๅŽ๏ผŒๅœจๆ‰€ๆœ‰็บฟ็จ‹ๅฎŒๆˆไปปๅŠกๅŽ๏ผŒไฝฟ็”จ`cv2.destroyAllWindows()`ๅ…ณ้—ญๆ˜พ็คบ็ป“ๆžœ็š„็ช—ๅฃใ€‚ diff --git a/ultralytics/docs/zh/modes/track.md:Zone.Identifier b/ultralytics/docs/zh/modes/track.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/track.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/train.md b/ultralytics/docs/zh/modes/train.md new file mode 100755 index 0000000..f9c8c3c --- /dev/null +++ b/ultralytics/docs/zh/modes/train.md @@ -0,0 +1,294 @@ +--- +comments: true +description: ไฝฟ็”จUltralytics YOLO่ฎญ็ปƒYOLOv8ๆจกๅž‹็š„้€ๆญฅๆŒ‡ๅ—๏ผŒๅŒ…ๆ‹ฌๅ•GPUๅ’ŒๅคšGPU่ฎญ็ปƒ็คบไพ‹ +keywords: Ultralytics, YOLOv8, YOLO, ็›ฎๆ ‡ๆฃ€ๆต‹, ่ฎญ็ปƒๆจกๅผ, ่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†, GPU่ฎญ็ปƒ, ๅคšGPU, ่ถ…ๅ‚ๆ•ฐ, CLI็คบไพ‹, Python็คบไพ‹ +--- + +# ไฝฟ็”จUltralytics YOLO่ฟ›่กŒๆจกๅž‹่ฎญ็ปƒ + +Ultralytics YOLO็”Ÿๆ€็ณป็ปŸไธŽ้›†ๆˆ + +## ๅผ•่จ€ + +่ฎญ็ปƒๆทฑๅบฆๅญฆไน ๆจกๅž‹ๆถ‰ๅŠๅ‘ๅ…ถ่พ“ๅ…ฅๆ•ฐๆฎๅนถ่ฐƒๆ•ดๅ‚ๆ•ฐ๏ผŒไปฅไพฟๅ‡†็กฎ้ข„ๆต‹ใ€‚Ultralytics YOLOv8็š„่ฎญ็ปƒๆจกๅผๆ—จๅœจๆœ‰ๆ•ˆ้ซ˜ๆ•ˆๅœฐ่ฎญ็ปƒ็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹๏ผŒๅ……ๅˆ†ๅˆฉ็”จ็Žฐไปฃ็กฌไปถๅŠŸ่ƒฝใ€‚ๆœฌๆŒ‡ๅ—ๆ—จๅœจๆถต็›–ไฝฟ็”จYOLOv8็š„ๅผบๅคงๅŠŸ่ƒฝ้›†่ฎญ็ปƒ่‡ชๅทฑๆจกๅž‹็š„ๆ‰€ๆœ‰็ป†่Š‚ใ€‚ + +

+ ่ง‚็œ‹๏ผšๅฆ‚ไฝ•ๅœจGoogle Colabไธญ็”จไฝ ็š„่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†่ฎญ็ปƒไธ€ไธชYOLOv8ๆจกๅž‹ใ€‚

+
+## ไธบไป€ไนˆ้€‰ๆ‹ฉUltralytics YOLO่ฟ›่กŒ่ฎญ็ปƒ๏ผŸ
+
+ไปฅไธ‹ๆ˜ฏ้€‰ๆ‹ฉYOLOv8่ฎญ็ปƒๆจกๅผ็š„ไธ€ไบ›ๆœ‰ๅŠ›็†็”ฑ๏ผš
+
+- **ๆ•ˆ็އ:** ๅ……ๅˆ†ๅˆฉ็”จๆ‚จ็š„็กฌไปถ่ต„ๆบ๏ผŒๆ— ่ฎบๆ‚จๆ˜ฏไฝฟ็”จๅ•GPU่ฎพ็ฝฎ่ฟ˜ๆ˜ฏ่ทจๅคšไธชGPUๆ‰ฉๅฑ•ใ€‚
+- **ๅคšๅŠŸ่ƒฝ:** ้™คไบ†ๅฏ้šๆ—ถ่Žทๅ–็š„ๆ•ฐๆฎ้›†๏ผˆๅฆ‚COCOใ€VOCๅ’ŒImageNet๏ผ‰ไน‹ๅค–๏ผŒ่ฟ˜ๅฏไปฅๅฏน่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†่ฟ›่กŒ่ฎญ็ปƒใ€‚
+- **็”จๆˆทๅ‹ๅฅฝ:** ็ฎ€ๅ•่€Œๅผบๅคง็š„CLIๅ’ŒPythonๆŽฅๅฃ๏ผŒไธบๆ‚จๆไพ›็›ดๆŽฅ็š„่ฎญ็ปƒไฝ“้ชŒใ€‚
+- **่ถ…ๅ‚ๆ•ฐ็ตๆดปๆ€ง:** ๅฏๅฎšๅˆถ็š„ๅนฟๆณ›่ถ…ๅ‚ๆ•ฐ่Œƒๅ›ด๏ผŒไปฅๅพฎ่ฐƒๆจกๅž‹ๆ€ง่ƒฝใ€‚
+
+### ่ฎญ็ปƒๆจกๅผ็š„ๅ…ณ้”ฎ็‰นๆ€ง
+
+ไปฅไธ‹ๆ˜ฏYOLOv8่ฎญ็ปƒๆจกๅผ็š„ไธ€ไบ›ๆ˜พ่‘—็‰น็‚น๏ผš
+
+- **่‡ชๅŠจๆ•ฐๆฎ้›†ไธ‹่ฝฝ:** ๆ ‡ๅ‡†ๆ•ฐๆฎ้›†ๅฆ‚COCOใ€VOCๅ’ŒImageNetๅฐ†ๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถ่‡ชๅŠจไธ‹่ฝฝใ€‚
+- **ๅคšGPUๆ”ฏๆŒ:** ๆ— ็ผๅœฐ่ทจๅคšไธชGPUๆ‰ฉๅฑ•ๆ‚จ็š„่ฎญ็ปƒๅทฅไฝœ๏ผŒไปฅๅŠ ๅฟซ่ฟ‡็จ‹ใ€‚
+- **่ถ…ๅ‚ๆ•ฐ้…็ฝฎ:** ้€š่ฟ‡YAML้…็ฝฎๆ–‡ไปถๆˆ–CLIๅ‚ๆ•ฐไฟฎๆ”น่ถ…ๅ‚ๆ•ฐ็š„้€‰้กนใ€‚
+- **ๅฏ่ง†ๅŒ–ๅ’Œ็›‘ๆŽง:** ๅฎžๆ—ถ่ทŸ่ธช่ฎญ็ปƒๆŒ‡ๆ ‡ๅนถๅฏ่ง†ๅŒ–ๅญฆไน ่ฟ‡็จ‹๏ผŒไปฅ่Žทๅพ—ๆ›ดๅฅฝ็š„ๆดžๅฏŸๅŠ›ใ€‚
+
+!!! Tip "ๅฐ่ดดๅฃซ"
+
+    * ๅฆ‚COCOใ€VOCใ€ImageNet็ญ‰YOLOv8ๆ•ฐๆฎ้›†ๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถไผš่‡ชๅŠจไธ‹่ฝฝ๏ผŒๅณ `yolo train data=coco.yaml`
+
+## ไฝฟ็”จ็คบไพ‹
+
+ๅœจCOCO128ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒYOLOv8nๆจกๅž‹100ไธช่ฝฎๆฌก๏ผŒๅ›พๅƒๅคงๅฐไธบ640ใ€‚ๅฏไปฅไฝฟ็”จ`device`ๅ‚ๆ•ฐๆŒ‡ๅฎš่ฎญ็ปƒ่ฎพๅค‡ใ€‚ๅฆ‚ๆžœๆฒกๆœ‰ไผ ้€’ๅ‚ๆ•ฐ๏ผŒๅนถไธ”ๆœ‰ๅฏ็”จ็š„GPU๏ผŒๅˆ™ๅฐ†ไฝฟ็”จGPU `device=0`๏ผŒๅฆๅˆ™ๅฐ†ไฝฟ็”จ`device=cpu`ใ€‚ๆœ‰ๅ…ณๅฎŒๆ•ดๅˆ—่กจ็š„่ฎญ็ปƒๅ‚ๆ•ฐ๏ผŒ่ฏทๅ‚่งไธ‹้ข็š„ๅ‚ๆ•ฐ้ƒจๅˆ†ใ€‚
+
+!!! Example "ๅ•GPUๅ’ŒCPU่ฎญ็ปƒ็คบไพ‹"
+
+    ่ฎพๅค‡ๅฐ†่‡ชๅŠจ็กฎๅฎšใ€‚ๅฆ‚ๆžœๆœ‰ๅฏ็”จ็š„GPU๏ผŒ้‚ฃไนˆๅฐ†ไฝฟ็”จๅฎƒ๏ผŒๅฆๅˆ™ๅฐ†ๅœจCPUไธŠๅผ€ๅง‹่ฎญ็ปƒใ€‚
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # ๅŠ ่ฝฝไธ€ไธชๆจกๅž‹
+        model = YOLO('yolov8n.yaml')  # ไปŽYAMLๅปบ็ซ‹ไธ€ไธชๆ–ฐๆจกๅž‹
+        model = YOLO('yolov8n.pt')  # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰
+        model = YOLO('yolov8n.yaml').load('yolov8n.pt')  # ไปŽYAMLๅปบ็ซ‹ๅนถ่ฝฌ็งปๆƒ้‡
+
+        # ่ฎญ็ปƒๆจกๅž‹
+        results = model.train(data='coco128.yaml', epochs=100, imgsz=640)
+        ```
+
+    === "CLI"
+
+        ```bash
+        # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹๏ผŒไปŽๅคดๅผ€ๅง‹่ฎญ็ปƒ
+        yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640
+
+        # ไปŽ้ข„่ฎญ็ปƒ*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ
+        yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640
+
+        # ไปŽYAMLๆž„ๅปบไธ€ไธชๆ–ฐๆจกๅž‹๏ผŒ่ฝฌ็งป้ข„่ฎญ็ปƒๆƒ้‡๏ผŒ็„ถๅŽๅผ€ๅง‹่ฎญ็ปƒ
+        yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640
+        ```
+
+### ๅคšGPU่ฎญ็ปƒ
+
+ๅคšGPU่ฎญ็ปƒ้€š่ฟ‡ๅœจๅคšไธชGPUไธŠๅˆ†ๅธƒ่ฎญ็ปƒ่ดŸ่ฝฝ๏ผŒๅฎž็Žฐๅฏนๅฏ็”จ็กฌไปถ่ต„ๆบ็š„ๆ›ดๆœ‰ๆ•ˆๅˆฉ็”จใ€‚ๆ— ่ฎบๆ˜ฏ้€š่ฟ‡Python API่ฟ˜ๆ˜ฏๅ‘ฝไปค่กŒ็•Œ้ข๏ผŒ้ƒฝๅฏไปฅไฝฟ็”จๆญคๅŠŸ่ƒฝใ€‚่‹ฅ่ฆๅฏ็”จๅคšGPU่ฎญ็ปƒ๏ผŒ่ฏทๆŒ‡ๅฎšๆ‚จๅธŒๆœ›ไฝฟ็”จ็š„GPU่ฎพๅค‡IDใ€‚
+
+!!! 
Example "ๅคšGPU่ฎญ็ปƒ็คบไพ‹" + + ่ฆไฝฟ็”จ2ไธชGPU่ฟ›่กŒ่ฎญ็ปƒ๏ผŒ่ฏทไฝฟ็”จCUDA่ฎพๅค‡0ๅ’Œ1๏ผŒไฝฟ็”จไปฅไธ‹ๅ‘ฝไปคใ€‚ๆ นๆฎ้œ€่ฆๆ‰ฉๅฑ•ๅˆฐๆ›ดๅคšGPUใ€‚ + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + + # ไฝฟ็”จ2ไธชGPU่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device=[0, 1]) + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จGPU 0ๅ’Œ1ไปŽ้ข„่ฎญ็ปƒ*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=0,1 + ``` + +### ่‹นๆžœM1ๅ’ŒM2 MPS่ฎญ็ปƒ + +้€š่ฟ‡Ultralytics YOLOๆจกๅž‹้›†ๆˆๅฏนApple M1ๅ’ŒM2่Šฏ็‰‡็š„ๆ”ฏๆŒ๏ผŒ็Žฐๅœจๅฏไปฅๅœจไฝฟ็”จๅผบๅคง็š„Metalๆ€ง่ƒฝ็€่‰ฒๅ™จ๏ผˆMPS๏ผ‰ๆก†ๆžถ็š„่ฎพๅค‡ไธŠ่ฎญ็ปƒๆจกๅž‹ใ€‚MPSไธบๅœจApple็š„ๅฎšๅˆถ็ก…ไธŠๆ‰ง่กŒ่ฎก็ฎ—ๅ’Œๅ›พๅƒๅค„็†ไปปๅŠกๆไพ›ไบ†ไธ€็ง้ซ˜ๆ€ง่ƒฝ็š„ๆ–นๆณ•ใ€‚ + +่ฆๅœจApple M1ๅ’ŒM2่Šฏ็‰‡ไธŠๅฏ็”จ่ฎญ็ปƒ๏ผŒๆ‚จๅบ”่ฏฅๅœจๅฏๅŠจ่ฎญ็ปƒ่ฟ‡็จ‹ๆ—ถๅฐ†่ฎพๅค‡ๆŒ‡ๅฎšไธบ'mps'ใ€‚ไปฅไธ‹ๆ˜ฏPythonๅ’Œๅ‘ฝไปค่กŒไธญๅฆ‚ไฝ•ๅšๅˆฐ่ฟ™็‚น็š„็คบไพ‹๏ผš + +!!! Example "MPS่ฎญ็ปƒ็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + + # ไฝฟ็”จ2ไธชGPU่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640, device='mps') + ``` + + === "CLI" + + ```bash + # ไฝฟ็”จGPU 0ๅ’Œ1ไปŽ้ข„่ฎญ็ปƒ*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 device=mps + ``` + +ๅˆฉ็”จM1/M2่Šฏ็‰‡็š„่ฎก็ฎ—่ƒฝๅŠ›๏ผŒ่ฟ™ไฝฟๅพ—่ฎญ็ปƒไปปๅŠก็š„ๅค„็†ๆ›ดๅŠ ้ซ˜ๆ•ˆใ€‚ๆœ‰ๅ…ณๆ›ด่ฏฆ็ป†็š„ๆŒ‡ๅ—ๅ’Œ้ซ˜็บง้…็ฝฎ้€‰้กน๏ผŒ่ฏทๅ‚้˜…[PyTorch MPSๆ–‡ๆกฃ](https://pytorch.org/docs/stable/notes/mps.html)ใ€‚ + +### ๆขๅคไธญๆ–ญ็š„่ฎญ็ปƒ + +ๅœจๅค„็†ๆทฑๅบฆๅญฆไน ๆจกๅž‹ๆ—ถ๏ผŒไปŽไน‹ๅ‰ไฟๅญ˜็š„็Šถๆ€ๆขๅค่ฎญ็ปƒๆ˜ฏไธ€ไธชๅ…ณ้”ฎ็‰นๆ€งใ€‚ๅœจๅ„็งๆƒ…ๅ†ตไธ‹๏ผŒ่ฟ™ๅฏ่ƒฝๅพˆๆ–นไพฟ๏ผŒๆฏ”ๅฆ‚ๅฝ“่ฎญ็ปƒ่ฟ‡็จ‹ๆ„ๅค–ไธญๆ–ญ๏ผŒๆˆ–่€…ๅฝ“ๆ‚จๅธŒๆœ›็”จๆ–ฐๆ•ฐๆฎๆˆ–ๆ›ดๅคšๆ—ถๆœŸ็ปง็ปญ่ฎญ็ปƒๆจกๅž‹ๆ—ถใ€‚ + +ๆขๅค่ฎญ็ปƒๆ—ถ๏ผŒUltralytics YOLOๅฐ†ๅŠ ่ฝฝๆœ€ๅŽไฟๅญ˜็š„ๆจกๅž‹็š„ๆƒ้‡๏ผŒๅนถๆขๅคไผ˜ๅŒ–ๅ™จ็Šถๆ€ใ€ๅญฆไน ็އ่ฐƒๅบฆๅ™จๅ’Œๆ—ถๆœŸ็ผ–ๅทใ€‚่ฟ™ๅ…่ฎธๆ‚จๆ— ็ผๅœฐไปŽ็ฆปๅผ€็š„ๅœฐๆ–น็ปง็ปญ่ฎญ็ปƒ่ฟ‡็จ‹ใ€‚ + +ๅœจUltralytics YOLOไธญ๏ผŒๆ‚จๅฏไปฅ้€š่ฟ‡ๅœจ่ฐƒ็”จ`train`ๆ–นๆณ•ๆ—ถๅฐ†`resume`ๅ‚ๆ•ฐ่ฎพ็ฝฎไธบ`True`ๅนถๆŒ‡ๅฎšๅŒ…ๅซ้ƒจๅˆ†่ฎญ็ปƒๆจกๅž‹ๆƒ้‡็š„`.pt`ๆ–‡ไปถ่ทฏๅพ„ๆฅ่ฝปๆพๆขๅค่ฎญ็ปƒใ€‚ + +ไธ‹้ขๆ˜ฏไฝฟ็”จPythonๅ’Œๅ‘ฝไปค่กŒๆขๅคไธญๆ–ญ่ฎญ็ปƒ็š„็คบไพ‹๏ผš + +!!! 
Example "ๆขๅค่ฎญ็ปƒ็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('path/to/last.pt') # ๅŠ ่ฝฝ้ƒจๅˆ†่ฎญ็ปƒ็š„ๆจกๅž‹ + + # ๆขๅค่ฎญ็ปƒ + results = model.train(resume=True) + ``` + + === "CLI" + + ```bash + # ๆขๅคไธญๆ–ญ็š„่ฎญ็ปƒ + yolo train resume model=path/to/last.pt + ``` + +้€š่ฟ‡่ฎพ็ฝฎ`resume=True`๏ผŒ`train`ๅ‡ฝๆ•ฐๅฐ†ไปŽ'path/to/last.pt'ๆ–‡ไปถไธญๅญ˜ๅ‚จ็š„็Šถๆ€็ปง็ปญ่ฎญ็ปƒใ€‚ๅฆ‚ๆžœ็œ็•ฅ`resume`ๅ‚ๆ•ฐๆˆ–ๅฐ†ๅ…ถ่ฎพ็ฝฎไธบ`False`๏ผŒ`train`ๅ‡ฝๆ•ฐๅฐ†ๅฏๅŠจๆ–ฐ็š„่ฎญ็ปƒไผš่ฏใ€‚ + +่ฏท่ฎฐไฝ๏ผŒ้ป˜่ฎคๆƒ…ๅ†ตไธ‹๏ผŒๆฃ€ๆŸฅ็‚นไผšๅœจๆฏไธชๆ—ถๆœŸ็ป“ๆŸๆ—ถไฟๅญ˜๏ผŒๆˆ–่€…ไฝฟ็”จ`save_period`ๅ‚ๆ•ฐไปฅๅ›บๅฎš้—ด้š”ไฟๅญ˜๏ผŒๅ› ๆญคๆ‚จๅฟ…้กป่‡ณๅฐ‘ๅฎŒๆˆ1ไธชๆ—ถๆœŸๆ‰่ƒฝๆขๅค่ฎญ็ปƒ่ฟ่กŒใ€‚ + +## ๅ‚ๆ•ฐ + +YOLOๆจกๅž‹็š„่ฎญ็ปƒ่ฎพ็ฝฎๆ˜ฏๆŒ‡็”จไบŽๅฏนๆ•ฐๆฎ้›†่ฟ›่กŒๆจกๅž‹่ฎญ็ปƒ็š„ๅ„็ง่ถ…ๅ‚ๆ•ฐๅ’Œ้…็ฝฎใ€‚่ฟ™ไบ›่ฎพ็ฝฎไผšๅฝฑๅ“ๆจกๅž‹็š„ๆ€ง่ƒฝใ€้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งใ€‚ไธ€ไบ›ๅธธ่ง็š„YOLO่ฎญ็ปƒ่ฎพ็ฝฎๅŒ…ๆ‹ฌๆ‰นๅคงๅฐใ€ๅญฆไน ็އใ€ๅŠจ้‡ๅ’Œๆƒ้‡่กฐๅ‡ใ€‚ๅ…ถไป–ๅฏ่ƒฝๅฝฑๅ“่ฎญ็ปƒ่ฟ‡็จ‹็š„ๅ› ็ด ๅŒ…ๆ‹ฌไผ˜ๅŒ–ๅ™จ็š„้€‰ๆ‹ฉใ€ๆŸๅคฑๅ‡ฝๆ•ฐ็š„้€‰ๆ‹ฉไปฅๅŠ่ฎญ็ปƒๆ•ฐๆฎ้›†็š„ๅคงๅฐๅ’Œ็ป„ๆˆใ€‚ไป”็ป†่ฐƒๆ•ดๅ’Œๅฎž้ชŒ่ฟ™ไบ›่ฎพ็ฝฎไปฅๅฎž็Žฐ็ป™ๅฎšไปปๅŠก็š„ๆœ€ไฝณๆ€ง่ƒฝๆ˜ฏ้žๅธธ้‡่ฆ็š„ใ€‚ + +| ้”ฎ | ๅ€ผ | ๆ่ฟฐ | +|-------------------|----------|---------------------------------------------------------------------| +| `model` | `None` | ๆจกๅž‹ๆ–‡ไปถ่ทฏๅพ„๏ผŒไพ‹ๅฆ‚ yolov8n.pt, yolov8n.yaml | +| `data` | `None` | ๆ•ฐๆฎๆ–‡ไปถ่ทฏๅพ„๏ผŒไพ‹ๅฆ‚ coco128.yaml | +| `epochs` | `100` | ่ฎญ็ปƒ็š„่ฝฎๆฌกๆ•ฐ้‡ | +| `patience` | `50` | ๆ—ฉๅœ่ฎญ็ปƒ็š„็ญ‰ๅพ…่ฝฎๆฌก | +| `batch` | `16` | ๆฏๆ‰นๅ›พๅƒๆ•ฐ้‡๏ผˆ-1ไธบ่‡ชๅŠจๆ‰นๅคงๅฐ๏ผ‰ | +| `imgsz` | `640` | ่พ“ๅ…ฅๅ›พๅƒ็š„ๅคงๅฐ๏ผŒไปฅๆ•ดๆ•ฐ่กจ็คบ | +| `save` | `True` | ไฟๅญ˜่ฎญ็ปƒๆฃ€ๆŸฅ็‚นๅ’Œ้ข„ๆต‹็ป“ๆžœ | +| `save_period` | `-1` | ๆฏx่ฝฎๆฌกไฟๅญ˜ๆฃ€ๆŸฅ็‚น๏ผˆๅฆ‚ๆžœ<1ๅˆ™็ฆ็”จ๏ผ‰ | +| `cache` | `False` | True/ram, disk ๆˆ– Falseใ€‚ไฝฟ็”จ็ผ“ๅญ˜ๅŠ ่ฝฝๆ•ฐๆฎ | +| `device` | `None` | ่ฟ่กŒ่ฎพๅค‡๏ผŒไพ‹ๅฆ‚ cuda device=0 ๆˆ– device=0,1,2,3 ๆˆ– device=cpu | +| `workers` | `8` | ๆ•ฐๆฎๅŠ ่ฝฝ็š„ๅทฅไฝœ็บฟ็จ‹ๆ•ฐ๏ผˆๅฆ‚ๆžœDDPๅˆ™ไธบๆฏไธชRANK๏ผ‰ | +| `project` | `None` | ้กน็›ฎๅ็งฐ | +| `name` | `None` | ๅฎž้ชŒๅ็งฐ | +| `exist_ok` | `False` | ๆ˜ฏๅฆ่ฆ†็›–็Žฐๆœ‰ๅฎž้ชŒ | +| `pretrained` | `True` | (bool ๆˆ– str) ๆ˜ฏๅฆไฝฟ็”จ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆbool๏ผ‰ๆˆ–ไปŽไธญๅŠ ่ฝฝๆƒ้‡็š„ๆจกๅž‹๏ผˆstr๏ผ‰ | +| `optimizer` | `'auto'` | ไฝฟ็”จ็š„ไผ˜ๅŒ–ๅ™จ๏ผŒ้€‰ๆ‹ฉ่Œƒๅ›ด=[SGD, Adam, Adamax, AdamW, NAdam, RAdam, RMSProp, auto] | +| `verbose` | `False` | ๆ˜ฏๅฆๆ‰“ๅฐ่ฏฆ็ป†่พ“ๅ‡บ | +| `seed` | `0` | ้šๆœบ็งๅญ๏ผŒ็”จไบŽๅฏ้‡ๅคๆ€ง | +| `deterministic` | `True` | ๆ˜ฏๅฆๅฏ็”จ็กฎๅฎšๆ€งๆจกๅผ | +| `single_cls` | `False` | ๅฐ†ๅคš็ฑปๆ•ฐๆฎไฝœไธบๅ•็ฑป่ฎญ็ปƒ | +| `rect` | `False` | ็Ÿฉๅฝข่ฎญ็ปƒ๏ผŒๆฏๆ‰นไธบๆœ€ๅฐๅกซๅ……ๆ•ดๅˆ | +| `cos_lr` | `False` | ไฝฟ็”จไฝ™ๅผฆๅญฆไน ็އ่ฐƒๅบฆๅ™จ | +| `close_mosaic` | `10` | (int) ๆœ€ๅŽ่ฝฎๆฌก็ฆ็”จ้ฉฌ่ต›ๅ…‹ๅขžๅผบ๏ผˆ0ไธบ็ฆ็”จ๏ผ‰ | +| `resume` | `False` | ไปŽๆœ€ๅŽๆฃ€ๆŸฅ็‚นๆขๅค่ฎญ็ปƒ | +| `amp` | `True` | ่‡ชๅŠจๆททๅˆ็ฒพๅบฆ๏ผˆAMP๏ผ‰่ฎญ็ปƒ๏ผŒ้€‰ๆ‹ฉ่Œƒๅ›ด=[True, False] | +| `fraction` | `1.0` | ่ฎญ็ปƒ็š„ๆ•ฐๆฎ้›†ๆฏ”ไพ‹๏ผˆ้ป˜่ฎคไธบ1.0๏ผŒๅณ่ฎญ็ปƒ้›†ไธญ็š„ๆ‰€ๆœ‰ๅ›พๅƒ๏ผ‰ | +| `profile` | `False` | ๅœจ่ฎญ็ปƒๆœŸ้—ดไธบ่ฎฐๅฝ•ๅ™จๅˆ†ๆžONNXๅ’ŒTensorRT้€Ÿๅบฆ | +| `freeze` | `None` | (int ๆˆ– list, ๅฏ้€‰) ๅœจ่ฎญ็ปƒๆœŸ้—ดๅ†ป็ป“ๅ‰nๅฑ‚๏ผŒๆˆ–ๅ†ป็ป“ๅฑ‚็ดขๅผ•ๅˆ—่กจ | +| `lr0` | `0.01` | ๅˆๅง‹ๅญฆไน ็އ๏ผˆไพ‹ๅฆ‚ SGD=1E-2, Adam=1E-3๏ผ‰ | +| `lrf` | `0.01` | ๆœ€็ปˆๅญฆไน ็އ (lr0 * lrf) | +| `momentum` | `0.937` | SGDๅŠจ้‡/Adam beta1 | +| `weight_decay` | `0.0005` | 
ไผ˜ๅŒ–ๅ™จๆƒ้‡่กฐๅ‡5e-4 | +| `warmup_epochs` | `3.0` | ็ƒญ่บซ่ฝฎๆฌก๏ผˆๅฐๆ•ฐok๏ผ‰ | +| `warmup_momentum` | `0.8` | ็ƒญ่บซๅˆๅง‹ๅŠจ้‡ | +| `warmup_bias_lr` | `0.1` | ็ƒญ่บซๅˆๅง‹ๅๅทฎlr | +| `box` | `7.5` | ๆก†ๆŸๅคฑๅขž็›Š | +| `cls` | `0.5` | clsๆŸๅคฑๅขž็›Š๏ผˆๆ นๆฎๅƒ็ด ็ผฉๆ”พ๏ผ‰ | +| `dfl` | `1.5` | dflๆŸๅคฑๅขž็›Š | +| `pose` | `12.0` | ๅงฟๆ€ๆŸๅคฑๅขž็›Š๏ผˆไป…้™ๅงฟๆ€๏ผ‰ | +| `kobj` | `2.0` | ๅ…ณ้”ฎ็‚นobjๆŸๅคฑๅขž็›Š๏ผˆไป…้™ๅงฟๆ€๏ผ‰ | +| `label_smoothing` | `0.0` | ๆ ‡็ญพๅนณๆป‘๏ผˆๅฐๆ•ฐ๏ผ‰ | +| `nbs` | `64` | ๆ ‡็งฐๆ‰นๅคงๅฐ | +| `overlap_mask` | `True` | ่ฎญ็ปƒๆœŸ้—ดๆŽฉ็ ๅบ”้‡ๅ ๏ผˆไป…้™ๅˆ†ๅ‰ฒ่ฎญ็ปƒ๏ผ‰ | +| `mask_ratio` | `4` | ๆŽฉ็ ้™้‡‡ๆ ทๆฏ”็އ๏ผˆไป…้™ๅˆ†ๅ‰ฒ่ฎญ็ปƒ๏ผ‰ | +| `dropout` | `0.0` | ไฝฟ็”จdropoutๆญฃๅˆ™ๅŒ–๏ผˆไป…้™ๅˆ†็ฑป่ฎญ็ปƒ๏ผ‰ | +| `val` | `True` | ่ฎญ็ปƒๆœŸ้—ด้ชŒ่ฏ/ๆต‹่ฏ• | + +## ่ฎฐๅฝ• + +ๅœจ่ฎญ็ปƒYOLOv8ๆจกๅž‹ๆ—ถ๏ผŒ่ทŸ่ธชๆจกๅž‹้šๆ—ถ้—ด็š„ๆ€ง่ƒฝๅ˜ๅŒ–ๅฏ่ƒฝ้žๅธธๆœ‰ไปทๅ€ผใ€‚่ฟ™ๅฐฑๆ˜ฏ่ฎฐๅฝ•ๅ‘ๆŒฅไฝœ็”จ็š„ๅœฐๆ–นใ€‚Ultralytics็š„YOLOๆไพ›ๅฏนไธ‰็ง็ฑปๅž‹่ฎฐๅฝ•ๅ™จ็š„ๆ”ฏๆŒ - Cometใ€ClearMLๅ’ŒTensorBoardใ€‚ + +่ฆไฝฟ็”จ่ฎฐๅฝ•ๅ™จ๏ผŒ่ฏทๅœจไธŠ้ข็š„ไปฃ็ ็‰‡ๆฎตไธญ็š„ไธ‹ๆ‹‰่œๅ•ไธญ้€‰ๆ‹ฉๅฎƒๅนถ่ฟ่กŒใ€‚ๆ‰€้€‰็š„่ฎฐๅฝ•ๅ™จๅฐ†่ขซๅฎ‰่ฃ…ๅ’Œๅˆๅง‹ๅŒ–ใ€‚ + +### Comet + +[Comet](https://www.comet.ml/site/)ๆ˜ฏไธ€ไธชๅนณๅฐ๏ผŒๅ…่ฎธๆ•ฐๆฎ็ง‘ๅญฆๅฎถๅ’Œๅผ€ๅ‘ไบบๅ‘˜่ทŸ่ธชใ€ๆฏ”่พƒใ€่งฃ้‡Šๅ’Œไผ˜ๅŒ–ๅฎž้ชŒๅ’Œๆจกๅž‹ใ€‚ๅฎƒๆไพ›ไบ†ๅฎžๆ—ถๆŒ‡ๆ ‡ใ€ไปฃ็ ๅทฎๅผ‚ๅ’Œ่ถ…ๅ‚ๆ•ฐ่ทŸ่ธช็ญ‰ๅŠŸ่ƒฝใ€‚ + +ไฝฟ็”จComet๏ผš + +!!! Example "็คบไพ‹" + + === "Python" + ```python + # pip install comet_ml + import comet_ml + + comet_ml.init() + ``` + +่ฎฐๅพ—ๅœจไป–ไปฌ็š„็ฝ‘็ซ™ไธŠ็™ปๅฝ•ๆ‚จ็š„Comet่ดฆๆˆทๅนถ่Žทๅ–ๆ‚จ็š„APIๅฏ†้’ฅใ€‚ๆ‚จ้œ€่ฆๅฐ†ๆญคๆทปๅŠ ๅˆฐๆ‚จ็š„็Žฏๅขƒๅ˜้‡ๆˆ–่„šๆœฌไธญ๏ผŒไปฅ่ฎฐๅฝ•ๆ‚จ็š„ๅฎž้ชŒใ€‚ + +### ClearML + +[ClearML](https://www.clear.ml/) ๆ˜ฏไธ€ไธชๅผ€ๆบๅนณๅฐ๏ผŒ่‡ชๅŠจ่ทŸ่ธชๅฎž้ชŒๅนถๅธฎๅŠฉๆœ‰ๆ•ˆๅ…ฑไบซ่ต„ๆบใ€‚ๅฎƒๆ—จๅœจๅธฎๅŠฉๅ›ข้˜Ÿๆ›ดๆœ‰ๆ•ˆๅœฐ็ฎก็†ใ€ๆ‰ง่กŒๅ’Œๅค็Žฐไป–ไปฌ็š„MLๅทฅไฝœใ€‚ + +ไฝฟ็”จClearML๏ผš + +!!! Example "็คบไพ‹" + + === "Python" + ```python + # pip install clearml + import clearml + + clearml.browser_login() + ``` + +่ฟ่กŒๆญค่„šๆœฌๅŽ๏ผŒๆ‚จ้œ€่ฆๅœจๆต่งˆๅ™จไธญ็™ปๅฝ•ๆ‚จ็š„ClearML่ดฆๆˆทๅนถ่ฎค่ฏๆ‚จ็š„ไผš่ฏใ€‚ + +### TensorBoard + +[TensorBoard](https://www.tensorflow.org/tensorboard) ๆ˜ฏTensorFlow็š„ๅฏ่ง†ๅŒ–ๅทฅๅ…ทๅŒ…ใ€‚ๅฎƒๅ…่ฎธๆ‚จๅฏ่ง†ๅŒ–TensorFlowๅ›พ่กจ๏ผŒ็ป˜ๅˆถๆœ‰ๅ…ณๅ›พ่กจๆ‰ง่กŒ็š„ๅฎš้‡ๆŒ‡ๆ ‡๏ผŒๅนถๅฑ•็คบ้€š่ฟ‡ๅฎƒ็š„้™„ๅŠ ๆ•ฐๆฎ๏ผŒๅฆ‚ๅ›พๅƒใ€‚ + +ๅœจ[Google Colab](https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb)ไธญไฝฟ็”จTensorBoard๏ผš + +!!! Example "็คบไพ‹" + + === "CLI" + ```bash + load_ext tensorboard + tensorboard --logdir ultralytics/runs # ๆ›ฟๆขไธบ'runs'็›ฎๅฝ• + ``` + +ๅœจๆœฌๅœฐไฝฟ็”จTensorBoard๏ผŒ่ฟ่กŒไธ‹้ข็š„ๅ‘ฝไปคๅนถๅœจ http://localhost:6006/ ๆŸฅ็œ‹็ป“ๆžœใ€‚ + +!!! 
Example "็คบไพ‹" + + === "CLI" + ```bash + tensorboard --logdir ultralytics/runs # ๆ›ฟๆขไธบ'runs'็›ฎๅฝ• + ``` + +่ฟ™ๅฐ†ๅŠ ่ฝฝTensorBoardๅนถๅฐ†ๅ…ถๅฎšๅ‘ๅˆฐไฟๅญ˜่ฎญ็ปƒๆ—ฅๅฟ—็š„็›ฎๅฝ•ใ€‚ + +ๅœจ่ฎพ็ฝฎๅฅฝๆ—ฅๅฟ—่ฎฐๅฝ•ๅ™จๅŽ๏ผŒๆ‚จๅฏไปฅ็ปง็ปญ่ฟ›่กŒๆจกๅž‹่ฎญ็ปƒใ€‚ๆ‰€ๆœ‰่ฎญ็ปƒๆŒ‡ๆ ‡ๅฐ†่‡ชๅŠจ่ฎฐๅฝ•ๅœจๆ‚จ้€‰ๆ‹ฉ็š„ๅนณๅฐไธญ๏ผŒๆ‚จๅฏไปฅ่ฎฟ้—ฎ่ฟ™ไบ›ๆ—ฅๅฟ—ไปฅ็›‘ๆŽงๆจกๅž‹้šๆ—ถ้—ด็š„่กจ็Žฐ๏ผŒๆฏ”่พƒไธๅŒๆจกๅž‹๏ผŒๅนถ่ฏ†ๅˆซๆ”น่ฟ›็š„้ข†ๅŸŸใ€‚ diff --git a/ultralytics/docs/zh/modes/train.md:Zone.Identifier b/ultralytics/docs/zh/modes/train.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/train.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/modes/val.md b/ultralytics/docs/zh/modes/val.md new file mode 100755 index 0000000..21129fb --- /dev/null +++ b/ultralytics/docs/zh/modes/val.md @@ -0,0 +1,86 @@ +--- +comments: true +description: ๆŒ‡ๅ— - ้ชŒ่ฏ YOLOv8 ๆจกๅž‹ใ€‚ไบ†่งฃๅฆ‚ไฝ•ไฝฟ็”จ้ชŒ่ฏ่ฎพ็ฝฎๅ’ŒๆŒ‡ๆ ‡่ฏ„ไผฐๆ‚จ็š„ YOLO ๆจกๅž‹็š„ๆ€ง่ƒฝ๏ผŒๅŒ…ๆ‹ฌ Python ๅ’Œ CLI ็คบไพ‹ใ€‚ +keywords: Ultralytics, YOLO ๆ–‡ๆกฃ, YOLOv8, ้ชŒ่ฏ, ๆจกๅž‹่ฏ„ไผฐ, ่ถ…ๅ‚ๆ•ฐ, ๅ‡†็กฎ็އ, ๆŒ‡ๆ ‡, Python, CLI +--- + +# ไฝฟ็”จ Ultralytics YOLO ่ฟ›่กŒๆจกๅž‹้ชŒ่ฏ + +Ultralytics YOLO ็”Ÿๆ€็ณป็ปŸๅ’Œ้›†ๆˆ + +## ๅผ•่จ€ + +ๅœจๆœบๅ™จๅญฆไน ๆต็จ‹ไธญ๏ผŒ้ชŒ่ฏๆ˜ฏไธ€ไธชๅ…ณ้”ฎๆญฅ้ชค๏ผŒ่ฎฉๆ‚จ่ƒฝๅคŸ่ฏ„ไผฐ่ฎญ็ปƒๆจกๅž‹็š„่ดจ้‡ใ€‚Ultralytics YOLOv8 ็š„ Val ๆจกๅผๆไพ›ไบ†ไธ€ๆ•ดๅฅ—ๅผบๅคง็š„ๅทฅๅ…ทๅ’ŒๆŒ‡ๆ ‡๏ผŒ็”จไบŽ่ฏ„ไผฐๆ‚จ็š„็›ฎๆ ‡ๆฃ€ๆต‹ๆจกๅž‹็š„ๆ€ง่ƒฝใ€‚ๆœฌๆŒ‡ๅ—ไฝœไธบไธ€ไธชๅฎŒๆ•ด่ต„ๆบ๏ผŒ็”จไบŽ็†่งฃๅฆ‚ไฝ•ๆœ‰ๆ•ˆไฝฟ็”จ Val ๆจกๅผๆฅ็กฎไฟๆ‚จ็š„ๆจกๅž‹ๆ—ขๅ‡†็กฎๅˆๅฏ้ ใ€‚ + +## ไธบไป€ไนˆ่ฆไฝฟ็”จ Ultralytics YOLO ่ฟ›่กŒ้ชŒ่ฏ๏ผŸ + +ไปฅไธ‹ๆ˜ฏไฝฟ็”จ YOLOv8 ็š„ Val ๆจกๅผ็š„ๅฅฝๅค„๏ผš + +- **็ฒพ็กฎๆ€ง๏ผš** ่Žทๅ–ๅ‡†็กฎ็š„ๆŒ‡ๆ ‡๏ผŒๅฆ‚ mAP50ใ€mAP75 ๅ’Œ mAP50-95๏ผŒๅ…จ้ข่ฏ„ไผฐๆ‚จ็š„ๆจกๅž‹ใ€‚ +- **ไพฟๅˆฉๆ€ง๏ผš** ๅˆฉ็”จๅ†…็ฝฎๅŠŸ่ƒฝ่ฎฐไฝ่ฎญ็ปƒ่ฎพ็ฝฎ๏ผŒ็ฎ€ๅŒ–้ชŒ่ฏ่ฟ‡็จ‹ใ€‚ +- **็ตๆดปๆ€ง๏ผš** ไฝฟ็”จ็›ธๅŒๆˆ–ไธๅŒ็š„ๆ•ฐๆฎ้›†ๅ’Œๅ›พๅƒๅฐบๅฏธ้ชŒ่ฏๆ‚จ็š„ๆจกๅž‹ใ€‚ +- **่ถ…ๅ‚ๆ•ฐ่ฐƒไผ˜๏ผš** ไฝฟ็”จ้ชŒ่ฏๆŒ‡ๆ ‡ๆฅ่ฐƒๆ•ดๆ‚จ็š„ๆจกๅž‹ไปฅ่Žทๅพ—ๆ›ดๅฅฝ็š„ๆ€ง่ƒฝใ€‚ + +### Val ๆจกๅผ็š„ไธป่ฆ็‰น็‚น + +ไปฅไธ‹ๆ˜ฏ YOLOv8 ็š„ Val ๆจกๅผๆไพ›็š„ๆ˜พ่‘—ๅŠŸ่ƒฝ๏ผš + +- **่‡ชๅŠจๅŒ–่ฎพ็ฝฎ๏ผš** ๆจกๅž‹่ฎฐไฝๅ…ถ่ฎญ็ปƒ้…็ฝฎ๏ผŒไปฅไพฟ็›ดๆŽฅ่ฟ›่กŒ้ชŒ่ฏใ€‚ +- **ๅคšๆŒ‡ๆ ‡ๆ”ฏๆŒ๏ผš** ๆ นๆฎไธ€็ณปๅˆ—ๅ‡†็กฎๅบฆๆŒ‡ๆ ‡่ฏ„ไผฐๆ‚จ็š„ๆจกๅž‹ใ€‚ +- **CLI ๅ’Œ Python API๏ผš** ๆ นๆฎๆ‚จ็š„้ชŒ่ฏๅๅฅฝ้€‰ๆ‹ฉๅ‘ฝไปค่กŒ็•Œ้ขๆˆ– Python APIใ€‚ +- **ๆ•ฐๆฎๅ…ผๅฎนๆ€ง๏ผš** ไธŽ่ฎญ็ปƒ้˜ถๆฎตไฝฟ็”จ็š„ๆ•ฐๆฎ้›†ไปฅๅŠ่‡ชๅฎšไน‰ๆ•ฐๆฎ้›†ๆ— ็ผๅไฝœใ€‚ + +!!! Tip "ๆ็คบ" + + * YOLOv8 ๆจกๅž‹ไผš่‡ชๅŠจ่ฎฐไฝๅ…ถ่ฎญ็ปƒ่ฎพ็ฝฎ๏ผŒๅ› ๆญคๆ‚จๅฏไปฅๅพˆๅฎนๆ˜“ๅœฐไป…ไฝฟ็”จ `yolo val model=yolov8n.pt` ๆˆ– `model('yolov8n.pt').val()` ๅœจๅŽŸๅง‹ๆ•ฐๆฎ้›†ไธŠๅนถไปฅ็›ธๅŒๅ›พๅƒๅคงๅฐ้ชŒ่ฏๆจกๅž‹ใ€‚ + +## ไฝฟ็”จ็คบไพ‹ + +ๅœจ COCO128 ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏ่ฎญ็ปƒ่ฟ‡็š„ YOLOv8n ๆจกๅž‹็š„ๅ‡†็กฎๆ€งใ€‚็”ฑไบŽ `model` ไฟ็•™ไบ†ๅ…ถ่ฎญ็ปƒ็š„ `data` ๅ’Œๅ‚ๆ•ฐไฝœไธบๆจกๅž‹ๅฑžๆ€ง๏ผŒๅ› ๆญคๆ— ้œ€ไผ ้€’ไปปไฝ•ๅ‚ๆ•ฐใ€‚ๆœ‰ๅ…ณๅฎŒๆ•ด็š„ๅฏผๅ‡บๅ‚ๆ•ฐๅˆ—่กจ๏ผŒ่ฏทๅ‚้˜…ไธ‹้ข็š„ๅ‚ๆ•ฐ้ƒจๅˆ†ใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ้ชŒ่ฏๆจกๅž‹ + metrics = model.val() # ๆ— ้œ€ๅ‚ๆ•ฐ๏ผŒๆ•ฐๆฎ้›†ๅ’Œ่ฎพ็ฝฎ่ฎฐๅฟ† + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅŒ…ๅซๆฏไธช็ฑปๅˆซ็š„map50-95ๅˆ—่กจ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ้ชŒ่ฏๅฎ˜ๆ–นๆจกๅž‹ + yolo detect val model=path/to/best.pt # ้ชŒ่ฏ่‡ชๅฎšไน‰ๆจกๅž‹ + ``` + +## ๅ‚ๆ•ฐ + +YOLO ๆจกๅž‹็š„้ชŒ่ฏ่ฎพ็ฝฎๆ˜ฏๆŒ‡็”จไบŽ่ฏ„ไผฐๆจกๅž‹ๅœจ้ชŒ่ฏๆ•ฐๆฎ้›†ไธŠๆ€ง่ƒฝ็š„ๅ„็ง่ถ…ๅ‚ๆ•ฐๅ’Œ้…็ฝฎใ€‚่ฟ™ไบ›่ฎพ็ฝฎไผšๅฝฑๅ“ๆจกๅž‹็š„ๆ€ง่ƒฝใ€้€Ÿๅบฆๅ’Œๅ‡†็กฎๆ€งใ€‚ไธ€ไบ›ๅธธ่ง็š„ YOLO ้ชŒ่ฏ่ฎพ็ฝฎๅŒ…ๆ‹ฌๆ‰นๅค„็†ๅคงๅฐใ€ๅœจ่ฎญ็ปƒๆœŸ้—ด้ชŒ่ฏ้ข‘็އไปฅๅŠ็”จไบŽ่ฏ„ไผฐๆจกๅž‹ๆ€ง่ƒฝ็š„ๆŒ‡ๆ ‡ใ€‚ๅ…ถไป–ๅฏ่ƒฝๅฝฑๅ“้ชŒ่ฏ่ฟ‡็จ‹็š„ๅ› ็ด ๅŒ…ๆ‹ฌ้ชŒ่ฏๆ•ฐๆฎ้›†็š„ๅคงๅฐๅ’Œ็ป„ๆˆไปฅๅŠๆจกๅž‹็”จไบŽ็‰นๅฎšไปปๅŠก็š„็‰นๆ€งใ€‚ไป”็ป†่ฐƒๆ•ดๅ’Œๅฎž้ชŒ่ฟ™ไบ›่ฎพ็ฝฎๅพˆ้‡่ฆ๏ผŒไปฅ็กฎไฟๆจกๅž‹ๅœจ้ชŒ่ฏๆ•ฐๆฎ้›†ไธŠ่กจ็Žฐ่‰ฏๅฅฝๅนถไธ”ๆฃ€ๆต‹ๅ’Œ้ข„้˜ฒ่ฟ‡ๆ‹Ÿๅˆใ€‚ + +| ้”ฎ | ๅ€ผ | ๆ่ฟฐ | +|---------------|---------|---------------------------------------------| +| `data` | `None` | ๆ•ฐๆฎๆ–‡ไปถ็š„่ทฏๅพ„๏ผŒไพ‹ๅฆ‚ coco128.yaml | +| `imgsz` | `640` | ่พ“ๅ…ฅๅ›พๅƒ็š„ๅคงๅฐ๏ผŒไปฅๆ•ดๆ•ฐ่กจ็คบ | +| `batch` | `16` | ๆฏๆ‰นๅ›พๅƒ็š„ๆ•ฐ้‡๏ผˆAutoBatch ไธบ -1๏ผ‰ | +| `save_json` | `False` | ๅฐ†็ป“ๆžœไฟๅญ˜่‡ณ JSON ๆ–‡ไปถ | +| `save_hybrid` | `False` | ไฟๅญ˜ๆททๅˆ็‰ˆๆœฌ็š„ๆ ‡็ญพ๏ผˆๆ ‡็ญพ + ้ขๅค–้ข„ๆต‹๏ผ‰ | +| `conf` | `0.001` | ็”จไบŽๆฃ€ๆต‹็š„ๅฏน่ฑก็ฝฎไฟกๅบฆ้˜ˆๅ€ผ | +| `iou` | `0.6` | NMS๏ผˆ้žๆžๅคงๆŠ‘ๅˆถ๏ผ‰็”จ็š„ไบคๅนถๆฏ”๏ผˆIoU๏ผ‰้˜ˆๅ€ผ | +| `max_det` | `300` | ๆฏๅผ ๅ›พๅƒ็š„ๆœ€ๅคงๆฃ€ๆต‹ๆ•ฐ้‡ | +| `half` | `True` | ไฝฟ็”จๅŠ็ฒพๅบฆ๏ผˆFP16๏ผ‰ | +| `device` | `None` | ่ฟ่กŒๆ‰€็”จ็š„่ฎพๅค‡๏ผŒไพ‹ๅฆ‚ cuda device=0/1/2/3 ๆˆ– device=cpu | +| `dnn` | `False` | ไฝฟ็”จ OpenCV DNN ่ฟ›่กŒ ONNX ๆŽจ็† | +| `plots` | `False` | ๅœจ่ฎญ็ปƒๆœŸ้—ดๆ˜พ็คบๅ›พ่กจ | +| `rect` | `False` | ็Ÿฉๅฝข้ชŒ่ฏ๏ผŒๆฏๆ‰นๅ›พๅƒไธบไบ†ๆœ€ๅฐๅกซๅ……ๆ•ด้ฝๆŽ’ๅˆ— | +| `split` | `val` | ็”จไบŽ้ชŒ่ฏ็š„ๆ•ฐๆฎ้›†ๅˆ†ๅ‰ฒ๏ผŒไพ‹ๅฆ‚ 'val'ใ€'test' ๆˆ– 'train' | +| diff --git a/ultralytics/docs/zh/modes/val.md:Zone.Identifier b/ultralytics/docs/zh/modes/val.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/modes/val.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/quickstart.md b/ultralytics/docs/zh/quickstart.md new file mode 100755 index 0000000..b3438a8 --- /dev/null +++ b/ultralytics/docs/zh/quickstart.md @@ -0,0 +1,325 @@ +--- +comments: true +description: ๆŽข็ดขไฝฟ็”จpipใ€condaใ€gitๅ’ŒDockerๅฎ‰่ฃ…Ultralytics็š„ๅ„็งๆ–นๆณ•ใ€‚ไบ†่งฃๅฆ‚ไฝ•ๅœจๅ‘ฝไปค่กŒ็•Œ้ขๆˆ–Python้กน็›ฎไธญไฝฟ็”จUltralyticsใ€‚ +keywords: Ultralyticsๅฎ‰่ฃ…๏ผŒpipๅฎ‰่ฃ…Ultralytics๏ผŒDockerๅฎ‰่ฃ…Ultralytics๏ผŒUltralyticsๅ‘ฝไปค่กŒ็•Œ้ข๏ผŒUltralytics PythonๆŽฅๅฃ +--- + +## ๅฎ‰่ฃ…Ultralytics + +Ultralyticsๆไพ›ไบ†ๅคš็งๅฎ‰่ฃ…ๆ–นๆณ•๏ผŒๅŒ…ๆ‹ฌpipใ€condaๅ’ŒDockerใ€‚้€š่ฟ‡`ultralytics`pipๅŒ…ๅฎ‰่ฃ…ๆœ€ๆ–ฐ็จณๅฎš็‰ˆ็š„YOLOv8๏ผŒๆˆ–่€…ๅ…‹้š†[Ultralytics GitHubไป“ๅบ“](https://github.com/ultralytics/ultralytics)ไปฅ่Žทๅ–ๆœ€ๆ–ฐ็‰ˆๆœฌใ€‚Dockerๅฏ็”จไบŽๅœจ้š”็ฆปๅฎนๅ™จไธญๆ‰ง่กŒๅŒ…๏ผŒ้ฟๅ…ๆœฌๅœฐๅฎ‰่ฃ…ใ€‚ + +!!! 
Example "ๅฎ‰่ฃ…" + + === "Pipๅฎ‰่ฃ…๏ผˆๆŽจ่๏ผ‰" + ไฝฟ็”จpipๅฎ‰่ฃ…`ultralytics`ๅŒ…๏ผŒๆˆ–้€š่ฟ‡่ฟ่กŒ`pip install -U ultralytics`ๆ›ดๆ–ฐ็Žฐๆœ‰ๅฎ‰่ฃ…ใ€‚่ฎฟ้—ฎPythonๅŒ…็ดขๅผ•(PyPI)ไบ†่งฃๆ›ดๅคšๅ…ณไบŽ`ultralytics`ๅŒ…็š„่ฏฆ็ป†ไฟกๆฏ๏ผš[https://pypi.org/project/ultralytics/](https://pypi.org/project/ultralytics/)ใ€‚ + + [![PyPI็‰ˆๆœฌ](https://badge.fury.io/py/ultralytics.svg)](https://badge.fury.io/py/ultralytics) [![ไธ‹่ฝฝ](https://static.pepy.tech/badge/ultralytics)](https://pepy.tech/project/ultralytics) + + ```bash + # ไปŽPyPIๅฎ‰่ฃ…ultralyticsๅŒ… + pip install ultralytics + ``` + + ไฝ ไนŸๅฏไปฅ็›ดๆŽฅไปŽGitHub[ไป“ๅบ“](https://github.com/ultralytics/ultralytics)ๅฎ‰่ฃ…`ultralytics`ๅŒ…ใ€‚ๅฆ‚ๆžœไฝ ๆƒณ่ฆๆœ€ๆ–ฐ็š„ๅผ€ๅ‘็‰ˆๆœฌ๏ผŒ่ฟ™ๅฏ่ƒฝไผšๅพˆๆœ‰็”จใ€‚็กฎไฟไฝ ็š„็ณป็ปŸไธŠๅฎ‰่ฃ…ไบ†Gitๅ‘ฝไปค่กŒๅทฅๅ…ทใ€‚`@main`ๆŒ‡ไปคๅฎ‰่ฃ…`main`ๅˆ†ๆ”ฏ๏ผŒๅฏไฟฎๆ”นไธบๅ…ถไป–ๅˆ†ๆ”ฏ๏ผŒๅฆ‚`@my-branch`๏ผŒๆˆ–ๅฎŒๅ…จๅˆ ้™ค๏ผŒ้ป˜่ฎคไธบ`main`ๅˆ†ๆ”ฏใ€‚ + + ```bash + # ไปŽGitHubๅฎ‰่ฃ…ultralyticsๅŒ… + pip install git+https://github.com/ultralytics/ultralytics.git@main + ``` + + + === "Condaๅฎ‰่ฃ…" + Condaๆ˜ฏpip็š„ไธ€ไธชๆ›ฟไปฃๅŒ…็ฎก็†ๅ™จ๏ผŒไนŸๅฏ็”จไบŽๅฎ‰่ฃ…ใ€‚่ฎฟ้—ฎAnacondaไบ†่งฃๆ›ดๅคš่ฏฆๆƒ…๏ผŒ็ฝ‘ๅ€ไธบ[https://anaconda.org/conda-forge/ultralytics](https://anaconda.org/conda-forge/ultralytics)ใ€‚็”จไบŽๆ›ดๆ–ฐcondaๅŒ…็š„Ultralytics feedstockไป“ๅบ“ไฝไบŽ[https://github.com/conda-forge/ultralytics-feedstock/](https://github.com/conda-forge/ultralytics-feedstock/)ใ€‚ + + + [![Conda้…ๆ–น](https://img.shields.io/badge/recipe-ultralytics-green.svg)](https://anaconda.org/conda-forge/ultralytics) [![Condaไธ‹่ฝฝ](https://img.shields.io/conda/dn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Conda็‰ˆๆœฌ](https://img.shields.io/conda/vn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) [![Condaๅนณๅฐ](https://img.shields.io/conda/pn/conda-forge/ultralytics.svg)](https://anaconda.org/conda-forge/ultralytics) + + ```bash + # ไฝฟ็”จcondaๅฎ‰่ฃ…ultralyticsๅŒ… + conda install -c conda-forge ultralytics + ``` + + !!! ๆณจๆ„ + + ๅฆ‚ๆžœไฝ ๅœจCUDA็Žฏๅขƒไธญๅฎ‰่ฃ…๏ผŒๆœ€ไฝณๅฎž่ทตๆ˜ฏๅŒๆ—ถๅฎ‰่ฃ…`ultralytics`ใ€`pytorch`ๅ’Œ`pytorch-cuda`๏ผŒไปฅไพฟcondaๅŒ…็ฎก็†ๅ™จ่งฃๅ†ณไปปไฝ•ๅ†ฒ็ช๏ผŒๆˆ–่€…ๆœ€ๅŽๅฎ‰่ฃ…`pytorch-cuda`๏ผŒ่ฎฉๅฎƒๅฟ…่ฆๆ—ถ่ฆ†็›–็‰นๅฎšไบŽCPU็š„`pytorch`ๅŒ…ใ€‚ + ```bash + # ไฝฟ็”จcondaไธ€่ตทๅฎ‰่ฃ…ๆ‰€ๆœ‰ๅŒ… + conda install -c pytorch -c nvidia -c conda-forge pytorch torchvision pytorch-cuda=11.8 ultralytics + ``` + + ### Conda Dockerๆ˜ ๅƒ + + Ultralytics Conda Dockerๆ˜ ๅƒไนŸๅฏไปŽ[DockerHub](https://hub.docker.com/r/ultralytics/ultralytics)่Žทๅพ—ใ€‚่ฟ™ไบ›ๆ˜ ๅƒๅŸบไบŽ[Miniconda3](https://docs.conda.io/projects/miniconda/en/latest/)๏ผŒๆ˜ฏๅผ€ๅง‹ๅœจConda็Žฏๅขƒไธญไฝฟ็”จ`ultralytics`็š„็ฎ€ๅ•ๆ–นๅผใ€‚ + + ```bash + # ๅฐ†ๆ˜ ๅƒๅ็งฐ่ฎพ็ฝฎไธบๅ˜้‡ + t=ultralytics/ultralytics:latest-conda + + # ไปŽDocker Hubๆ‹‰ๅ–ๆœ€ๆ–ฐ็š„ultralyticsๆ˜ ๅƒ + sudo docker pull $t + + # ไฝฟ็”จGPUๆ”ฏๆŒ่ฟ่กŒultralyticsๆ˜ ๅƒ็š„ๅฎนๅ™จ + sudo docker run -it --ipc=host --gpus all $t # ๆ‰€ๆœ‰GPU + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ๆŒ‡ๅฎšGPU + ``` + + === "Gitๅ…‹้š†" + ๅฆ‚ๆžœๆ‚จๅฏนๅ‚ไธŽๅผ€ๅ‘ๆ„Ÿๅ…ด่ถฃๆˆ–ๅธŒๆœ›ๅฐ่ฏ•ๆœ€ๆ–ฐๆบไปฃ็ ๏ผŒ่ฏทๅ…‹้š†`ultralytics`ไป“ๅบ“ใ€‚ๅ…‹้š†ๅŽ๏ผŒๅฏผ่ˆชๅˆฐ็›ฎๅฝ•ๅนถไฝฟ็”จpipไปฅๅฏ็ผ–่พ‘ๆจกๅผ`-e`ๅฎ‰่ฃ…ๅŒ…ใ€‚ + ```bash + # ๅ…‹้š†ultralyticsไป“ๅบ“ + git clone https://github.com/ultralytics/ultralytics + + # ๅฏผ่ˆชๅˆฐๅ…‹้š†็š„็›ฎๅฝ• + cd ultralytics + + # ไธบๅผ€ๅ‘ๅฎ‰่ฃ…ๅฏ็ผ–่พ‘ๆจกๅผไธ‹็š„ๅŒ… + pip install -e . 
+ ``` + + === "Docker" + + ๅˆฉ็”จDocker่ฝปๆพๅœฐๅœจ้š”็ฆป็š„ๅฎนๅ™จไธญๆ‰ง่กŒ`ultralytics`ๅŒ…๏ผŒ็กฎไฟ่ทจไธๅŒ็Žฏๅขƒ็š„ไธ€่‡ดๆ€งๅ’Œๆต็•…ๆ€ง่ƒฝใ€‚้€š่ฟ‡้€‰ๆ‹ฉไธ€ๆฌพๅฎ˜ๆ–น`ultralytics`ๆ˜ ๅƒ๏ผŒไปŽ[Docker Hub](https://hub.docker.com/r/ultralytics/ultralytics)ไธญไธไป…้ฟๅ…ไบ†ๆœฌๅœฐๅฎ‰่ฃ…็š„ๅคๆ‚ๆ€ง๏ผŒ่ฟ˜่Žทๅพ—ไบ†ๅฏน้ชŒ่ฏๅทฅไฝœ็Žฏๅขƒ็š„่ฎฟ้—ฎใ€‚Ultralyticsๆไพ›5็งไธป่ฆๆ”ฏๆŒ็š„Dockerๆ˜ ๅƒ๏ผŒๆฏไธ€็ง้ƒฝไธบไธๅŒ็š„ๅนณๅฐๅ’Œไฝฟ็”จๆกˆไพ‹่ฎพ่ฎก๏ผŒไปฅๆไพ›้ซ˜ๅ…ผๅฎนๆ€งๅ’Œๆ•ˆ็އ๏ผš + + Dockerๆ‹‰ๅ–ๆฌกๆ•ฐ + + - **Dockerfile๏ผš** ๆŽจ่็”จไบŽ่ฎญ็ปƒ็š„GPUๆ˜ ๅƒใ€‚ + - **Dockerfile-arm64๏ผš** ไธบARM64ๆžถๆž„ไผ˜ๅŒ–๏ผŒๅ…่ฎธๅœจๆ ‘่Ž“ๆดพๅ’Œๅ…ถไป–ๅŸบไบŽARM64็š„ๅนณๅฐไธŠ้ƒจ็ฝฒใ€‚ + - **Dockerfile-cpu๏ผš** ๅŸบไบŽUbuntu็š„CPU็‰ˆ๏ผŒ้€‚ๅˆๆ— GPU็Žฏๅขƒไธ‹็š„ๆŽจ็†ใ€‚ + - **Dockerfile-jetson๏ผš** ไธบNVIDIA Jetson่ฎพๅค‡้‡่บซๅฎšๅˆถ๏ผŒๆ•ดๅˆไบ†้’ˆๅฏน่ฟ™ไบ›ๅนณๅฐไผ˜ๅŒ–็š„GPUๆ”ฏๆŒใ€‚ + - **Dockerfile-python๏ผš** ๆœ€ๅฐๅŒ–ๆ˜ ๅƒ๏ผŒๅชๅŒ…ๅซPythonๅŠๅฟ…่ฆไพ่ต–๏ผŒ็†ๆƒณไบŽ่ฝป้‡็บงๅบ”็”จๅ’Œๅผ€ๅ‘ใ€‚ + - **Dockerfile-conda๏ผš** ๅŸบไบŽMiniconda3๏ผŒๅŒ…ๅซcondaๅฎ‰่ฃ…็š„ultralyticsๅŒ…ใ€‚ + + ไปฅไธ‹ๆ˜ฏ่Žทๅ–ๆœ€ๆ–ฐๆ˜ ๅƒๅนถๆ‰ง่กŒๅฎƒ็š„ๅ‘ฝไปค๏ผš + + ```bash + # ๅฐ†ๆ˜ ๅƒๅ็งฐ่ฎพ็ฝฎไธบๅ˜้‡ + t=ultralytics/ultralytics:latest + + # ไปŽDocker Hubๆ‹‰ๅ–ๆœ€ๆ–ฐ็š„ultralyticsๆ˜ ๅƒ + sudo docker pull $t + + # ไฝฟ็”จGPUๆ”ฏๆŒ่ฟ่กŒultralyticsๆ˜ ๅƒ็š„ๅฎนๅ™จ + sudo docker run -it --ipc=host --gpus all $t # ๆ‰€ๆœ‰GPU + sudo docker run -it --ipc=host --gpus '"device=2,3"' $t # ๆŒ‡ๅฎšGPU + ``` + + ไธŠ่ฟฐๅ‘ฝไปคๅˆๅง‹ๅŒ–ไบ†ไธ€ไธชๅธฆๆœ‰ๆœ€ๆ–ฐ`ultralytics`ๆ˜ ๅƒ็š„Dockerๅฎนๅ™จใ€‚`-it`ๆ ‡ๅฟ—ๅˆ†้…ไบ†ไธ€ไธชไผชTTY๏ผŒๅนถไฟๆŒstdinๆ‰“ๅผ€๏ผŒไฝฟๆ‚จๅฏไปฅไธŽๅฎนๅ™จไบคไบ’ใ€‚`--ipc=host`ๆ ‡ๅฟ—ๅฐ†IPC๏ผˆ่ฟ›็จ‹้—ด้€šไฟก๏ผ‰ๅ‘ฝๅ็ฉบ้—ด่ฎพ็ฝฎไธบๅฎฟไธป๏ผŒ่ฟ™ๅฏนไบŽ่ฟ›็จ‹ไน‹้—ด็š„ๅ†…ๅญ˜ๅ…ฑไบซ่‡ณๅ…ณ้‡่ฆใ€‚`--gpus all`ๆ ‡ๅฟ—ไฝฟๅฎนๅ™จๅ†…ๅฏไปฅ่ฎฟ้—ฎๆ‰€ๆœ‰ๅฏ็”จ็š„GPU๏ผŒ่ฟ™ๅฏนไบŽ้œ€่ฆGPU่ฎก็ฎ—็š„ไปปๅŠก่‡ณๅ…ณ้‡่ฆใ€‚ + + ๆณจๆ„๏ผš่ฆๅœจๅฎนๅ™จไธญไฝฟ็”จๆœฌๅœฐๆœบๅ™จไธŠ็š„ๆ–‡ไปถ๏ผŒ่ฏทไฝฟ็”จDockerๅทๅฐ†ๆœฌๅœฐ็›ฎๅฝ•ๆŒ‚่ฝฝๅˆฐๅฎนๅ™จไธญ๏ผš + + ```bash + # ๅฐ†ๆœฌๅœฐ็›ฎๅฝ•ๆŒ‚่ฝฝๅˆฐๅฎนๅ™จๅ†…็š„็›ฎๅฝ• + sudo docker run -it --ipc=host --gpus all -v /path/on/host:/path/in/container $t + ``` + + ๅฐ†`/path/on/host`ๆ›ดๆ”นไธบๆ‚จๆœฌๅœฐๆœบๅ™จไธŠ็š„็›ฎๅฝ•่ทฏๅพ„๏ผŒๅฐ†`/path/in/container`ๆ›ดๆ”นไธบDockerๅฎนๅ™จๅ†…ๅธŒๆœ›่ฎฟ้—ฎ็š„่ทฏๅพ„ใ€‚ + + ๆฌฒไบ†่งฃ่ฟ›้˜ถDockerไฝฟ็”จๆ–นๆณ•๏ผŒ่ฏทๆŽข็ดข[Ultralytics DockerๆŒ‡ๅ—](https://docs.ultralytics.com/guides/docker-quickstart/)ใ€‚ + +ๆœ‰ๅ…ณไพ่ต–้กนๅˆ—่กจ๏ผŒ่ฏทๅ‚่ง`ultralytics`็š„[requirements.txt](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt)ๆ–‡ไปถใ€‚่ฏทๆณจๆ„๏ผŒไธŠ่ฟฐๆ‰€ๆœ‰็คบไพ‹ๅ‡ๅฎ‰่ฃ…ไบ†ๆ‰€ๆœ‰ๅฟ…้œ€็š„ไพ่ต–้กนใ€‚ + +

+**Watch:** Ultralytics YOLO Quick Start Guide
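+
+ๅฎ‰่ฃ…ๅฎŒๆˆๅŽ๏ผŒๅฏไปฅ็”จไธ€ๆก็ฎ€ๅ•็š„ๅ‘ฝไปคๆฃ€ๆŸฅ็Žฏๅขƒๆ˜ฏๅฆๅฐฑ็ปชใ€‚ไธ‹้ขๆ˜ฏไธ€ไธช็ฎ€็Ÿญ็š„็คบๆ„็”จๆณ•๏ผˆๅ‡่ฎพ`ultralytics`ๅทฒๆŒ‰ไธŠ่ฟฐไปปไธ€ๆ–นๅผๅฎ‰่ฃ…๏ผ‰๏ผŒๅฎƒไผšๆ‰“ๅฐ่ฝฏ็กฌไปถไฟกๆฏๅนถๆฃ€ๆŸฅไพ่ต–ๆ˜ฏๅฆๅฎŒๆ•ด๏ผš
+
+```python
+import ultralytics
+
+# ๆ‰“ๅฐ็ณป็ปŸใ€Pythonใ€torch็ญ‰ไฟกๆฏๅนถ้ชŒ่ฏไพ่ต–
+ultralytics.checks()
+```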
+ +!!! Tip "ๆ็คบ" + + PyTorch็š„่ฆๆฑ‚ๅ› ๆ“ไฝœ็ณป็ปŸๅ’ŒCUDA้œ€่ฆ่€Œๅผ‚๏ผŒๅ› ๆญคๅปบ่ฎฎ้ฆ–ๅ…ˆๆ นๆฎ[https://pytorch.org/get-started/locally](https://pytorch.org/get-started/locally)ไธŠ็š„ๆŒ‡ๅ—ๅฎ‰่ฃ…PyTorchใ€‚ + + + PyTorchๅฎ‰่ฃ…ๆŒ‡ๅ— + + +## ้€š่ฟ‡CLIไฝฟ็”จUltralytics + +Ultralyticsๅ‘ฝไปค่กŒ็•Œ้ข๏ผˆCLI๏ผ‰ๅ…่ฎธๆ‚จ้€š่ฟ‡็ฎ€ๅ•็š„ๅ•่กŒๅ‘ฝไปคไฝฟ็”จ๏ผŒๆ— ้œ€Python็Žฏๅขƒใ€‚CLIไธ้œ€่ฆ่‡ชๅฎšไน‰ๆˆ–Pythonไปฃ็ ใ€‚ๆ‚จๅฏไปฅ็›ดๆŽฅไปŽ็ปˆ็ซฏไฝฟ็”จ`yolo`ๅ‘ฝไปค่ฟ่กŒๆ‰€ๆœ‰ไปปๅŠกใ€‚ๆŸฅ็œ‹[CLIๆŒ‡ๅ—](/../usage/cli.md)๏ผŒไบ†่งฃๆ›ดๅคšๅ…ณไบŽไปŽๅ‘ฝไปค่กŒไฝฟ็”จYOLOv8็š„ไฟกๆฏใ€‚ + +!!! Example "็คบไพ‹" + + === "่ฏญๆณ•" + + Ultralytics `yolo`ๅ‘ฝไปคไฝฟ็”จไปฅไธ‹่ฏญๆณ•๏ผš + ```bash + yolo ไปปๅŠก ๆจกๅผ ๅ‚ๆ•ฐ + + ๅ…ถไธญ ไปปๅŠก๏ผˆๅฏ้€‰๏ผ‰ๆ˜ฏ[detect, segment, classify]ไธญ็š„ไธ€ไธช + ๆจกๅผ๏ผˆๅฟ…้œ€๏ผ‰ๆ˜ฏ[train, val, predict, export, track]ไธญ็š„ไธ€ไธช + ๅ‚ๆ•ฐ๏ผˆๅฏ้€‰๏ผ‰ๆ˜ฏไปปๆ„ๆ•ฐ้‡็š„่‡ชๅฎšไน‰โ€œarg=valueโ€ๅฏน๏ผŒๅฆ‚โ€œimgsz=320โ€๏ผŒๅฏ่ฆ†็›–้ป˜่ฎคๅ€ผใ€‚ + ``` + ๅœจๅฎŒๆ•ด็š„[้…็ฝฎๆŒ‡ๅ—](/../usage/cfg.md)ไธญๆŸฅ็œ‹ๆ‰€ๆœ‰ๅ‚ๆ•ฐ๏ผŒๆˆ–่€…็”จ`yolo cfg`ๆŸฅ็œ‹ + + === "่ฎญ็ปƒ" + + ็”จๅˆๅง‹ๅญฆไน ็އ0.01่ฎญ็ปƒๆฃ€ๆต‹ๆจกๅž‹10ไธชๅ‘จๆœŸ + ```bash + yolo train data=coco128.yaml model=yolov8n.pt epochs=10 lr0=0.01 + ``` + + === "้ข„ๆต‹" + + ไฝฟ็”จ้ข„่ฎญ็ปƒ็š„ๅˆ†ๅ‰ฒๆจกๅž‹ไปฅ320็š„ๅ›พๅƒๅคงๅฐ้ข„ๆต‹YouTube่ง†้ข‘๏ผš + ```bash + yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320 + ``` + + === "้ชŒ่ฏ" + + ไปฅๆ‰น้‡ๅคงๅฐ1ๅ’Œ640็š„ๅ›พๅƒๅคงๅฐ้ชŒ่ฏ้ข„่ฎญ็ปƒ็š„ๆฃ€ๆต‹ๆจกๅž‹๏ผš + ```bash + yolo val model=yolov8n.pt data=coco128.yaml batch=1 imgsz=640 + ``` + + === "ๅฏผๅ‡บ" + + ไปฅ224x128็š„ๅ›พๅƒๅคงๅฐๅฐ†YOLOv8nๅˆ†็ฑปๆจกๅž‹ๅฏผๅ‡บๅˆฐONNXๆ ผๅผ๏ผˆๆ— ้œ€ไปปๅŠก๏ผ‰ + ```bash + yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128 + ``` + + === "็‰นๆฎŠ" + + ่ฟ่กŒ็‰นๆฎŠๅ‘ฝไปคไปฅๆŸฅ็œ‹็‰ˆๆœฌใ€ๆŸฅ็œ‹่ฎพ็ฝฎใ€่ฟ่กŒๆฃ€ๆŸฅ็ญ‰๏ผš + ```bash + yolo help + yolo checks + yolo version + yolo settings + yolo copy-cfg + yolo cfg + ``` + +!!! Warning "่ญฆๅ‘Š" + + ๅ‚ๆ•ฐๅฟ…้กปไปฅ`arg=val`ๅฏน็š„ๅฝขๅผไผ ้€’๏ผŒ็”จ็ญ‰ๅท`=`ๅˆ†้š”๏ผŒๅนถ็”จ็ฉบๆ ผ` `ๅˆ†้š”ๅฏนใ€‚ไธ่ฆไฝฟ็”จ`--`ๅ‚ๆ•ฐๅ‰็ผ€ๆˆ–้€—ๅท`,`ๅˆ†้š”ๅ‚ๆ•ฐใ€‚ + + - `yolo predict model=yolov8n.pt imgsz=640 conf=0.25`   โœ… + - `yolo predict model yolov8n.pt imgsz 640 conf 0.25`   โŒ + - `yolo predict --model yolov8n.pt --imgsz 640 --conf 0.25`   โŒ + +[CLIๆŒ‡ๅ—](/../usage/cli.md){ .md-button } + +## ้€š่ฟ‡Pythonไฝฟ็”จUltralytics + +YOLOv8็š„PythonๆŽฅๅฃๅ…่ฎธๆ— ็ผ้›†ๆˆ่ฟ›ๆ‚จ็š„Python้กน็›ฎ๏ผŒ่ฝปๆพๅŠ ่ฝฝใ€่ฟ่กŒๆจกๅž‹ๅŠๅค„็†่พ“ๅ‡บใ€‚PythonๆŽฅๅฃ่ฎพ่ฎก็ฎ€ๆดๆ˜“็”จ๏ผŒไฝฟ็”จๆˆท่ƒฝๅฟซ้€Ÿๅฎž็Žฐไป–ไปฌ้กน็›ฎไธญ็š„็›ฎๆ ‡ๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒๅ’Œๅˆ†็ฑปๅŠŸ่ƒฝใ€‚่ฟ™ไฝฟYOLOv8็š„PythonๆŽฅๅฃๆˆไธบไปปไฝ•ๅธŒๆœ›ๅœจๅ…ถPython้กน็›ฎไธญ็บณๅ…ฅ่ฟ™ไบ›ๅŠŸ่ƒฝ็š„ไบบ็š„ๅฎ่ดตๅทฅๅ…ทใ€‚ + +ไพ‹ๅฆ‚๏ผŒ็”จๆˆทๅฏไปฅๅŠ ่ฝฝไธ€ไธชๆจกๅž‹๏ผŒ่ฎญ็ปƒๅฎƒ๏ผŒๅœจ้ชŒ่ฏ้›†ไธŠ่ฏ„ไผฐๆ€ง่ƒฝ๏ผŒ็”š่‡ณๅช้œ€ๅ‡ ่กŒไปฃ็ ๅฐฑๅฏไปฅๅฐ†ๅ…ถๅฏผๅ‡บๅˆฐONNXๆ ผๅผใ€‚ๆŸฅ็œ‹[PythonๆŒ‡ๅ—](/../usage/python.md)๏ผŒไบ†่งฃๆ›ดๅคšๅ…ณไบŽๅœจPython้กน็›ฎไธญไฝฟ็”จYOLOv8็š„ไฟกๆฏใ€‚ + +!!! 
Example "็คบไพ‹" + + ```python + from ultralytics import YOLO + + # ไปŽๅคดๅผ€ๅง‹ๅˆ›ๅปบไธ€ไธชๆ–ฐ็š„YOLOๆจกๅž‹ + model = YOLO('yolov8n.yaml') + + # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒ็š„YOLOๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + model = YOLO('yolov8n.pt') + + # ไฝฟ็”จโ€œcoco128.yamlโ€ๆ•ฐๆฎ้›†่ฎญ็ปƒๆจกๅž‹3ไธชๅ‘จๆœŸ + results = model.train(data='coco128.yaml', epochs=3) + + # ่ฏ„ไผฐๆจกๅž‹ๅœจ้ชŒ่ฏ้›†ไธŠ็š„ๆ€ง่ƒฝ + results = model.val() + + # ไฝฟ็”จๆจกๅž‹ๅฏนๅ›พ็‰‡่ฟ›่กŒ็›ฎๆ ‡ๆฃ€ๆต‹ + results = model('https://ultralytics.com/images/bus.jpg') + + # ๅฐ†ๆจกๅž‹ๅฏผๅ‡บไธบONNXๆ ผๅผ + success = model.export(format='onnx') + ``` + +[PythonๆŒ‡ๅ—](/../usage/python.md){.md-button .md-button--primary} + +## Ultralytics่ฎพ็ฝฎ + +Ultralyticsๅบ“ๆไพ›ไบ†ไธ€ไธชๅผบๅคง็š„่ฎพ็ฝฎ็ฎก็†็ณป็ปŸ๏ผŒๅ…่ฎธๆ‚จ็ฒพ็ป†ๆŽงๅˆถๅฎž้ชŒใ€‚้€š่ฟ‡ๅˆฉ็”จ`ultralytics.utils`ๆจกๅ—ไธญ็š„`SettingsManager`๏ผŒ็”จๆˆทๅฏไปฅ่ฝปๆพ่ฎฟ้—ฎๅ’Œไฟฎๆ”น่ฎพ็ฝฎใ€‚่ฟ™ไบ›่ฎพ็ฝฎๅญ˜ๅ‚จๅœจYAMLๆ–‡ไปถไธญ๏ผŒๅฏไปฅ็›ดๆŽฅๅœจPython็ŽฏๅขƒไธญๆŸฅ็œ‹ๆˆ–ไฟฎๆ”น๏ผŒๆˆ–่€…้€š่ฟ‡ๅ‘ฝไปค่กŒ็•Œ้ข(CLI)ไฟฎๆ”นใ€‚ + +### ๆฃ€ๆŸฅ่ฎพ็ฝฎ + +่‹ฅ่ฆไบ†่งฃๅฝ“ๅ‰่ฎพ็ฝฎ็š„้…็ฝฎๆƒ…ๅ†ต๏ผŒๆ‚จๅฏไปฅ็›ดๆŽฅๆŸฅ็œ‹๏ผš + +!!! Example "ๆŸฅ็œ‹่ฎพ็ฝฎ" + + === "Python" + ๆ‚จๅฏไปฅไฝฟ็”จPythonๆŸฅ็œ‹่ฎพ็ฝฎใ€‚้ฆ–ๅ…ˆไปŽ`ultralytics`ๆจกๅ—ๅฏผๅ…ฅ`settings`ๅฏน่ฑกใ€‚ไฝฟ็”จไปฅไธ‹ๅ‘ฝไปคๆ‰“ๅฐๅ’Œ่ฟ”ๅ›ž่ฎพ็ฝฎ๏ผš + ```python + from ultralytics import settings + + # ๆŸฅ็œ‹ๆ‰€ๆœ‰่ฎพ็ฝฎ + print(settings) + + # ่ฟ”ๅ›ž็‰นๅฎš่ฎพ็ฝฎ + value = settings['runs_dir'] + ``` + + === "CLI" + ๆˆ–่€…๏ผŒๅ‘ฝไปค่กŒ็•Œ้ขๅ…่ฎธๆ‚จ็”จไธ€ไธช็ฎ€ๅ•็š„ๅ‘ฝไปคๆฃ€ๆŸฅๆ‚จ็š„่ฎพ็ฝฎ๏ผš + ```bash + yolo settings + ``` + +### ไฟฎๆ”น่ฎพ็ฝฎ + +Ultralyticsๅ…่ฎธ็”จๆˆท่ฝปๆพไฟฎๆ”นไป–ไปฌ็š„่ฎพ็ฝฎใ€‚ๆ›ดๆ”นๅฏไปฅ้€š่ฟ‡ไปฅไธ‹ๆ–นๅผๆ‰ง่กŒ๏ผš + +!!! Example "ๆ›ดๆ–ฐ่ฎพ็ฝฎ" + + === "Python" + ๅœจPython็Žฏๅขƒไธญ๏ผŒ่ฐƒ็”จ`settings`ๅฏน่ฑกไธŠ็š„`update`ๆ–นๆณ•ๆฅๆ›ดๆ”นๆ‚จ็š„่ฎพ็ฝฎ๏ผš + ```python + from ultralytics import settings + + # ๆ›ดๆ–ฐไธ€ไธช่ฎพ็ฝฎ + settings.update({'runs_dir': '/path/to/runs'}) + + # ๆ›ดๆ–ฐๅคšไธช่ฎพ็ฝฎ + settings.update({'runs_dir': '/path/to/runs', 'tensorboard': False}) + + # ้‡็ฝฎ่ฎพ็ฝฎไธบ้ป˜่ฎคๅ€ผ + settings.reset() + ``` + + === "CLI" + ๅฆ‚ๆžœๆ‚จๆ›ดๅ–œๆฌขไฝฟ็”จๅ‘ฝไปค่กŒ็•Œ้ข๏ผŒไปฅไธ‹ๅ‘ฝไปคๅฐ†ๅ…่ฎธๆ‚จไฟฎๆ”น่ฎพ็ฝฎ๏ผš + ```bash + # ๆ›ดๆ–ฐไธ€ไธช่ฎพ็ฝฎ + yolo settings runs_dir='/path/to/runs' + + # ๆ›ดๆ–ฐๅคšไธช่ฎพ็ฝฎ + yolo settings runs_dir='/path/to/runs' tensorboard=False + + # ้‡็ฝฎ่ฎพ็ฝฎไธบ้ป˜่ฎคๅ€ผ + yolo settings reset + ``` + +### ็†่งฃ่ฎพ็ฝฎ + +ไธ‹่กจๆไพ›ไบ†Ultralyticsไธญๅฏ่ฐƒๆ•ด่ฎพ็ฝฎ็š„ๆฆ‚่งˆใ€‚ๆฏไธช่ฎพ็ฝฎ้ƒฝๆฆ‚่ฟฐไบ†ไธ€ไธช็คบไพ‹ๅ€ผใ€ๆ•ฐๆฎ็ฑปๅž‹ๅ’Œ็ฎ€็Ÿญๆ่ฟฐใ€‚ + +| ๅ็งฐ | ็คบไพ‹ๅ€ผ | ๆ•ฐๆฎ็ฑปๅž‹ | ๆ่ฟฐ | +|--------------------|-----------------------|--------|------------------------------------------------------------------------------------------| +| `settings_version` | `'0.0.4'` | `str` | Ultralytics _settings_ ็‰ˆๆœฌ๏ผˆไธๅŒไบŽUltralytics [pip](https://pypi.org/project/ultralytics/)็‰ˆๆœฌ๏ผ‰ | +| `datasets_dir` | `'/path/to/datasets'` | `str` | ๅญ˜ๅ‚จๆ•ฐๆฎ้›†็š„็›ฎๅฝ• | +| `weights_dir` | `'/path/to/weights'` | `str` | ๅญ˜ๅ‚จๆจกๅž‹ๆƒ้‡็š„็›ฎๅฝ• | +| `runs_dir` | `'/path/to/runs'` | `str` | ๅญ˜ๅ‚จๅฎž้ชŒ่ฟ่กŒ็š„็›ฎๅฝ• | +| `uuid` | `'a1b2c3d4'` | `str` | ๅฝ“ๅ‰่ฎพ็ฝฎ็š„ๅ”ฏไธ€ๆ ‡่ฏ†็ฌฆ | +| `sync` | `True` | `bool` | ๆ˜ฏๅฆๅฐ†ๅˆ†ๆžๅ’ŒๅดฉๆบƒๅŒๆญฅๅˆฐHUB | +| `api_key` | `''` | `str` | Ultralytics HUB [API Key](https://hub.ultralytics.com/settings?tab=api+keys) | +| `clearml` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จClearML่ฎฐๅฝ• | +| `comet` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จ[Comet 
ML](https://bit.ly/yolov8-readme-comet)่ฟ›่กŒๅฎž้ชŒ่ทŸ่ธชๅ’Œๅฏ่ง†ๅŒ– | +| `dvc` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จ[DVC่ฟ›่กŒๅฎž้ชŒ่ทŸ่ธช](https://dvc.org/doc/dvclive/ml-frameworks/yolo)ๅ’Œ็‰ˆๆœฌๆŽงๅˆถ | +| `hub` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จ[Ultralytics HUB](https://hub.ultralytics.com)้›†ๆˆ | +| `mlflow` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จMLFlow่ฟ›่กŒๅฎž้ชŒ่ทŸ่ธช | +| `neptune` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จNeptune่ฟ›่กŒๅฎž้ชŒ่ทŸ่ธช | +| `raytune` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จRay Tune่ฟ›่กŒ่ถ…ๅ‚ๆ•ฐ่ฐƒๆ•ด | +| `tensorboard` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จTensorBoard่ฟ›่กŒๅฏ่ง†ๅŒ– | +| `wandb` | `True` | `bool` | ๆ˜ฏๅฆไฝฟ็”จWeights & Biases่ฎฐๅฝ• | + +ๅœจๆ‚จๆต่งˆ้กน็›ฎๆˆ–ๅฎž้ชŒๆ—ถ๏ผŒ่ฏทๅŠกๅฟ…้‡ๆ–ฐ่ฎฟ้—ฎ่ฟ™ไบ›่ฎพ็ฝฎ๏ผŒไปฅ็กฎไฟๅฎƒไปฌไธบๆ‚จ็š„้œ€ๆฑ‚ๆไพ›ๆœ€ไฝณ้…็ฝฎใ€‚ diff --git a/ultralytics/docs/zh/quickstart.md:Zone.Identifier b/ultralytics/docs/zh/quickstart.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/quickstart.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/tasks/classify.md b/ultralytics/docs/zh/tasks/classify.md new file mode 100755 index 0000000..f440ad1 --- /dev/null +++ b/ultralytics/docs/zh/tasks/classify.md @@ -0,0 +1,172 @@ +--- +comments: true +description: ๅญฆไน YOLOv8ๅˆ†็ฑปๆจกๅž‹่ฟ›่กŒๅ›พๅƒๅˆ†็ฑปใ€‚่Žทๅ–ๅ…ณไบŽ้ข„่ฎญ็ปƒๆจกๅž‹ๅˆ—่กจๅŠๅฆ‚ไฝ•่ฎญ็ปƒใ€้ชŒ่ฏใ€้ข„ๆต‹ใ€ๅฏผๅ‡บๆจกๅž‹็š„่ฏฆ็ป†ไฟกๆฏใ€‚ +keywords: Ultralytics, YOLOv8, ๅ›พๅƒๅˆ†็ฑป, ้ข„่ฎญ็ปƒๆจกๅž‹, YOLOv8n-cls, ่ฎญ็ปƒ, ้ชŒ่ฏ, ้ข„ๆต‹, ๆจกๅž‹ๅฏผๅ‡บ +--- + +# ๅ›พๅƒๅˆ†็ฑป + +ๅ›พๅƒๅˆ†็ฑป็คบไพ‹ + +ๅ›พๅƒๅˆ†็ฑปๆ˜ฏไธ‰้กนไปปๅŠกไธญๆœ€็ฎ€ๅ•็š„๏ผŒๅฎƒๆถ‰ๅŠๅฐ†ๆ•ดไธชๅ›พๅƒๅˆ†็ฑปไธบไธ€็ป„้ข„ๅฎšไน‰็ฑปๅˆซไธญ็š„ไธ€ไธชใ€‚ + +ๅ›พๅƒๅˆ†็ฑปๅ™จ็š„่พ“ๅ‡บๆ˜ฏๅ•ไธช็ฑปๅˆซๆ ‡็ญพๅ’Œไธ€ไธช็ฝฎไฟกๅบฆๅˆ†ๆ•ฐใ€‚ๅฝ“ๆ‚จๅช้œ€่ฆ็Ÿฅ้“ไธ€ๅน…ๅ›พๅƒๅฑžไบŽๅ“ชไธช็ฑปๅˆซใ€่€Œไธ้œ€่ฆ็Ÿฅ้“่ฏฅ็ฑปๅˆซๅฏน่ฑก็š„ไฝ็ฝฎๆˆ–ๅฎƒไปฌ็š„็กฎๅˆ‡ๅฝข็Šถๆ—ถ๏ผŒๅ›พๅƒๅˆ†็ฑป้žๅธธๆœ‰็”จใ€‚ + +!!! Tip "ๆ็คบ" + + YOLOv8ๅˆ†็ฑปๆจกๅž‹ไฝฟ็”จ`-cls`ๅŽ็ผ€๏ผŒๅณ`yolov8n-cls.pt`๏ผŒๅนถ้ข„ๅ…ˆ่ฎญ็ปƒๅœจ[ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)ไธŠใ€‚ + +## [ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +่ฟ™้‡Œๅฑ•็คบไบ†้ข„่ฎญ็ปƒ็š„YOLOv8ๅˆ†็ฑปๆจกๅž‹ใ€‚Detectใ€Segmentๅ’ŒPoseๆจกๅž‹ๆ˜ฏๅœจ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ็š„๏ผŒ่€Œๅˆ†็ฑปๆจกๅž‹ๅˆ™ๆ˜ฏๅœจ[ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)ๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ็š„ใ€‚ + +[ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)ไผšๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถ่‡ชๅŠจไปŽUltralytics็š„ๆœ€ๆ–ฐ[ๅ‘ๅธƒ็‰ˆๆœฌ](https://github.com/ultralytics/assets/releases)ไธญไธ‹่ฝฝใ€‚ + +| ๆจกๅž‹ | ๅฐบๅฏธ
(ๅƒ็ด ) | ๅ‡†็กฎ็އ
top1 | ๅ‡†็กฎ็އ
top5 | ้€Ÿๅบฆ
CPU ONNX
(ms) | ้€Ÿๅบฆ
A100 TensorRT
(ms) | ๅ‚ๆ•ฐ
(M) | FLOPs
(B) at 640 | +|----------------------------------------------------------------------------------------------|-----------------|------------------|------------------|-----------------------------|----------------------------------|----------------|--------------------------| +| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-cls.pt) | 224 | 66.6 | 87.0 | 12.9 | 0.31 | 2.7 | 4.3 | +| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-cls.pt) | 224 | 72.3 | 91.1 | 23.4 | 0.35 | 6.4 | 13.5 | +| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-cls.pt) | 224 | 76.4 | 93.2 | 85.4 | 0.62 | 17.0 | 42.7 | +| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-cls.pt) | 224 | 78.0 | 94.1 | 163.0 | 0.87 | 37.5 | 99.7 | +| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-cls.pt) | 224 | 78.4 | 94.3 | 232.0 | 1.01 | 57.4 | 154.8 | + +- **ๅ‡†็กฎ็އ** ๆ˜ฏๆจกๅž‹ๅœจ[ImageNet](https://www.image-net.org/)ๆ•ฐๆฎ้›†้ชŒ่ฏ้›†ไธŠ็š„ๅ‡†็กฎๅบฆใ€‚ +
้€š่ฟ‡`yolo val classify data=path/to/ImageNet device=0`ๅค็Žฐ็ป“ๆžœใ€‚ +- **้€Ÿๅบฆ** ๆ˜ฏๅœจไฝฟ็”จ[Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ๅฎžไพ‹ๆ—ถ๏ผŒImageNet้ชŒ่ฏๅ›พๅƒ็š„ๅนณๅ‡ๅค„็†้€Ÿๅบฆใ€‚ +
้€š่ฟ‡`yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`ๅค็Žฐ็ป“ๆžœใ€‚ + +## ่ฎญ็ปƒ + +ๅœจMNIST160ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒYOLOv8n-clsๆจกๅž‹100ไธชๆ—ถๆœŸ๏ผŒๅ›พๅƒๅฐบๅฏธไธบ64ใ€‚ๆœ‰ๅ…ณๅฏ็”จๅ‚ๆ•ฐ็š„ๅฎŒๆ•ดๅˆ—่กจ๏ผŒ่ฏทๅ‚่ง[้…็ฝฎ](/../usage/cfg.md)้กต้ขใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-cls.yaml') # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹ + model = YOLO('yolov8n-cls.pt') # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + model = YOLO('yolov8n-cls.yaml').load('yolov8n-cls.pt') # ไปŽYAMLๆž„ๅปบๅนถ่ฝฌ็งปๆƒ้‡ + + # ่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='mnist160', epochs=100, imgsz=64) + ``` + + === "CLI" + + ```bash + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹ๅนถไปŽๅคดๅผ€ๅง‹่ฎญ็ปƒ + yolo classify train data=mnist160 model=yolov8n-cls.yaml epochs=100 imgsz=64 + + # ไปŽ้ข„่ฎญ็ปƒ็š„*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo classify train data=mnist160 model=yolov8n-cls.pt epochs=100 imgsz=64 + + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹๏ผŒ่ฝฌ็งป้ข„่ฎญ็ปƒๆƒ้‡ๅนถๅผ€ๅง‹่ฎญ็ปƒ + yolo classify train data=mnist160 model=yolov8n-cls.yaml pretrained=yolov8n-cls.pt epochs=100 imgsz=64 + ``` + +### ๆ•ฐๆฎ้›†ๆ ผๅผ + +YOLOๅˆ†็ฑปๆ•ฐๆฎ้›†็š„ๆ ผๅผ่ฏฆๆƒ…่ฏทๅ‚่ง[ๆ•ฐๆฎ้›†ๆŒ‡ๅ—](/../datasets/classify/index.md)ใ€‚ + +## ้ชŒ่ฏ + +ๅœจMNIST160ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏ่ฎญ็ปƒๅฅฝ็š„YOLOv8n-clsๆจกๅž‹ๅ‡†็กฎๆ€งใ€‚ไธ้œ€่ฆไผ ้€’ไปปไฝ•ๅ‚ๆ•ฐ๏ผŒๅ› ไธบ`model`ไฟ็•™ไบ†ๅฎƒ็š„่ฎญ็ปƒ`data`ๅ’Œๅ‚ๆ•ฐไฝœไธบๆจกๅž‹ๅฑžๆ€งใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-cls.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ้ชŒ่ฏๆจกๅž‹ + metrics = model.val() # ๆ— ้œ€ๅ‚ๆ•ฐ๏ผŒๆ•ฐๆฎ้›†ๅ’Œ่ฎพ็ฝฎๅทฒ่ฎฐๅฟ† + metrics.top1 # top1ๅ‡†็กฎ็އ + metrics.top5 # top5ๅ‡†็กฎ็އ + ``` + === "CLI" + + ```bash + yolo classify val model=yolov8n-cls.pt # ้ชŒ่ฏๅฎ˜ๆ–นๆจกๅž‹ + yolo classify val model=path/to/best.pt # ้ชŒ่ฏ่‡ชๅฎšไน‰ๆจกๅž‹ + ``` + +## ้ข„ๆต‹ + +ไฝฟ็”จ่ฎญ็ปƒ่ฟ‡็š„YOLOv8n-clsๆจกๅž‹ๅฏนๅ›พๅƒ่ฟ›่กŒ้ข„ๆต‹ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-cls.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ไฝฟ็”จๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + results = model('https://ultralytics.com/images/bus.jpg') # ๅฏนๅ›พๅƒ่ฟ›่กŒ้ข„ๆต‹ + ``` + === "CLI" + + ```bash + yolo classify predict model=yolov8n-cls.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จๅฎ˜ๆ–นๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + yolo classify predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จ่‡ชๅฎšไน‰ๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + ``` + +ๆœ‰ๅ…ณ`predict`ๆจกๅผ็š„ๅฎŒๆ•ด่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚่ง[้ข„ๆต‹](https://docs.ultralytics.com/modes/predict/)้กต้ขใ€‚ + +## ๅฏผๅ‡บ + +ๅฐ†YOLOv8n-clsๆจกๅž‹ๅฏผๅ‡บไธบๅ…ถไป–ๆ ผๅผ๏ผŒๅฆ‚ONNXใ€CoreML็ญ‰ใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-cls.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰่ฎญ็ปƒๆจกๅž‹ + + # ๅฏผๅ‡บๆจกๅž‹ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-cls.pt format=onnx # ๅฏผๅ‡บๅฎ˜ๆ–นๆจกๅž‹ + yolo export model=path/to/best.pt format=onnx # ๅฏผๅ‡บ่‡ชๅฎšไน‰่ฎญ็ปƒๆจกๅž‹ + ``` + +ไธ‹่กจไธญๆไพ›ไบ†YOLOv8-clsๆจกๅž‹ๅฏๅฏผๅ‡บ็š„ๆ ผๅผใ€‚ๆ‚จๅฏไปฅ็›ดๆŽฅๅœจๅฏผๅ‡บ็š„ๆจกๅž‹ไธŠ่ฟ›่กŒ้ข„ๆต‹ๆˆ–้ชŒ่ฏ๏ผŒๅณ`yolo predict model=yolov8n-cls.onnx`ใ€‚ๅฏผๅ‡บๅฎŒๆˆๅŽ๏ผŒ็คบไพ‹็”จๆณ•ไผšๆ˜พ็คบๆ‚จ็š„ๆจกๅž‹ใ€‚ + +| ๆ ผๅผ | `format` ๅ‚ๆ•ฐ | ๆจกๅž‹ | ๅ…ƒๆ•ฐๆฎ | ๅ‚ๆ•ฐ | +|--------------------------------------------------------------------|---------------|-------------------------------|-----|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-cls.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-cls.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-cls.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-cls_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-cls.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-cls.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-cls_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-cls.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-cls.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-cls_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-cls_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-cls_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-cls_ncnn_model/` | โœ… | `imgsz`, `half` | + +ๆœ‰ๅ…ณ`export`็š„ๅฎŒๆ•ด่ฏฆ็ป†ไฟกๆฏ๏ผŒ่ฏทๅ‚่ง[ๅฏผๅ‡บ](https://docs.ultralytics.com/modes/export/)้กต้ขใ€‚ diff --git a/ultralytics/docs/zh/tasks/classify.md:Zone.Identifier b/ultralytics/docs/zh/tasks/classify.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/tasks/classify.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/tasks/detect.md b/ultralytics/docs/zh/tasks/detect.md new file mode 100755 index 0000000..52c2308 --- /dev/null +++ b/ultralytics/docs/zh/tasks/detect.md @@ -0,0 +1,184 @@ +--- +comments: true +description: Ultralytics ๅฎ˜ๆ–นYOLOv8ๆ–‡ๆกฃใ€‚ๅญฆไน ๅฆ‚ไฝ•่ฎญ็ปƒใ€้ชŒ่ฏใ€้ข„ๆต‹ๅนถไปฅๅ„็งๆ ผๅผๅฏผๅ‡บๆจกๅž‹ใ€‚ๅŒ…ๆ‹ฌ่ฏฆๅฐฝ็š„ๆ€ง่ƒฝ็ปŸ่ฎกใ€‚ +keywords: YOLOv8, Ultralytics, ็›ฎๆ ‡ๆฃ€ๆต‹, ้ข„่ฎญ็ปƒๆจกๅž‹, ่ฎญ็ปƒ, ้ชŒ่ฏ, ้ข„ๆต‹, ๅฏผๅ‡บๆจกๅž‹, COCO, ImageNet, PyTorch, ONNX, CoreML +--- + +# ็›ฎๆ ‡ๆฃ€ๆต‹ + +็›ฎๆ ‡ๆฃ€ๆต‹็คบไพ‹ + +็›ฎๆ ‡ๆฃ€ๆต‹ๆ˜ฏไธ€้กนไปปๅŠก๏ผŒๆถ‰ๅŠ่พจ่ฏ†ๅ›พๅƒๆˆ–่ง†้ข‘ๆตไธญ็‰ฉไฝ“็š„ไฝ็ฝฎๅ’Œ็ฑปๅˆซใ€‚ + +็›ฎๆ 
‡ๆฃ€ๆต‹ๅ™จ็š„่พ“ๅ‡บๆ˜ฏไธ€็ป„ๅ›ด็ป•ๅ›พๅƒไธญ็‰ฉไฝ“็š„่พน็•Œๆก†๏ผŒไปฅๅŠๆฏไธชๆก†็š„็ฑปๅˆซๆ ‡็ญพๅ’Œ็ฝฎไฟกๅบฆๅพ—ๅˆ†ใ€‚ๅฝ“ๆ‚จ้œ€่ฆ่ฏ†ๅˆซๅœบๆ™ฏไธญ็š„ๆ„Ÿๅ…ด่ถฃๅฏน่ฑก๏ผŒไฝ†ไธ้œ€่ฆๅ‡†็กฎไบ†่งฃ็‰ฉไฝ“็š„ไฝ็ฝฎๆˆ–ๅ…ถ็กฎๅˆ‡ๅฝข็Šถๆ—ถ๏ผŒ็›ฎๆ ‡ๆฃ€ๆต‹ๆ˜ฏไธ€ไธชๅพˆๅฅฝ็š„้€‰ๆ‹ฉใ€‚ + +

+**่ง‚็œ‹๏ผš**ไฝฟ็”จ้ข„่ฎญ็ปƒ็š„Ultralytics YOLOv8ๆจกๅž‹่ฟ›่กŒ็›ฎๆ ‡ๆฃ€ๆต‹ใ€‚
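+
+ไธ‹้ขๆ˜ฏไธ€ๆฎต็ฎ€็Ÿญ็š„็คบๆ„ไปฃ็ ๏ผˆๅ‡่ฎพๅทฒๅฎ‰่ฃ…`ultralytics`ๅŒ…๏ผŒ็คบไพ‹ๅ›พ็‰‡URLไป…ไฝœๆผ”็คบ๏ผ‰๏ผŒๅฑ•็คบๅฆ‚ไฝ•่ฏปๅ–ไธŠ่ฟฐ่พ“ๅ‡บ๏ผšๆฏไธชๆฃ€ๆต‹ๆก†้ƒฝๅธฆๆœ‰่พน็•Œๆก†ๅๆ ‡ใ€็ฝฎไฟกๅบฆๅพ—ๅˆ†ๅ’Œ็ฑปๅˆซ็ดขๅผ•๏ผš
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n.pt')  # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆฃ€ๆต‹ๆจกๅž‹
+results = model('https://ultralytics.com/images/bus.jpg')
+
+# ้ๅŽ†็ฌฌไธ€ๅผ ๅ›พๅƒ็š„ๆ‰€ๆœ‰ๆฃ€ๆต‹ๆก†
+for box in results[0].boxes:
+    print(box.xyxy)  # ่พน็•Œๆก† [x1, y1, x2, y2]
+    print(box.conf)  # ็ฝฎไฟกๅบฆๅพ—ๅˆ†
+    print(box.cls)   # ็ฑปๅˆซ็ดขๅผ•
+```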
+ +!!! Tip "ๆ็คบ" + + YOLOv8 Detect ๆจกๅž‹ๆ˜ฏ้ป˜่ฎค็š„ YOLOv8 ๆจกๅž‹๏ผŒๅณ `yolov8n.pt` ๏ผŒๅนถๅœจ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒไบ†้ข„่ฎญ็ปƒใ€‚ + +## [ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +ๆญคๅค„ๅฑ•็คบไบ†้ข„่ฎญ็ปƒ็š„YOLOv8 Detectๆจกๅž‹ใ€‚Detectใ€Segmentๅ’ŒPoseๆจกๅž‹ๅœจ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒ๏ผŒ่€ŒClassifyๆจกๅž‹ๅœจ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ๆ•ฐๆฎ้›†ไธŠ้ข„่ฎญ็ปƒใ€‚ + +[ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ไผšๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถ่‡ชๅŠจไปŽUltralytics็š„ๆœ€ๆ–ฐ [ๅ‘ๅธƒ](https://github.com/ultralytics/assets/releases) ไธญไธ‹่ฝฝใ€‚ + +| ๆจกๅž‹ | ๅฐบๅฏธ
(ๅƒ็ด ) | mAPval
50-95 | ้€Ÿๅบฆ
CPU ONNX
(ๆฏซ็ง’) | ้€Ÿๅบฆ
A100 TensorRT
(ๆฏซ็ง’) | ๅ‚ๆ•ฐ
(M) | FLOPs
(B) | +|--------------------------------------------------------------------------------------|-----------------|----------------------|-----------------------------|----------------------------------|----------------|-------------------| +| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 | +| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 | +| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 | +| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 | +| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 | + +- **mAPval** ๅ€ผ้€‚็”จไบŽ [COCO val2017](http://cocodataset.org) ๆ•ฐๆฎ้›†ไธŠ็š„ๅ•ๆจกๅž‹ๅ•ๅฐบๅบฆใ€‚ +
้€š่ฟ‡ `yolo val detect data=coco.yaml device=0` ๅค็Žฐใ€‚ +- **้€Ÿๅบฆ** ๆ˜ฏๅœจไฝฟ็”จ [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) ไบ‘ๅฎžไพ‹ๅฏนCOCO valๅ›พๅƒ็š„ๅนณๅ‡ๅ€ผใ€‚ +
้€š่ฟ‡ `yolo val detect data=coco128.yaml batch=1 device=0|cpu` ๅค็Žฐใ€‚ + +## ่ฎญ็ปƒ + +ๅœจCOCO128ๆ•ฐๆฎ้›†ไธŠไฝฟ็”จๅ›พๅƒๅฐบๅฏธ640ๅฐ†YOLOv8n่ฎญ็ปƒ100ไธชepochsใ€‚่ฆๆŸฅ็œ‹ๅฏ็”จๅ‚ๆ•ฐ็š„ๅฎŒๆ•ดๅˆ—่กจ๏ผŒ่ฏทๅ‚้˜… [้…็ฝฎ](/../usage/cfg.md) ้กต้ขใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.yaml') # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + model = YOLO('yolov8n.yaml').load('yolov8n.pt') # ไปŽYAMLๆž„ๅปบๅนถ่ฝฌ็งปๆƒ้‡ + + # ่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='coco128.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹ๅนถไปŽๅคดๅผ€ๅง‹่ฎญ็ปƒ + yolo detect train data=coco128.yaml model=yolov8n.yaml epochs=100 imgsz=640 + + # ไปŽ้ข„่ฎญ็ปƒ็š„*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo detect train data=coco128.yaml model=yolov8n.pt epochs=100 imgsz=640 + + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹๏ผŒไผ ้€’้ข„่ฎญ็ปƒๆƒ้‡ๅนถๅผ€ๅง‹่ฎญ็ปƒ + yolo detect train data=coco128.yaml model=yolov8n.yaml pretrained=yolov8n.pt epochs=100 imgsz=640 + ``` + +### ๆ•ฐๆฎ้›†ๆ ผๅผ + +YOLOๆฃ€ๆต‹ๆ•ฐๆฎ้›†ๆ ผๅผๅฏไปฅๅœจ [ๆ•ฐๆฎ้›†ๆŒ‡ๅ—](/../datasets/detect/index.md) ไธญ่ฏฆ็ป†ๆ‰พๅˆฐใ€‚่ฆๅฐ†ๆ‚จ็Žฐๆœ‰็š„ๆ•ฐๆฎ้›†ไปŽๅ…ถไป–ๆ ผๅผ๏ผˆๅฆ‚COCO็ญ‰๏ผ‰่ฝฌๆขไธบYOLOๆ ผๅผ๏ผŒ่ฏทไฝฟ็”จUltralytics็š„ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ๅทฅๅ…ทใ€‚ + +## ้ชŒ่ฏ + +ๅœจCOCO128ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏ่ฎญ็ปƒๅฅฝ็š„YOLOv8nๆจกๅž‹ๅ‡†็กฎๆ€งใ€‚ๆ— ้œ€ไผ ้€’ๅ‚ๆ•ฐ๏ผŒ`model` ไฝœไธบๆจกๅž‹ๅฑžๆ€งไฟ็•™ๅ…ถ่ฎญ็ปƒ็š„ `data` ๅ’Œๅ‚ๆ•ฐใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ้ชŒ่ฏๆจกๅž‹ + metrics = model.val() # ๆ— ้œ€ๅ‚ๆ•ฐ๏ผŒๆ•ฐๆฎ้›†ๅ’Œ่ฎพ็ฝฎ้€š่ฟ‡ๆจกๅž‹ๅฑžๆ€ง่ฎฐไฝ + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅŒ…ๅซๆฏไธช็ฑปๅˆซmap50-95็š„ๅˆ—่กจ + ``` + === "CLI" + + ```bash + yolo detect val model=yolov8n.pt # ้ชŒ่ฏๅฎ˜ๆ–นๆจกๅž‹ + yolo detect val model=path/to/best.pt # ้ชŒ่ฏ่‡ชๅฎšไน‰ๆจกๅž‹ + ``` + +## ้ข„ๆต‹ + +ไฝฟ็”จ่ฎญ็ปƒๅฅฝ็š„YOLOv8nๆจกๅž‹ๅœจๅ›พๅƒไธŠ่ฟ›่กŒ้ข„ๆต‹ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ไฝฟ็”จๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + results = model('https://ultralytics.com/images/bus.jpg') # ๅฏนๅ›พๅƒ่ฟ›่กŒ้ข„ๆต‹ + ``` + === "CLI" + + ```bash + yolo detect predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จๅฎ˜ๆ–นๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + yolo detect predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จ่‡ชๅฎšไน‰ๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + ``` + +ๅฎŒๆ•ด็š„ `predict` ๆจกๅผ็ป†่Š‚่ฏท่ง [้ข„ๆต‹](https://docs.ultralytics.com/modes/predict/) ้กต้ขใ€‚ + +## ๅฏผๅ‡บ + +ๅฐ†YOLOv8nๆจกๅž‹ๅฏผๅ‡บไธบONNXใ€CoreML็ญ‰ไธๅŒๆ ผๅผใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰่ฎญ็ปƒๆจกๅž‹ + + # ๅฏผๅ‡บๆจกๅž‹ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n.pt format=onnx # ๅฏผๅ‡บๅฎ˜ๆ–นๆจกๅž‹ + yolo export model=path/to/best.pt format=onnx # ๅฏผๅ‡บ่‡ชๅฎšไน‰่ฎญ็ปƒๆจกๅž‹ + ``` + +ไธ‹่กจไธญๆไพ›ไบ†ๅฏ็”จ็š„YOLOv8ๅฏผๅ‡บๆ ผๅผใ€‚ๆ‚จๅฏไปฅ็›ดๆŽฅๅœจๅฏผๅ‡บ็š„ๆจกๅž‹ไธŠ่ฟ›่กŒ้ข„ๆต‹ๆˆ–้ชŒ่ฏ๏ผŒๅณ `yolo predict model=yolov8n.onnx`ใ€‚ๅฏผๅ‡บๅฎŒๆˆๅŽ๏ผŒไผšไธบๆ‚จ็š„ๆจกๅž‹ๆ˜พ็คบไฝฟ็”จ็คบไพ‹ใ€‚ + +| ๆ ผๅผ | `format` ๅ‚ๆ•ฐ | ๆจกๅž‹ | ๅ…ƒๆ•ฐๆฎ | ๅ‚ๆ•ฐ | +|--------------------------------------------------------------------|---------------|---------------------------|-----|-------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n.torchscript` | โœ… | `imgsz`๏ผŒ`optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n.onnx` | โœ… | `imgsz`๏ผŒ`half`๏ผŒ`dynamic`๏ผŒ`simplify`๏ผŒ`opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n_openvino_model/` | โœ… | `imgsz`๏ผŒ`half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n.engine` | โœ… | `imgsz`๏ผŒ`half`๏ผŒ`dynamic`๏ผŒ`simplify`๏ผŒ`workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n.mlpackage` | โœ… | `imgsz`๏ผŒ`half`๏ผŒ`int8`๏ผŒ`nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n_saved_model/` | โœ… | `imgsz`๏ผŒ`keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n.tflite` | โœ… | `imgsz`๏ผŒ`half`๏ผŒ`int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n_ncnn_model/` | โœ… | `imgsz`๏ผŒ`half` | + +ๅฎŒๆ•ด็š„ `export` ่ฏฆๆƒ…่ฏท่ง [ๅฏผๅ‡บ](https://docs.ultralytics.com/modes/export/) ้กต้ขใ€‚ diff --git a/ultralytics/docs/zh/tasks/detect.md:Zone.Identifier b/ultralytics/docs/zh/tasks/detect.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/tasks/detect.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/tasks/index.md b/ultralytics/docs/zh/tasks/index.md new file mode 100755 index 0000000..4a48c73 --- /dev/null +++ b/ultralytics/docs/zh/tasks/index.md @@ -0,0 +1,51 @@ +--- +comments: true +description: ไบ†่งฃ YOLOv8 ่ƒฝๅคŸๆ‰ง่กŒ็š„ๅŸบ็ก€่ฎก็ฎ—ๆœบ่ง†่ง‰ไปปๅŠก๏ผŒๅŒ…ๆ‹ฌๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒใ€ๅˆ†็ฑปๅ’Œๅงฟๆ€ไผฐ่ฎกใ€‚็†่งฃๅฎƒไปฌๅœจไฝ ็š„ AI ้กน็›ฎไธญ็š„ๅบ”็”จใ€‚ +keywords: Ultralytics, YOLOv8, ๆฃ€ๆต‹, ๅˆ†ๅ‰ฒ, ๅˆ†็ฑป, ๅงฟๆ€ไผฐ่ฎก, AI ๆก†ๆžถ, ่ฎก็ฎ—ๆœบ่ง†่ง‰ไปปๅŠก +--- + +# Ultralytics YOLOv8 ไปปๅŠก + +
+Ultralytics YOLO ๆ”ฏๆŒ็š„ไปปๅŠก + +YOLOv8 ๆ˜ฏไธ€ไธชๆ”ฏๆŒๅคš็ง่ฎก็ฎ—ๆœบ่ง†่ง‰**ไปปๅŠก**็š„ AI ๆก†ๆžถใ€‚่ฏฅๆก†ๆžถๅฏ็”จไบŽๆ‰ง่กŒ[ๆฃ€ๆต‹](detect.md)ใ€[ๅˆ†ๅ‰ฒ](segment.md)ใ€[ๅˆ†็ฑป](classify.md)ๅ’Œ[ๅงฟๆ€](pose.md)ไผฐ่ฎกใ€‚ๆฏ้กนไปปๅŠก้ƒฝๆœ‰ไธๅŒ็š„็›ฎๆ ‡ๅ’Œ็”จไพ‹ใ€‚ + +

+**่ง‚็œ‹๏ผš**ๆŽข็ดข Ultralytics YOLO ไปปๅŠก๏ผšๅฏน่ฑกๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒใ€่ฟฝ่ธชๅ’Œๅงฟๆ€ไผฐ่ฎกใ€‚
+ +## [ๆฃ€ๆต‹](detect.md) + +ๆฃ€ๆต‹ๆ˜ฏ YOLOv8 ๆ”ฏๆŒ็š„ไธป่ฆไปปๅŠกใ€‚ๅฎƒๆถ‰ๅŠๅœจๅ›พๅƒๆˆ–่ง†้ข‘ๅธงไธญๆฃ€ๆต‹ๅฏน่ฑกๅนถๅ›ด็ป•ๅฎƒไปฌ็ป˜ๅˆถ่พน็•Œๆก†ใ€‚ไพฆๆต‹ๅˆฐ็š„ๅฏน่ฑกๆ นๆฎๅ…ถ็‰นๅพ่ขซๅฝ’็ฑปๅˆฐไธๅŒ็š„็ฑปๅˆซใ€‚YOLOv8 ่ƒฝๅคŸๅœจๅ•ไธชๅ›พๅƒๆˆ–่ง†้ข‘ๅธงไธญๆฃ€ๆต‹ๅคšไธชๅฏน่ฑก๏ผŒๅ…ทๆœ‰้ซ˜ๅ‡†็กฎๆ€งๅ’Œ้€Ÿๅบฆใ€‚ + +[ๆฃ€ๆต‹็คบไพ‹](detect.md){ .md-button } + +## [ๅˆ†ๅ‰ฒ](segment.md) + +ๅˆ†ๅ‰ฒๆ˜ฏไธ€้กนๆถ‰ๅŠๅฐ†ๅ›พๅƒๅˆ†ๅ‰ฒๆˆๅŸบไบŽๅ›พๅƒๅ†…ๅฎน็š„ไธๅŒๅŒบๅŸŸ็š„ไปปๅŠกใ€‚ๆฏไธชๅŒบๅŸŸๆ นๆฎๅ…ถๅ†…ๅฎน่ขซๅˆ†้…ไธ€ไธชๆ ‡็ญพใ€‚่ฏฅไปปๅŠกๅœจๅบ”็”จ็จ‹ๅบไธญ้žๅธธๆœ‰็”จ๏ผŒๅฆ‚ๅ›พๅƒๅˆ†ๅ‰ฒๅ’ŒๅŒปๅญฆๆˆๅƒใ€‚YOLOv8 ไฝฟ็”จ U-Net ๆžถๆž„็š„ๅ˜ไฝ“ๆฅๆ‰ง่กŒๅˆ†ๅ‰ฒใ€‚ + +[ๅˆ†ๅ‰ฒ็คบไพ‹](segment.md){ .md-button } + +## [ๅˆ†็ฑป](classify.md) + +ๅˆ†็ฑปๆ˜ฏไธ€้กนๆถ‰ๅŠๅฐ†ๅ›พๅƒๅฝ’็ฑปไธบไธๅŒ็ฑปๅˆซ็š„ไปปๅŠกใ€‚YOLOv8 ๅฏ็”จไบŽๆ นๆฎๅ›พๅƒๅ†…ๅฎนๅฏนๅ›พๅƒ่ฟ›่กŒๅˆ†็ฑปใ€‚ๅฎƒไฝฟ็”จ EfficientNet ๆžถๆž„็š„ๅ˜ไฝ“ๆฅๆ‰ง่กŒๅˆ†็ฑปใ€‚ + +[ๅˆ†็ฑป็คบไพ‹](classify.md){ .md-button } + +## [ๅงฟๆ€](pose.md) + +ๅงฟๆ€/ๅ…ณ้”ฎ็‚นๆฃ€ๆต‹ๆ˜ฏไธ€้กนๆถ‰ๅŠๅœจๅ›พๅƒๆˆ–่ง†้ข‘ๅธงไธญๆฃ€ๆต‹็‰นๅฎš็‚น็š„ไปปๅŠกใ€‚่ฟ™ไบ›็‚น่ขซ็งฐไธบๅ…ณ้”ฎ็‚น๏ผŒ็”จไบŽ่ทŸ่ธช็งปๅŠจๆˆ–ๅงฟๆ€ไผฐ่ฎกใ€‚YOLOv8 ่ƒฝๅคŸๅœจๅ›พๅƒๆˆ–่ง†้ข‘ๅธงไธญๅ‡†็กฎ่ฟ…้€Ÿๅœฐๆฃ€ๆต‹ๅ…ณ้”ฎ็‚นใ€‚ + +[ๅงฟๆ€็คบไพ‹](pose.md){ .md-button } + +## ็ป“่ฎบ + +YOLOv8 ๆ”ฏๆŒๅคšไธชไปปๅŠก๏ผŒๅŒ…ๆ‹ฌๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒใ€ๅˆ†็ฑปๅ’Œๅ…ณ้”ฎ็‚นๆฃ€ๆต‹ใ€‚่ฟ™ไบ›ไปปๅŠก้ƒฝๅ…ทๆœ‰ไธๅŒ็š„็›ฎๆ ‡ๅ’Œ็”จไพ‹ใ€‚้€š่ฟ‡็†่งฃ่ฟ™ไบ›ไปปๅŠกไน‹้—ด็š„ๅทฎๅผ‚๏ผŒๆ‚จๅฏไปฅไธบๆ‚จ็š„่ฎก็ฎ—ๆœบ่ง†่ง‰ๅบ”็”จ้€‰ๆ‹ฉๅˆ้€‚็š„ไปปๅŠกใ€‚ diff --git a/ultralytics/docs/zh/tasks/index.md:Zone.Identifier b/ultralytics/docs/zh/tasks/index.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/tasks/index.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/tasks/pose.md b/ultralytics/docs/zh/tasks/pose.md new file mode 100755 index 0000000..6ad6890 --- /dev/null +++ b/ultralytics/docs/zh/tasks/pose.md @@ -0,0 +1,185 @@ +--- +comments: true +description: ๅญฆไน ๅฆ‚ไฝ•ไฝฟ็”จUltralytics YOLOv8่ฟ›่กŒๅงฟๆ€ไผฐ่ฎกไปปๅŠกใ€‚ๆ‰พๅˆฐ้ข„่ฎญ็ปƒๆจกๅž‹๏ผŒๅญฆไน ๅฆ‚ไฝ•่ฎญ็ปƒใ€้ชŒ่ฏใ€้ข„ๆต‹ไปฅๅŠๅฏผๅ‡บไฝ ่‡ชๅทฑ็š„ๆจกๅž‹ใ€‚ +keywords: Ultralytics, YOLO, YOLOv8, ๅงฟๆ€ไผฐ่ฎก, ๅ…ณ้”ฎ็‚นๆฃ€ๆต‹, ็‰ฉไฝ“ๆฃ€ๆต‹, ้ข„่ฎญ็ปƒๆจกๅž‹, ๆœบๅ™จๅญฆไน , ไบบๅทฅๆ™บ่ƒฝ +--- + +# ๅงฟๆ€ไผฐ่ฎก + +ๅงฟๆ€ไผฐ่ฎก็คบไพ‹ + +ๅงฟๆ€ไผฐ่ฎกๆ˜ฏไธ€้กนไปปๅŠก๏ผŒๅ…ถๆถ‰ๅŠ่ฏ†ๅˆซๅ›พๅƒไธญ็‰นๅฎš็‚น็š„ไฝ็ฝฎ๏ผŒ้€šๅธธ่ขซ็งฐไธบๅ…ณ้”ฎ็‚นใ€‚่ฟ™ไบ›ๅ…ณ้”ฎ็‚นๅฏไปฅไปฃ่กจ็‰ฉไฝ“็š„ๅ„็ง้ƒจไฝ๏ผŒๅฆ‚ๅ…ณ่Š‚ใ€ๅœฐๆ ‡ๆˆ–ๅ…ถไป–ๆ˜พ่‘—็‰นๅพใ€‚ๅ…ณ้”ฎ็‚น็š„ไฝ็ฝฎ้€šๅธธ่กจ็คบไธบไธ€็ป„2D `[x, y]` ๆˆ–3D `[x, y, visible]` ๅๆ ‡ใ€‚ + +ๅงฟๆ€ไผฐ่ฎกๆจกๅž‹็š„่พ“ๅ‡บๆ˜ฏไธ€็ป„็‚น้›†๏ผŒ่ฟ™ไบ›็‚นไปฃ่กจๅ›พๅƒไธญ็‰ฉไฝ“ไธŠ็š„ๅ…ณ้”ฎ็‚น๏ผŒ้€šๅธธ่ฟ˜ๅŒ…ๆ‹ฌๆฏไธช็‚น็š„็ฝฎไฟกๅบฆๅพ—ๅˆ†ใ€‚ๅฝ“ไฝ ้œ€่ฆๅœจๅœบๆ™ฏไธญ่ฏ†ๅˆซ็‰ฉไฝ“็š„็‰นๅฎš้ƒจไฝๅŠๅ…ถ็›ธไบ’ไน‹้—ด็š„ไฝ็ฝฎๆ—ถ๏ผŒๅงฟๆ€ไผฐ่ฎกๆ˜ฏไธ€ไธชไธ้”™็š„้€‰ๆ‹ฉใ€‚ + +

+**่ง‚็œ‹๏ผš**ไฝฟ็”จUltralytics YOLOv8่ฟ›่กŒๅงฟๆ€ไผฐ่ฎกใ€‚
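+
+ไธ‹้ขๆ˜ฏไธ€ๆฎต็ฎ€็Ÿญ็š„็คบๆ„ไปฃ็ ๏ผˆๅ‡่ฎพๅทฒๅฎ‰่ฃ…`ultralytics`ๅŒ…๏ผŒ็คบไพ‹ๅ›พ็‰‡URLไป…ไฝœๆผ”็คบ๏ผ‰๏ผŒๅฑ•็คบๅฆ‚ไฝ•่ฏปๅ–ไธŠ่ฟฐๅ…ณ้”ฎ็‚น่พ“ๅ‡บ๏ผš
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-pose.pt')  # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๅงฟๆ€ๆจกๅž‹
+results = model('https://ultralytics.com/images/bus.jpg')
+
+kpts = results[0].keypoints  # ๆฏไธชๆฃ€ๆต‹ๅฎžไพ‹็š„ๅ…ณ้”ฎ็‚น
+print(kpts.xy)    # ๅƒ็ด ๅๆ ‡๏ผŒๅฝข็Šถไธบ [N, K, 2]
+print(kpts.conf)  # ๆฏไธชๅ…ณ้”ฎ็‚น็š„็ฝฎไฟกๅบฆ
+```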
+ +!!! Tip "ๆ็คบ" + + YOLOv8 _ๅงฟๆ€_ ๆจกๅž‹ไฝฟ็”จ `-pose` ๅŽ็ผ€๏ผŒไพ‹ๅฆ‚ `yolov8n-pose.pt`ใ€‚่ฟ™ไบ›ๆจกๅž‹ๅœจ [COCOๅ…ณ้”ฎ็‚น](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco-pose.yaml) ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒไบ†่ฎญ็ปƒ๏ผŒๅนถไธ”้€‚็”จไบŽๅ„็งๅงฟๆ€ไผฐ่ฎกไปปๅŠกใ€‚ + +## [ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +่ฟ™้‡Œๅฑ•็คบไบ†YOLOv8้ข„่ฎญ็ปƒ็š„ๅงฟๆ€ๆจกๅž‹ใ€‚ๆฃ€ๆต‹ใ€ๅˆ†ๅ‰ฒๅ’Œๅงฟๆ€ๆจกๅž‹ๅœจ [COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml) ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ้ข„่ฎญ็ปƒ๏ผŒ่€Œๅˆ†็ฑปๆจกๅž‹ๅˆ™ๅœจ [ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml) ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ้ข„่ฎญ็ปƒใ€‚ + +[ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) ๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถๅฐ†่‡ชๅŠจไปŽๆœ€ๆ–ฐ็š„Ultralytics [ๅ‘ๅธƒ็‰ˆๆœฌ](https://github.com/ultralytics/assets/releases)ไธญไธ‹่ฝฝใ€‚ + +| ๆจกๅž‹ | ๅฐบๅฏธ
(ๅƒ็ด ) | mAPๅงฟๆ€
50-95 | mAPๅงฟๆ€
50 | ้€Ÿๅบฆ
CPU ONNX
(ๆฏซ็ง’) | ้€Ÿๅบฆ
A100 TensorRT
(ๆฏซ็ง’) | ๅ‚ๆ•ฐ
(M) | ๆตฎ็‚นๆ•ฐ่ฟ็ฎ—
(B) | +|----------------------------------------------------------------------------------------------------|-----------------|---------------------|------------------|-----------------------------|----------------------------------|----------------|-------------------| +| [YOLOv8n-ๅงฟๆ€](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 | +| [YOLOv8s-ๅงฟๆ€](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 | +| [YOLOv8m-ๅงฟๆ€](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 | +| [YOLOv8l-ๅงฟๆ€](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 | +| [YOLOv8x-ๅงฟๆ€](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 | +| [YOLOv8x-ๅงฟๆ€-p6](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 | + +- **mAPval** ๅ€ผ้€‚็”จไบŽ[COCO ๅ…ณ้”ฎ็‚น val2017](http://cocodataset.org)ๆ•ฐๆฎ้›†ไธŠ็š„ๅ•ๆจกๅž‹ๅ•ๅฐบๅบฆใ€‚ +
้€š่ฟ‡ๆ‰ง่กŒ `yolo val pose data=coco-pose.yaml device=0` ๆฅๅค็Žฐใ€‚ +- **้€Ÿๅบฆ** ๆ˜ฏๅœจ [ไบš้ฉฌ้€ŠEC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ๅฎžไพ‹ไธŠไฝฟ็”จCOCO valๅ›พๅƒ็š„ๅนณๅ‡ๅ€ผใ€‚ +
้€š่ฟ‡ๆ‰ง่กŒ `yolo val pose data=coco8-pose.yaml batch=1 device=0|cpu` ๆฅๅค็Žฐใ€‚ + +## ่ฎญ็ปƒ + +ๅœจCOCO128ๅงฟๆ€ๆ•ฐๆฎ้›†ไธŠ่ฎญ็ปƒไธ€ไธชYOLOv8ๅงฟๆ€ๆจกๅž‹ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-pose.yaml') # ไปŽYAMLๆž„ๅปบไธ€ไธชๆ–ฐๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') # ๅŠ ่ฝฝไธ€ไธช้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + model = YOLO('yolov8n-pose.yaml').load('yolov8n-pose.pt') # ไปŽYAMLๆž„ๅปบๅนถไผ ่พ“ๆƒ้‡ + + # ่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='coco8-pose.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ไปŽYAMLๆž„ๅปบไธ€ไธชๆ–ฐๆจกๅž‹ๅนถไปŽๅคดๅผ€ๅง‹่ฎญ็ปƒ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml epochs=100 imgsz=640 + + # ไปŽไธ€ไธช้ข„่ฎญ็ปƒ็š„*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.pt epochs=100 imgsz=640 + + # ไปŽYAMLๆž„ๅปบไธ€ไธชๆ–ฐๆจกๅž‹๏ผŒไผ ่พ“้ข„่ฎญ็ปƒๆƒ้‡ๅนถๅผ€ๅง‹่ฎญ็ปƒ + yolo pose train data=coco8-pose.yaml model=yolov8n-pose.yaml pretrained=yolov8n-pose.pt epochs=100 imgsz=640 + ``` + +### ๆ•ฐๆฎ้›†ๆ ผๅผ + +YOLOๅงฟๆ€ๆ•ฐๆฎ้›†ๆ ผๅผๅฏ่ฏฆ็ป†ๆ‰พๅˆฐๅœจ[ๆ•ฐๆฎ้›†ๆŒ‡ๅ—](/../datasets/pose/index.md)ไธญใ€‚่‹ฅ่ฆๅฐ†ๆ‚จ็Žฐๆœ‰็š„ๆ•ฐๆฎ้›†ไปŽๅ…ถไป–ๆ ผๅผ๏ผˆๅฆ‚COCO็ญ‰๏ผ‰่ฝฌๆขไธบYOLOๆ ผๅผ๏ผŒ่ฏทไฝฟ็”จUltralytics็š„ [JSON2YOLO](https://github.com/ultralytics/JSON2YOLO) ๅทฅๅ…ทใ€‚ + +## ้ชŒ่ฏ + +ๅœจCOCO128ๅงฟๆ€ๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏ่ฎญ็ปƒๅฅฝ็š„YOLOv8nๅงฟๆ€ๆจกๅž‹็š„ๅ‡†็กฎๆ€งใ€‚ๆฒกๆœ‰ๅ‚ๆ•ฐ้œ€่ฆไผ ้€’๏ผŒๅ› ไธบ`ๆจกๅž‹`ไฟๅญ˜ไบ†ๅ…ถ่ฎญ็ปƒ`ๆ•ฐๆฎ`ๅ’Œๅ‚ๆ•ฐไฝœไธบๆจกๅž‹ๅฑžๆ€งใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ้ชŒ่ฏๆจกๅž‹ + metrics = model.val() # ๆ— ้œ€ๅ‚ๆ•ฐ๏ผŒๆ•ฐๆฎ้›†ๅ’Œ่ฎพ็ฝฎ้ƒฝ่ฎฐไฝไบ† + metrics.box.map # map50-95 + metrics.box.map50 # map50 + metrics.box.map75 # map75 + metrics.box.maps # ๅŒ…ๅซๆฏไธช็ฑปๅˆซmap50-95็š„ๅˆ—่กจ + ``` + === "CLI" + + ```bash + yolo pose val model=yolov8n-pose.pt # ้ชŒ่ฏๅฎ˜ๆ–นๆจกๅž‹ + yolo pose val model=path/to/best.pt # ้ชŒ่ฏ่‡ชๅฎšไน‰ๆจกๅž‹ + ``` + +## ้ข„ๆต‹ + +ไฝฟ็”จ่ฎญ็ปƒๅฅฝ็š„YOLOv8nๅงฟๆ€ๆจกๅž‹ๅœจๅ›พ็‰‡ไธŠ่ฟ่กŒ้ข„ๆต‹ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ็”จๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + results = model('https://ultralytics.com/images/bus.jpg') # ๅœจไธ€ๅผ ๅ›พ็‰‡ไธŠ้ข„ๆต‹ + ``` + === "CLI" + + ```bash + yolo pose predict model=yolov8n-pose.pt source='https://ultralytics.com/images/bus.jpg' # ็”จๅฎ˜ๆ–นๆจกๅž‹้ข„ๆต‹ + yolo pose predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ็”จ่‡ชๅฎšไน‰ๆจกๅž‹้ข„ๆต‹ + ``` + +ๅœจ[้ข„ๆต‹](https://docs.ultralytics.com/modes/predict/)้กต้ขไธญๆŸฅ็œ‹ๅฎŒๆ•ด็š„`้ข„ๆต‹`ๆจกๅผ็ป†่Š‚ใ€‚ + +## ๅฏผๅ‡บ + +ๅฐ†YOLOv8nๅงฟๆ€ๆจกๅž‹ๅฏผๅ‡บไธบONNXใ€CoreML็ญ‰ไธๅŒๆ ผๅผใ€‚ + +!!! 
Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ๅŠ ่ฝฝๆจกๅž‹ + model = YOLO('yolov8n-pose.pt') # ๅŠ ่ฝฝๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ๅŠ ่ฝฝ่‡ชๅฎšไน‰่ฎญ็ปƒๅฅฝ็š„ๆจกๅž‹ + + # ๅฏผๅ‡บๆจกๅž‹ + model.export(format='onnx') + ``` + === "CLI" + + ```bash + yolo export model=yolov8n-pose.pt format=onnx # ๅฏผๅ‡บๅฎ˜ๆ–นๆจกๅž‹ + yolo export model=path/to/best.pt format=onnx # ๅฏผๅ‡บ่‡ชๅฎšไน‰่ฎญ็ปƒๅฅฝ็š„ๆจกๅž‹ + ``` + +ไปฅไธ‹่กจๆ ผไธญๆœ‰ๅฏ็”จ็š„YOLOv8ๅงฟๆ€ๅฏผๅ‡บๆ ผๅผใ€‚ๆ‚จๅฏไปฅ็›ดๆŽฅๅœจๅฏผๅ‡บ็š„ๆจกๅž‹ไธŠ่ฟ›่กŒ้ข„ๆต‹ๆˆ–้ชŒ่ฏ๏ผŒไพ‹ๅฆ‚ `yolo predict model=yolov8n-pose.onnx`ใ€‚ๅฏผๅ‡บๅฎŒๆˆๅŽ๏ผŒไธบๆ‚จ็š„ๆจกๅž‹ๆ˜พ็คบ็”จๆณ•็คบไพ‹ใ€‚ + +| ๆ ผๅผ | `format` ๅ‚ๆ•ฐ | ๆจกๅž‹ | ๅ…ƒๆ•ฐๆฎ | ๅ‚ๆ•ฐ | +|--------------------------------------------------------------------|---------------|--------------------------------|-----|-----------------------------------------------------| +| [PyTorch](https://pytorch.org/) | - | `yolov8n-pose.pt` | โœ… | - | +| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-pose.torchscript` | โœ… | `imgsz`, `optimize` | +| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-pose.onnx` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `opset` | +| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-pose_openvino_model/` | โœ… | `imgsz`, `half` | +| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-pose.engine` | โœ… | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` | +| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-pose.mlpackage` | โœ… | `imgsz`, `half`, `int8`, `nms` | +| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-pose_saved_model/` | โœ… | `imgsz`, `keras` | +| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-pose.pb` | โŒ | `imgsz` | +| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-pose.tflite` | โœ… | `imgsz`, `half`, `int8` | +| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-pose_edgetpu.tflite` | โœ… | `imgsz` | +| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-pose_web_model/` | โœ… | `imgsz` | +| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-pose_paddle_model/` | โœ… | `imgsz` | +| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-pose_ncnn_model/` | โœ… | `imgsz`, `half` | + +ๅœจ[ๅฏผๅ‡บ](https://docs.ultralytics.com/modes/export/) ้กต้ขไธญๆŸฅ็œ‹ๅฎŒๆ•ด็š„`ๅฏผๅ‡บ`็ป†่Š‚ใ€‚ diff --git a/ultralytics/docs/zh/tasks/pose.md:Zone.Identifier b/ultralytics/docs/zh/tasks/pose.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/docs/zh/tasks/pose.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/docs/zh/tasks/segment.md b/ultralytics/docs/zh/tasks/segment.md new file mode 100755 index 0000000..246ef58 --- /dev/null +++ b/ultralytics/docs/zh/tasks/segment.md @@ -0,0 +1,188 @@ +--- +comments: true +description: ๅญฆไน ๅฆ‚ไฝ•ไฝฟ็”จUltralytics YOLO่ฟ›่กŒๅฎžไพ‹ๅˆ†ๅ‰ฒๆจกๅž‹ใ€‚ๅŒ…ๆ‹ฌ่ฎญ็ปƒใ€้ชŒ่ฏใ€ๅ›พๅƒ้ข„ๆต‹ๅ’Œๆจกๅž‹ๅฏผๅ‡บ็š„่ฏดๆ˜Žใ€‚ +keywords: yolov8, ๅฎžไพ‹ๅˆ†ๅ‰ฒ, Ultralytics, COCOๆ•ฐๆฎ้›†, ๅ›พๅƒๅˆ†ๅ‰ฒ, ็‰ฉไฝ“ๆฃ€ๆต‹, ๆจกๅž‹่ฎญ็ปƒ, ๆจกๅž‹้ชŒ่ฏ, ๅ›พๅƒ้ข„ๆต‹, ๆจกๅž‹ๅฏผๅ‡บ +--- + +# ๅฎžไพ‹ๅˆ†ๅ‰ฒ + +ๅฎžไพ‹ๅˆ†ๅ‰ฒ็คบไพ‹ + +ๅฎžไพ‹ๅˆ†ๅ‰ฒๆฏ”็‰ฉไฝ“ๆฃ€ๆต‹ๆœ‰ๆ‰€ๆทฑๅ…ฅ๏ผŒๅฎƒๆถ‰ๅŠๅˆฐ่ฏ†ๅˆซๅ›พๅƒไธญ็š„ไธชๅˆซ็‰ฉไฝ“ๅนถๅฐ†ๅฎƒไปฌไปŽๅ›พๅƒ็š„ๅ…ถไฝ™้ƒจๅˆ†ไธญๅˆ†ๅ‰ฒๅ‡บๆฅใ€‚ + 
+ๅฎžไพ‹ๅˆ†ๅ‰ฒๆจกๅž‹็š„่พ“ๅ‡บๆ˜ฏไธ€็ป„่’™็‰ˆๆˆ–่ฝฎๅป“๏ผŒ็”จไบŽๅ‹พ็”ปๅ›พๅƒไธญๆฏไธช็‰ฉไฝ“๏ผŒไปฅๅŠๆฏไธช็‰ฉไฝ“็š„็ฑปๅˆซๆ ‡็ญพๅ’Œ็ฝฎไฟกๅบฆๅˆ†ๆ•ฐใ€‚ๅฎžไพ‹ๅˆ†ๅ‰ฒๅœจๆ‚จ้œ€่ฆไธไป…็Ÿฅ้“ๅ›พๅƒไธญ็š„็‰ฉไฝ“ไฝ็ฝฎ๏ผŒ่ฟ˜้œ€่ฆ็Ÿฅ้“ๅฎƒไปฌ็กฎๅˆ‡ๅฝข็Šถๆ—ถ้žๅธธๆœ‰็”จใ€‚ + +

+**่ง‚็œ‹๏ผš**ๅœจPythonไธญไฝฟ็”จ้ข„่ฎญ็ปƒ็š„Ultralytics YOLOv8ๆจกๅž‹่ฟ่กŒๅˆ†ๅ‰ฒใ€‚
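+
+ไธ‹้ขๆ˜ฏไธ€ๆฎต็ฎ€็Ÿญ็š„็คบๆ„ไปฃ็ ๏ผˆๅ‡่ฎพๅทฒๅฎ‰่ฃ…`ultralytics`ๅŒ…๏ผŒ็คบไพ‹ๅ›พ็‰‡URLไป…ไฝœๆผ”็คบ๏ผ‰๏ผŒๅฑ•็คบๅฆ‚ไฝ•่ฏปๅ–ไธŠ่ฟฐ่’™็‰ˆ่พ“ๅ‡บ๏ผš
+
+```python
+from ultralytics import YOLO
+
+model = YOLO('yolov8n-seg.pt')  # ๅŠ ่ฝฝ้ข„่ฎญ็ปƒๅˆ†ๅ‰ฒๆจกๅž‹
+results = model('https://ultralytics.com/images/bus.jpg')
+
+masks = results[0].masks  # ่‹ฅๆœชๆฃ€ๆต‹ๅˆฐ็›ฎๆ ‡ๅˆ™ไธบ None
+if masks is not None:
+    print(masks.data.shape)  # ไบŒๅ€ผ่’™็‰ˆๅผ ้‡๏ผŒๅฝข็Šถไธบ [N, H, W]
+    print(masks.xy[0])       # ็ฌฌไธ€ไธชๅฎžไพ‹็š„่ฝฎๅป“ๅคš่พนๅฝข๏ผˆๅƒ็ด ๅๆ ‡๏ผ‰
+```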
+ +!!! Tip "ๆ็คบ" + + YOLOv8ๅˆ†ๅ‰ฒๆจกๅž‹ไฝฟ็”จ`-seg`ๅŽ็ผ€๏ผŒๅณ`yolov8n-seg.pt`๏ผŒๅนถๅœจ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ไธŠ่ฟ›่กŒ้ข„่ฎญ็ปƒใ€‚ + +## [ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models/v8) + +่ฟ™้‡Œๅฑ•็คบไบ†้ข„่ฎญ็ปƒ็š„YOLOv8ๅˆ†ๅ‰ฒๆจกๅž‹ใ€‚Detectใ€Segmentๅ’ŒPoseๆจกๅž‹้ƒฝๆ˜ฏๅœจ[COCO](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/coco.yaml)ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ้ข„่ฎญ็ปƒ็š„๏ผŒ่€ŒClassifyๆจกๅž‹ๅˆ™ๆ˜ฏๅœจ[ImageNet](https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/datasets/ImageNet.yaml)ๆ•ฐๆฎ้›†ไธŠ่ฟ›่กŒ้ข„่ฎญ็ปƒ็š„ใ€‚ + +[ๆจกๅž‹](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models)ไผšๅœจ้ฆ–ๆฌกไฝฟ็”จๆ—ถ่‡ชๅŠจไปŽUltralytics็š„ๆœ€ๆ–ฐ[็‰ˆๆœฌ](https://github.com/ultralytics/assets/releases)ไธ‹่ฝฝใ€‚ + +| ๆจกๅž‹ | ๅฐบๅฏธ
(ๅƒ็ด ) | mAPbox
50-95 | mAPmask
50-95 | ้€Ÿๅบฆ
CPU ONNX
(ms) | ้€Ÿๅบฆ
A100 TensorRT
(ms) | ๅ‚ๆ•ฐ
(M) | FLOPs
(B) | +|----------------------------------------------------------------------------------------------|-----------------|----------------------|-----------------------|-----------------------------|----------------------------------|----------------|-------------------| +| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 | +| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 | +| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 | +| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 | +| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 | + +- **mAPval** ๅ€ผ้’ˆๅฏน[COCO val2017](http://cocodataset.org)ๆ•ฐๆฎ้›†็š„ๅ•ๆจกๅž‹ๅ•ๅฐบๅบฆใ€‚ +
้€š่ฟ‡`yolo val segment data=coco.yaml device=0`ๅค็Žฐใ€‚ +- **้€Ÿๅบฆ** ๅŸบไบŽๅœจ[Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/)ๅฎžไพ‹ไธŠ่ฟ่กŒ็š„COCO valๅ›พๅƒ็š„ๅนณๅ‡ๅ€ผใ€‚ +
้€š่ฟ‡`yolo val segment data=coco128-seg.yaml batch=1 device=0|cpu`ๅค็Žฐใ€‚ + +## ่ฎญ็ปƒ + +ๅœจCOCO128-segๆ•ฐๆฎ้›†ไธŠไปฅ640็š„ๅ›พๅƒๅฐบๅฏธ่ฎญ็ปƒYOLOv8n-segๆจกๅž‹ๅ…ฑ100ไธชๅ‘จๆœŸใ€‚ๆƒณไบ†่งฃๆ›ดๅคšๅฏ็”จ็š„ๅ‚ๆ•ฐ๏ผŒ่ฏทๆŸฅ้˜…[้…็ฝฎ](/../usage/cfg.md)้กต้ขใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ่ฝฝๅ…ฅไธ€ไธชๆจกๅž‹ + model = YOLO('yolov8n-seg.yaml') # ไปŽYAMLๆž„ๅปบไธ€ไธชๆ–ฐๆจกๅž‹ + model = YOLO('yolov8n-seg.pt') # ่ฝฝๅ…ฅ้ข„่ฎญ็ปƒๆจกๅž‹๏ผˆๆŽจ่็”จไบŽ่ฎญ็ปƒ๏ผ‰ + model = YOLO('yolov8n-seg.yaml').load('yolov8n.pt') # ไปŽYAMLๆž„ๅปบๅนถไผ ้€’ๆƒ้‡ + + # ่ฎญ็ปƒๆจกๅž‹ + results = model.train(data='coco128-seg.yaml', epochs=100, imgsz=640) + ``` + === "CLI" + + ```bash + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹ๅนถไปŽๅคดๅผ€ๅง‹่ฎญ็ปƒ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml epochs=100 imgsz=640 + + # ไปŽ้ข„่ฎญ็ปƒ*.ptๆจกๅž‹ๅผ€ๅง‹่ฎญ็ปƒ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.pt epochs=100 imgsz=640 + + # ไปŽYAMLๆž„ๅปบๆ–ฐๆจกๅž‹๏ผŒไผ ้€’้ข„่ฎญ็ปƒๆƒ้‡๏ผŒๅผ€ๅง‹่ฎญ็ปƒ + yolo segment train data=coco128-seg.yaml model=yolov8n-seg.yaml pretrained=yolov8n-seg.pt epochs=100 imgsz=640 + ``` + +### ๆ•ฐๆฎ้›†ๆ ผๅผ + +ๅฏไปฅๅœจ[ๆ•ฐๆฎ้›†ๆŒ‡ๅ—](/../datasets/segment/index.md)ไธญ่ฏฆ็ป†ไบ†่งฃYOLOๅˆ†ๅ‰ฒๆ•ฐๆฎ้›†ๆ ผๅผใ€‚่ฆๅฐ†็Žฐๆœ‰ๆ•ฐๆฎ้›†ไปŽๅ…ถไป–ๆ ผๅผ๏ผˆๅฆ‚COCO็ญ‰๏ผ‰่ฝฌๆขไธบYOLOๆ ผๅผ๏ผŒ่ฏทไฝฟ็”จUltralytics็š„[JSON2YOLO](https://github.com/ultralytics/JSON2YOLO)ๅทฅๅ…ทใ€‚ + +## ้ชŒ่ฏ + +ๅœจCOCO128-segๆ•ฐๆฎ้›†ไธŠ้ชŒ่ฏๅทฒ่ฎญ็ปƒ็š„YOLOv8n-segๆจกๅž‹็š„ๅ‡†็กฎๆ€งใ€‚ไธ้œ€่ฆไผ ้€’ไปปไฝ•ๅ‚ๆ•ฐ๏ผŒๅ› ไธบ`model`ไฟ็•™ไบ†ๅ…ถ่ฎญ็ปƒ็š„`data`ๅ’Œไฝœไธบๆจกๅž‹ๅฑžๆ€ง็š„่ฎพ็ฝฎใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ่ฝฝๅ…ฅไธ€ไธชๆจกๅž‹ + model = YOLO('yolov8n-seg.pt') # ่ฝฝๅ…ฅๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ่ฝฝๅ…ฅ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ้ชŒ่ฏๆจกๅž‹ + metrics = model.val() # ไธ้œ€่ฆๅ‚ๆ•ฐ๏ผŒๆ•ฐๆฎ้›†ๅ’Œ่ฎพ็ฝฎ่ขซ่ฎฐไฝไบ† + metrics.box.map # map50-95(B) + metrics.box.map50 # map50(B) + metrics.box.map75 # map75(B) + metrics.box.maps # ๅ„็ฑปๅˆซmap50-95(B)ๅˆ—่กจ + metrics.seg.map # map50-95(M) + metrics.seg.map50 # map50(M) + metrics.seg.map75 # map75(M) + metrics.seg.maps # ๅ„็ฑปๅˆซmap50-95(M)ๅˆ—่กจ + ``` + === "CLI" + + ```bash + yolo segment val model=yolov8n-seg.pt # ้ชŒ่ฏๅฎ˜ๆ–นๆจกๅž‹ + yolo segment val model=path/to/best.pt # ้ชŒ่ฏ่‡ชๅฎšไน‰ๆจกๅž‹ + ``` + +## ้ข„ๆต‹ + +ไฝฟ็”จๅทฒ่ฎญ็ปƒ็š„YOLOv8n-segๆจกๅž‹ๅœจๅ›พๅƒไธŠ่ฟ›่กŒ้ข„ๆต‹ใ€‚ + +!!! Example "็คบไพ‹" + + === "Python" + + ```python + from ultralytics import YOLO + + # ่ฝฝๅ…ฅไธ€ไธชๆจกๅž‹ + model = YOLO('yolov8n-seg.pt') # ่ฝฝๅ…ฅๅฎ˜ๆ–นๆจกๅž‹ + model = YOLO('path/to/best.pt') # ่ฝฝๅ…ฅ่‡ชๅฎšไน‰ๆจกๅž‹ + + # ไฝฟ็”จๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + results = model('https://ultralytics.com/images/bus.jpg') # ๅฏนไธ€ๅผ ๅ›พๅƒ่ฟ›่กŒ้ข„ๆต‹ + ``` + === "CLI" + + ```bash + yolo segment predict model=yolov8n-seg.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จๅฎ˜ๆ–นๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + yolo segment predict model=path/to/best.pt source='https://ultralytics.com/images/bus.jpg' # ไฝฟ็”จ่‡ชๅฎšไน‰ๆจกๅž‹่ฟ›่กŒ้ข„ๆต‹ + ``` + +้ข„ๆต‹ๆจกๅผ็š„ๅฎŒๆ•ด่ฏฆๆƒ…่ฏทๅ‚่ง[Predict](https://docs.ultralytics.com/modes/predict/)้กต้ขใ€‚ + +## ๅฏผๅ‡บ + +ๅฐ†YOLOv8n-segๆจกๅž‹ๅฏผๅ‡บไธบONNXใ€CoreML็ญ‰ไธๅŒๆ ผๅผใ€‚ + +!!! 
+
+## Export
+
+Export a YOLOv8n-seg model to a different format such as ONNX or CoreML.
+
+!!! Example "Example"
+
+    === "Python"
+
+        ```python
+        from ultralytics import YOLO
+
+        # Load a model
+        model = YOLO('yolov8n-seg.pt')  # load an official model
+        model = YOLO('path/to/best.pt')  # load a custom trained model
+
+        # Export the model
+        model.export(format='onnx')
+        ```
+    === "CLI"
+
+        ```bash
+        yolo export model=yolov8n-seg.pt format=onnx  # export official model
+        yolo export model=path/to/best.pt format=onnx  # export custom trained model
+        ```
+
+Available YOLOv8-seg export formats are in the table below. You can predict or validate directly on exported models, e.g. `yolo predict model=yolov8n-seg.onnx`. Usage examples are shown for your model after export completes.
+
+| Format | `format` argument | Model | Metadata | Arguments |
+|--------------------------------------------------------------------|---------------|-------------------------------|-----|-----------------------------------------------------|
+| [PyTorch](https://pytorch.org/) | - | `yolov8n-seg.pt` | ✅ | - |
+| [TorchScript](https://pytorch.org/docs/stable/jit.html) | `torchscript` | `yolov8n-seg.torchscript` | ✅ | `imgsz`, `optimize` |
+| [ONNX](https://onnx.ai/) | `onnx` | `yolov8n-seg.onnx` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `opset` |
+| [OpenVINO](https://docs.openvino.ai/latest/index.html) | `openvino` | `yolov8n-seg_openvino_model/` | ✅ | `imgsz`, `half` |
+| [TensorRT](https://developer.nvidia.com/tensorrt) | `engine` | `yolov8n-seg.engine` | ✅ | `imgsz`, `half`, `dynamic`, `simplify`, `workspace` |
+| [CoreML](https://github.com/apple/coremltools) | `coreml` | `yolov8n-seg.mlpackage` | ✅ | `imgsz`, `half`, `int8`, `nms` |
+| [TF SavedModel](https://www.tensorflow.org/guide/saved_model) | `saved_model` | `yolov8n-seg_saved_model/` | ✅ | `imgsz`, `keras` |
+| [TF GraphDef](https://www.tensorflow.org/api_docs/python/tf/Graph) | `pb` | `yolov8n-seg.pb` | ❌ | `imgsz` |
+| [TF Lite](https://www.tensorflow.org/lite) | `tflite` | `yolov8n-seg.tflite` | ✅ | `imgsz`, `half`, `int8` |
+| [TF Edge TPU](https://coral.ai/docs/edgetpu/models-intro/) | `edgetpu` | `yolov8n-seg_edgetpu.tflite` | ✅ | `imgsz` |
+| [TF.js](https://www.tensorflow.org/js) | `tfjs` | `yolov8n-seg_web_model/` | ✅ | `imgsz` |
+| [PaddlePaddle](https://github.com/PaddlePaddle) | `paddle` | `yolov8n-seg_paddle_model/` | ✅ | `imgsz` |
+| [ncnn](https://github.com/Tencent/ncnn) | `ncnn` | `yolov8n-seg_ncnn_model/` | ✅ | `imgsz`, `half` |
+
+See full details of the export mode on the [Export](https://docs.ultralytics.com/modes/export/) page.
diff --git a/ultralytics/docs/zh/tasks/segment.md:Zone.Identifier b/ultralytics/docs/zh/tasks/segment.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/docs/zh/tasks/segment.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/drawbox.py b/ultralytics/drawbox.py
new file mode 100755
index 0000000..ce43dd5
--- /dev/null
+++ b/ultralytics/drawbox.py
@@ -0,0 +1,38 @@
+import os
+from ultralytics import YOLO
+import cv2
+
+def predict_and_save_images(model, image_folder, output_folder):
+    """Run YOLOv8 predictions on every image in a folder and save the annotated results."""
+    if not os.path.exists(output_folder):
+        os.makedirs(output_folder)
+        print(f"Created output directory: {output_folder}")
+
+    image_files = os.listdir(image_folder)
+    print(f"Found {len(image_files)} images in {image_folder}")
+
+    for image_file in image_files:
+        image_path = os.path.join(image_folder, image_file)
+        image = cv2.imread(image_path)
+        if image is None:
+            print(f"Could not read image: {image_path}")
+            continue
+
+        results = model(image_path)
+        annotated_image = results[0].plot()
+
+        output_path = os.path.join(output_folder, image_file)
+        success = cv2.imwrite(output_path, annotated_image)
+        if not success:
+            print(f"Could not write image: {output_path}")
+        else:
+            print(f"Image saved: {output_path}")
+
+# Example usage
+image_folder = '/root/catkin_ws/src/ultralytics/ours_15000/renders'  # input image folder
+output_folder = '/root/catkin_ws/src/ultralytics/ours_15000/renders_box'  # output folder for annotated images
+
+# Load a YOLOv8 model (e.g. the pretrained YOLOv8n model)
+model = YOLO('yolov8n.pt')
+
+predict_and_save_images(model, image_folder, output_folder)
diff --git a/ultralytics/drawbox.py:Zone.Identifier b/ultralytics/drawbox.py:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/drawbox.py:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
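As a side note on `drawbox.py`: the Ultralytics predictor can iterate a folder and save annotated copies on its own, so the manual loop above is optional. A minimal sketch, using the same illustrative paths as the script:

```python
from ultralytics import YOLO

# Shorter route to the same outcome as drawbox.py: let the predictor walk the
# folder and write annotated images to <project>/<name>/.
model = YOLO('yolov8n.pt')
model.predict(source='/root/catkin_ws/src/ultralytics/ours_15000/renders',
              save=True,
              project='/root/catkin_ws/src/ultralytics/ours_15000',
              name='renders_box')
```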
diff --git a/ultralytics/eval.py b/ultralytics/eval.py
new file mode 100755
index 0000000..dd438ba
--- /dev/null
+++ b/ultralytics/eval.py
@@ -0,0 +1,68 @@
+import os
+import numpy as np
+
+def parse_yolo_label(file_path):
+    """Parse a YOLO-format label file into a list of [class, x, y, w, h] rows."""
+    labels = []
+    try:
+        with open(file_path, 'r') as file:
+            lines = file.readlines()
+            labels = [list(map(float, line.strip().split())) for line in lines]
+    except FileNotFoundError:
+        pass  # return an empty list if the file does not exist
+    return labels
+
+def calculate_iou(box1, box2):
+    """Compute the intersection over union (IoU) of two (x_center, y_center, w, h) boxes."""
+    x1, y1, w1, h1 = box1
+    x2, y2, w2, h2 = box2
+
+    inter_w = max(0, min(x1 + w1 / 2, x2 + w2 / 2) - max(x1 - w1 / 2, x2 - w2 / 2))
+    inter_h = max(0, min(y1 + h1 / 2, y2 + h2 / 2) - max(y1 - h1 / 2, y2 - h2 / 2))
+    inter_area = inter_w * inter_h
+
+    union_area = w1 * h1 + w2 * h2 - inter_area
+    iou = inter_area / union_area if union_area > 0 else 0
+    return iou
+
+def evaluate_predictions(gt_folder, pred_folder, iou_threshold=0.9):
+    """Evaluate precision, recall and F1 score of predictions against ground-truth labels."""
+    gt_files = os.listdir(gt_folder)
+    tp = 0  # true positives
+    fp = 0  # false positives
+    fn = 0  # false negatives
+
+    for gt_file in gt_files:
+        gt_path = os.path.join(gt_folder, gt_file)
+        pred_path = os.path.join(pred_folder, gt_file)
+
+        gt_labels = parse_yolo_label(gt_path)
+        pred_labels = parse_yolo_label(pred_path)
+
+        matched = [False] * len(pred_labels)
+
+        # Note: boxes are matched by geometry only; class ids (column 0) are ignored.
+        for gt in gt_labels:
+            gt_matched = False
+            for i, pred in enumerate(pred_labels):
+                if calculate_iou(gt[1:], pred[1:]) >= iou_threshold:
+                    if not matched[i]:  # prevent several ground-truth boxes from matching the same prediction
+                        matched[i] = True
+                        gt_matched = True
+                        tp += 1
+                        break
+            if not gt_matched:
+                fn += 1
+
+        fp += matched.count(False)  # every unmatched prediction counts as a false positive
+
+    precision = tp / (tp + fp) if (tp + fp) > 0 else 0
+    recall = tp / (tp + fn) if (tp + fn) > 0 else 0
+    f1 = 2 * (precision * recall) / (precision + recall) if (precision + recall) > 0 else 0
+
+    return precision, recall, f1
+
+# Example usage
+gt_folder = '/root/catkin_ws/src/ultralytics/ours_15000/labels_renders2'
+pred_folder = '/root/catkin_ws/src/ultralytics/ours_15000/labels_renders'
+precision, recall, f1 = evaluate_predictions(gt_folder, pred_folder)
+print(f'Precision: {precision}, Recall: {recall}, F1 Score: {f1}')
\ No newline at end of file
diff --git a/ultralytics/eval.py:Zone.Identifier b/ultralytics/eval.py:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/eval.py:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
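A quick arithmetic sanity check of the IoU definition used in `eval.py`. This is self-contained: `iou_xywh` below is our own copy of the same math, not part of the repository.

```python
def iou_xywh(a, b):
    """Same math as calculate_iou in eval.py: boxes are (x_center, y_center, w, h)."""
    (x1, y1, w1, h1), (x2, y2, w2, h2) = a, b
    iw = max(0, min(x1 + w1 / 2, x2 + w2 / 2) - max(x1 - w1 / 2, x2 - w2 / 2))
    ih = max(0, min(y1 + h1 / 2, y2 + h2 / 2) - max(y1 - h1 / 2, y2 - h2 / 2))
    inter = iw * ih
    union = w1 * h1 + w2 * h2 - inter
    return inter / union if union > 0 else 0

box1 = (0.5, 0.5, 0.4, 0.4)  # covers x in [0.3, 0.7], y in [0.3, 0.7]
box2 = (0.7, 0.5, 0.4, 0.4)  # covers x in [0.5, 0.9], y in [0.3, 0.7]

# intersection = 0.2 * 0.4 = 0.08; union = 0.16 + 0.16 - 0.08 = 0.24
print(iou_xywh(box1, box2))  # 0.3333... = 0.08 / 0.24
print(iou_xywh(box1, box1))  # 1.0 for identical boxes
```

With the script's strict default of `iou_threshold=0.9`, this partially overlapping pair would not count as a match.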
+ZoneId=3
diff --git a/ultralytics/examples/README.md b/ultralytics/examples/README.md
new file mode 100755
index 0000000..d49bdfe
--- /dev/null
+++ b/ultralytics/examples/README.md
@@ -0,0 +1,35 @@
+## Ultralytics YOLOv8 Example Applications
+
+This repository features a collection of real-world applications and walkthroughs, provided as either Python files or notebooks. Explore the examples below to see how YOLOv8 can be integrated into various applications.
+
+### Ultralytics YOLO Example Applications
+
+| Title | Format | Contributor |
+| ----- | ------ | ----------- |
+| [YOLO ONNX Detection Inference with C++](./YOLOv8-CPP-Inference) | C++/ONNX | [Justas Bartnykas](https://github.com/JustasBart) |
+| [YOLO OpenCV ONNX Detection Python](./YOLOv8-OpenCV-ONNX-Python) | OpenCV/Python/ONNX | [Farid Inawan](https://github.com/frdteknikelektro) |
+| [YOLOv8 .NET ONNX ImageSharp](https://github.com/dme-compunet/YOLOv8) | C#/ONNX/ImageSharp | [Compunet](https://github.com/dme-compunet) |
+| [YOLO .Net ONNX Detection C#](https://www.nuget.org/packages/Yolov8.Net) | C# .Net | [Samuel Stainback](https://github.com/sstainba) |
+| [YOLOv8 on NVIDIA Jetson (TensorRT and DeepStream)](https://wiki.seeedstudio.com/YOLOv8-DeepStream-TRT-Jetson/) | Python | [Lakshantha](https://github.com/lakshanthad) |
+| [YOLOv8 ONNXRuntime Python](./YOLOv8-ONNXRuntime) | Python/ONNXRuntime | [Semih Demirel](https://github.com/semihhdemirel) |
+| [YOLOv8 ONNXRuntime CPP](./YOLOv8-ONNXRuntime-CPP) | C++/ONNXRuntime | [DennisJcy](https://github.com/DennisJcy), [Onuralp Sezer](https://github.com/onuralpszr) |
+| [RTDETR ONNXRuntime C#](https://github.com/Kayzwer/yolo-cs/blob/master/RTDETR.cs) | C#/ONNX | [Kayzwer](https://github.com/Kayzwer) |
+| [YOLOv8 SAHI Video Inference](https://github.com/RizwanMunawar/ultralytics/blob/main/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py) | Python | [Muhammad Rizwan Munawar](https://github.com/RizwanMunawar) |
+| [YOLOv8 Region Counter](https://github.com/RizwanMunawar/ultralytics/blob/main/examples/YOLOv8-Region-Counter/yolov8_region_counter.py) | Python | [Muhammad Rizwan Munawar](https://github.com/RizwanMunawar) |
+| [YOLOv8 Segmentation ONNXRuntime Python](./YOLOv8-Segmentation-ONNXRuntime-Python) | Python/ONNXRuntime | [jamjamjon](https://github.com/jamjamjon) |
+| [YOLOv8 LibTorch CPP](./YOLOv8-LibTorch-CPP-Inference) | C++/LibTorch | [Myyura](https://github.com/Myyura) |
+
+### How to Contribute
+
+We greatly appreciate contributions from the community, including examples, applications, and guides. If you'd like to contribute, please follow these guidelines:
+
+1. Create a pull request (PR) with the title prefix `[Example]`, adding your new example folder to the `examples/` directory within the repository.
+1. Make sure your project adheres to the following standards:
+   - Makes use of the `ultralytics` package.
+   - Includes a `README.md` with clear instructions for setting up and running the example.
+   - Refrains from adding large files or dependencies unless they are absolutely necessary for the example.
+   - Contributors should be willing to provide support for their examples and address related issues.
+
+For more detailed information and guidance on contributing, please visit our [contribution documentation](https://docs.ultralytics.com/help/contributing).
+
+If you encounter any questions or concerns regarding these guidelines, feel free to open a PR or an issue in the repository, and we will assist you in the contribution process.
diff --git a/ultralytics/examples/README.md:Zone.Identifier b/ultralytics/examples/README.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/README.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt b/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt
new file mode 100755
index 0000000..bc2f33f
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt
@@ -0,0 +1,28 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(Yolov8CPPInference VERSION 0.1)
+
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
+
+# CUDA
+set(CUDA_TOOLKIT_ROOT_DIR "/usr/local/cuda")
+find_package(CUDA 11 REQUIRED)
+
+set(CMAKE_CUDA_STANDARD 11)
+set(CMAKE_CUDA_STANDARD_REQUIRED ON)
+# !CUDA
+
+# OpenCV
+find_package(OpenCV REQUIRED)
+include_directories(${OpenCV_INCLUDE_DIRS})
+# !OpenCV
+
+set(PROJECT_SOURCES
+    main.cpp
+
+    inference.h
+    inference.cpp
+)
+
+add_executable(Yolov8CPPInference ${PROJECT_SOURCES})
+target_link_libraries(Yolov8CPPInference ${OpenCV_LIBS})
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt:Zone.Identifier b/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/CMakeLists.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/README.md b/ultralytics/examples/YOLOv8-CPP-Inference/README.md
new file mode 100755
index 0000000..601c1d0
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/README.md
@@ -0,0 +1,50 @@
+# YOLOv8/YOLOv5 Inference C++
+
+This example demonstrates how to perform inference using YOLOv8 and YOLOv5 models in C++ with OpenCV's DNN API.
+
+## Usage
+
+```bash
+git clone ultralytics
+cd ultralytics
+pip install .
+cd examples/YOLOv8-CPP-Inference
+
+# Add a **yolov8\_.onnx** and/or **yolov5\_.onnx** model(s) to the ultralytics folder.
+# Edit **main.cpp** to change the **projectBasePath** to match your user.
+
+# Note that by default the CMake file will try to import the CUDA library to be used with OpenCV's DNN (cuDNN) GPU inference.
+# If your OpenCV build does not use CUDA/cuDNN, you can remove that import call and run the example on CPU.
+
+mkdir build
+cd build
+cmake ..
+make
+./Yolov8CPPInference
+```
+
+## Exporting YOLOv8 and YOLOv5 Models
+
+To export YOLOv8 models:
+
+```commandline
+yolo export model=yolov8s.pt imgsz=480,640 format=onnx opset=12
+```
+
+To export YOLOv5 models:
+
+```commandline
+python3 export.py --weights yolov5s.pt --img 480 640 --include onnx --opset 12
+```
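For completeness, the same YOLOv8 export can be driven from Python; the keyword arguments below mirror the CLI flags above (a minimal sketch, assuming the `ultralytics` package is installed):

```python
from ultralytics import YOLO

# Equivalent of: yolo export model=yolov8s.pt imgsz=480,640 format=onnx opset=12
model = YOLO('yolov8s.pt')
model.export(format='onnx', imgsz=(480, 640), opset=12)
```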
+
+yolov8s.onnx:
+
+![image](https://user-images.githubusercontent.com/40023722/217356132-a4cecf2e-2729-4acb-b80a-6559022d7707.png)
+
+yolov5s.onnx:
+
+![image](https://user-images.githubusercontent.com/40023722/217357005-07464492-d1da-42e3-98a7-fc753f87d5e6.png)
+
+This repository utilizes OpenCV's DNN API to run ONNX exported models of YOLOv5 and YOLOv8. In theory, it should work for YOLOv6 and YOLOv7 as well, but they have not been tested. Note that the example networks are exported with rectangular (640x480) resolutions, but any exported resolution will work. You may want to use the letterbox approach for square images, depending on your use case.
+
+The **main** branch version uses Qt as a GUI wrapper. The primary focus here is the **Inference** class file, which demonstrates how to transpose YOLOv8 models to work as YOLOv5 models.
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-CPP-Inference/README.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/README.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp b/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp
new file mode 100755
index 0000000..12c2607
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp
@@ -0,0 +1,185 @@
+#include "inference.h"
+
+Inference::Inference(const std::string &onnxModelPath, const cv::Size &modelInputShape, const std::string &classesTxtFile, const bool &runWithCuda)
+{
+    modelPath = onnxModelPath;
+    modelShape = modelInputShape;
+    classesPath = classesTxtFile;
+    cudaEnabled = runWithCuda;
+
+    loadOnnxNetwork();
+    // loadClassesFromFile(); The classes are hard-coded for this example
+}
+
+std::vector<Detection> Inference::runInference(const cv::Mat &input)
+{
+    cv::Mat modelInput = input;
+    if (letterBoxForSquare && modelShape.width == modelShape.height)
+        modelInput = formatToSquare(modelInput);
+
+    cv::Mat blob;
+    cv::dnn::blobFromImage(modelInput, blob, 1.0/255.0, modelShape, cv::Scalar(), true, false);
+    net.setInput(blob);
+
+    std::vector<cv::Mat> outputs;
+    net.forward(outputs, net.getUnconnectedOutLayersNames());
+
+    int rows = outputs[0].size[1];
+    int dimensions = outputs[0].size[2];
+
+    bool yolov8 = false;
+    // yolov5 has an output of shape (batchSize, 25200, 85) (Num classes + box[x,y,w,h] + confidence[c])
+    // yolov8 has an output of shape (batchSize, 84, 8400) (Num classes + box[x,y,w,h])
+    if (dimensions > rows) // Check if the shape[2] is more than shape[1] (yolov8)
+    {
+        yolov8 = true;
+        rows = outputs[0].size[2];
+        dimensions = outputs[0].size[1];
+
+        outputs[0] = outputs[0].reshape(1, dimensions);
+        cv::transpose(outputs[0], outputs[0]);
+    }
+    float *data = (float *)outputs[0].data;
+
+    float x_factor = modelInput.cols / modelShape.width;
+    float y_factor = modelInput.rows / modelShape.height;
+
+    std::vector<int> class_ids;
+    std::vector<float> confidences;
+    std::vector<cv::Rect> boxes;
+
+    for (int i = 0; i < rows; ++i)
+    {
+        if (yolov8)
+        {
+            float *classes_scores = data + 4;
+
+            cv::Mat scores(1, classes.size(), CV_32FC1, classes_scores);
+            cv::Point class_id;
+            double maxClassScore;
+
+            minMaxLoc(scores, 0, &maxClassScore, 0, &class_id);
+
+            if (maxClassScore > modelScoreThreshold)
+            {
+                confidences.push_back(maxClassScore);
+                class_ids.push_back(class_id.x);
+
+                float x = data[0];
+                float y = data[1];
+                float w = data[2];
+                float h = data[3];
+
+                int left = int((x - 0.5 * w) * x_factor);
+                int top = int((y - 0.5 * h) * y_factor);
+
+                int width = int(w * x_factor);
+                int height = int(h * y_factor);
+
+                boxes.push_back(cv::Rect(left, top, width, height));
+            }
+        }
+        else // yolov5
+        {
+            float confidence = data[4];
+
+            if (confidence >= modelConfidenceThreshold)
+            {
+                float *classes_scores = data + 5;
+
+                cv::Mat scores(1, classes.size(), CV_32FC1, classes_scores);
+                cv::Point class_id;
+                double max_class_score;
+
+                minMaxLoc(scores, 0, &max_class_score, 0, &class_id);
+
+                if (max_class_score > modelScoreThreshold)
+                {
+                    confidences.push_back(confidence);
+                    class_ids.push_back(class_id.x);
+
+                    float x = data[0];
+                    float y = data[1];
+                    float w = data[2];
+                    float h = data[3];
+
+                    int left = int((x - 0.5 * w) * x_factor);
+                    int top = int((y - 0.5 * h) * y_factor);
+
+                    int width = int(w * x_factor);
+                    int height = int(h * y_factor);
+
+                    boxes.push_back(cv::Rect(left, top, width, height));
+                }
+            }
+        }
+
+        data += dimensions;
+    }
+
+    std::vector<int> nms_result;
+    cv::dnn::NMSBoxes(boxes, confidences, modelScoreThreshold, modelNMSThreshold, nms_result);
+
+    std::vector<Detection> detections{};
+    for (unsigned long i = 0; i < nms_result.size(); ++i)
+    {
+        int idx = nms_result[i];
+
+        Detection result;
+        result.class_id = class_ids[idx];
+        result.confidence = confidences[idx];
+
+        std::random_device rd;
+        std::mt19937 gen(rd());
+        std::uniform_int_distribution<int> dis(100, 255);
+        result.color = cv::Scalar(dis(gen), dis(gen), dis(gen));
+
+        result.className = classes[result.class_id];
+        result.box = boxes[idx];
+
+        detections.push_back(result);
+    }
+
+    return detections;
+}
+
+void Inference::loadClassesFromFile()
+{
+    std::ifstream inputFile(classesPath);
+    if (inputFile.is_open())
+    {
+        std::string classLine;
+        while (std::getline(inputFile, classLine))
+            classes.push_back(classLine);
+        inputFile.close();
+    }
+}
+
+void Inference::loadOnnxNetwork()
+{
+    net = cv::dnn::readNetFromONNX(modelPath);
+    if (cudaEnabled)
+    {
+        std::cout << "\nRunning on CUDA" << std::endl;
+        net.setPreferableBackend(cv::dnn::DNN_BACKEND_CUDA);
+        net.setPreferableTarget(cv::dnn::DNN_TARGET_CUDA);
+    }
+    else
+    {
+        std::cout << "\nRunning on CPU" << std::endl;
+        net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
+        net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
+    }
+}
+
+cv::Mat Inference::formatToSquare(const cv::Mat &source)
+{
+    int col = source.cols;
+    int row = source.rows;
+    int _max = MAX(col, row);
+    cv::Mat result = cv::Mat::zeros(_max, _max, CV_8UC3);
+    source.copyTo(result(cv::Rect(0, 0, col, row)));
+    return result;
+}
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp:Zone.Identifier b/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/inference.cpp:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/inference.h b/ultralytics/examples/YOLOv8-CPP-Inference/inference.h
new file mode 100755
index 0000000..dc6149f
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/inference.h
@@ -0,0 +1,52 @@
+#ifndef INFERENCE_H
+#define INFERENCE_H
+
+// Cpp native
+#include <fstream>
+#include <vector>
+#include <string>
+#include <random>
+
+// OpenCV / DNN / Inference
+#include <opencv2/imgproc.hpp>
+#include <opencv2/opencv.hpp>
+#include <opencv2/dnn.hpp>
+
+struct Detection
+{
+    int class_id{0};
+    std::string className{};
+    float confidence{0.0};
+    cv::Scalar color{};
+    cv::Rect box{};
+};
+
+class Inference
+{
+public:
+    Inference(const std::string &onnxModelPath, const cv::Size &modelInputShape = {640, 640}, const std::string &classesTxtFile = "", const bool &runWithCuda = true);
+    std::vector<Detection> runInference(const cv::Mat &input);
+
+private:
+    void loadClassesFromFile();
+    void loadOnnxNetwork();
+    cv::Mat formatToSquare(const cv::Mat &source);
+
+    std::string modelPath{};
+    std::string classesPath{};
+    bool cudaEnabled{};
+
+    std::vector<std::string> classes{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};
+
+    cv::Size2f modelShape{};
+
+    float modelConfidenceThreshold {0.25};
+    float modelScoreThreshold {0.45};
+    float modelNMSThreshold {0.50};
+
+    bool letterBoxForSquare = true;
+
+    cv::dnn::Net net;
+};
+
+#endif // INFERENCE_H
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/inference.h:Zone.Identifier b/ultralytics/examples/YOLOv8-CPP-Inference/inference.h:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/inference.h:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp b/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp
new file mode 100755
index 0000000..6d1ba98
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp
@@ -0,0 +1,70 @@
+#include <iostream>
+#include <vector>
+#include <getopt.h>
+
+#include <opencv2/opencv.hpp>
+
+#include "inference.h"
+
+using namespace std;
+using namespace cv;
+
+int main(int argc, char **argv)
+{
+    std::string projectBasePath = "/home/user/ultralytics"; // Set your ultralytics base path
+
+    bool runOnGPU = true;
+
+    //
+    // Pass in either:
+    //
+    // "yolov8s.onnx" or "yolov5s.onnx"
+    //
+    // To run Inference with yolov8/yolov5 (ONNX)
+    //
+
+    // Note that in this example the classes are hard-coded and 'classes.txt' is a placeholder.
+    Inference inf(projectBasePath + "/yolov8s.onnx", cv::Size(640, 480), "classes.txt", runOnGPU);
+
+    std::vector<std::string> imageNames;
+    imageNames.push_back(projectBasePath + "/ultralytics/assets/bus.jpg");
+    imageNames.push_back(projectBasePath + "/ultralytics/assets/zidane.jpg");
+
+    for (int i = 0; i < imageNames.size(); ++i)
+    {
+        cv::Mat frame = cv::imread(imageNames[i]);
+
+        // Inference starts here...
+        std::vector<Detection> output = inf.runInference(frame);
+
+        int detections = output.size();
+        std::cout << "Number of detections:" << detections << std::endl;
+
+        for (int i = 0; i < detections; ++i)
+        {
+            Detection detection = output[i];
+
+            cv::Rect box = detection.box;
+            cv::Scalar color = detection.color;
+
+            // Detection box
+            cv::rectangle(frame, box, color, 2);
+
+            // Detection box text
+            std::string classString = detection.className + ' ' + std::to_string(detection.confidence).substr(0, 4);
+            cv::Size textSize = cv::getTextSize(classString, cv::FONT_HERSHEY_DUPLEX, 1, 2, 0);
+            cv::Rect textBox(box.x, box.y - 40, textSize.width + 10, textSize.height + 20);
+
+            cv::rectangle(frame, textBox, color, cv::FILLED);
+            cv::putText(frame, classString, cv::Point(box.x + 5, box.y - 10), cv::FONT_HERSHEY_DUPLEX, 1, cv::Scalar(0, 0, 0), 2, 0);
+        }
+        // Inference ends here...
+
+        // This is only for preview purposes
+        float scale = 0.8;
+        cv::resize(frame, frame, cv::Size(frame.cols*scale, frame.rows*scale));
+        cv::imshow("Inference", frame);
+
+        cv::waitKey(-1);
+    }
+}
diff --git a/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp:Zone.Identifier b/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-CPP-Inference/main.cpp:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt
new file mode 100755
index 0000000..2cbd796
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt
@@ -0,0 +1,47 @@
+cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
+
+project(yolov8_libtorch_example)
+
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+set(CMAKE_CXX_EXTENSIONS OFF)
+
+
+# -------------- OpenCV --------------
+set(OpenCV_DIR "/path/to/opencv/lib/cmake/opencv4")
+find_package(OpenCV REQUIRED)
+
+message(STATUS "OpenCV library status:")
+message(STATUS "    config: ${OpenCV_DIR}")
+message(STATUS "    version: ${OpenCV_VERSION}")
+message(STATUS "    libraries: ${OpenCV_LIBS}")
+message(STATUS "    include path: ${OpenCV_INCLUDE_DIRS}")
+
+include_directories(${OpenCV_INCLUDE_DIRS})
+
+# -------------- libtorch --------------
+list(APPEND CMAKE_PREFIX_PATH "/path/to/libtorch")
+set(Torch_DIR "/path/to/libtorch/share/cmake/Torch")
+
+find_package(Torch REQUIRED)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")
+message("${TORCH_LIBRARIES}")
+message("${TORCH_INCLUDE_DIRS}")
+
+# The following code block is suggested to be used on Windows.
+# According to https://github.com/pytorch/pytorch/issues/25457,
+# the DLLs need to be copied to avoid memory errors.
+# if (MSVC)
+#     file(GLOB TORCH_DLLS "${TORCH_INSTALL_PREFIX}/lib/*.dll")
+#     add_custom_command(TARGET yolov8_libtorch_example
+#                        POST_BUILD
+#                        COMMAND ${CMAKE_COMMAND} -E copy_if_different
+#                        ${TORCH_DLLS}
+#                        $<TARGET_FILE_DIR:yolov8_libtorch_example>)
+# endif (MSVC)
+
+include_directories(${TORCH_INCLUDE_DIRS})
+
+add_executable(yolov8_libtorch_inference "${CMAKE_CURRENT_SOURCE_DIR}/main.cc")
+target_link_libraries(yolov8_libtorch_inference ${TORCH_LIBRARIES} ${OpenCV_LIBS})
+set_property(TARGET yolov8_libtorch_inference PROPERTY CXX_STANDARD 17)
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt:Zone.Identifier b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/CMakeLists.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md
new file mode 100755
index 0000000..930c3cd
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md
@@ -0,0 +1,35 @@
+# YOLOv8 LibTorch Inference C++
+
+This example demonstrates how to perform inference using YOLOv8 models in C++ with the LibTorch API.
+
+## Dependencies
+
+| Dependency   | Version  |
+| ------------ | -------- |
+| OpenCV       | >=4.0.0  |
+| C++ Standard | >=17     |
+| Cmake        | >=3.18   |
+| Libtorch     | >=1.12.1 |
+
+## Usage
+
+```bash
+git clone ultralytics
+cd ultralytics
+pip install .
+cd examples/YOLOv8-LibTorch-CPP-Inference
+
+mkdir build
+cd build
+cmake ..
+make
+./yolov8_libtorch_inference
+```
+
+## Exporting YOLOv8
+
+To export YOLOv8 models:
+
+```commandline
+yolo export model=yolov8s.pt imgsz=640 format=torchscript
+```
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/README.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
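Before wiring the exported module into the C++ code, it can be worth confirming that it loads and runs from Python. A minimal sketch, assuming the `yolov8s.torchscript` file produced by the export command above:

```python
import torch

# Load the exported TorchScript module and run a dummy forward pass.
model = torch.jit.load('yolov8s.torchscript', map_location='cpu')
model.eval()
dummy = torch.zeros(1, 3, 640, 640)  # NCHW float input, matching the 640x640 export
with torch.no_grad():
    out = model(dummy)
print(out.shape)  # expected to be (1, 84, 8400) for an 80-class detection model
```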
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc
new file mode 100755
index 0000000..ebb1a75
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc
@@ -0,0 +1,259 @@
+#include <iostream>
+
+#include <opencv2/core.hpp>
+#include <opencv2/imgproc.hpp>
+#include <opencv2/imgcodecs.hpp>
+#include <torch/torch.h>
+#include <torch/script.h>
+
+using torch::indexing::Slice;
+using torch::indexing::None;
+
+
+float generate_scale(cv::Mat& image, const std::vector<int>& target_size) {
+    int origin_w = image.cols;
+    int origin_h = image.rows;
+
+    int target_h = target_size[0];
+    int target_w = target_size[1];
+
+    float ratio_h = static_cast<float>(target_h) / static_cast<float>(origin_h);
+    float ratio_w = static_cast<float>(target_w) / static_cast<float>(origin_w);
+    float resize_scale = std::min(ratio_h, ratio_w);
+    return resize_scale;
+}
+
+
+float letterbox(cv::Mat &input_image, cv::Mat &output_image, const std::vector<int> &target_size) {
+    if (input_image.cols == target_size[1] && input_image.rows == target_size[0]) {
+        if (input_image.data == output_image.data) {
+            return 1.;
+        } else {
+            output_image = input_image.clone();
+            return 1.;
+        }
+    }
+
+    float resize_scale = generate_scale(input_image, target_size);
+    int new_shape_w = std::round(input_image.cols * resize_scale);
+    int new_shape_h = std::round(input_image.rows * resize_scale);
+    float padw = (target_size[1] - new_shape_w) / 2.;
+    float padh = (target_size[0] - new_shape_h) / 2.;
+
+    int top = std::round(padh - 0.1);
+    int bottom = std::round(padh + 0.1);
+    int left = std::round(padw - 0.1);
+    int right = std::round(padw + 0.1);
+
+    cv::resize(input_image, output_image,
+               cv::Size(new_shape_w, new_shape_h),
+               0, 0, cv::INTER_AREA);
+
+    cv::copyMakeBorder(output_image, output_image, top, bottom, left, right,
+                       cv::BORDER_CONSTANT, cv::Scalar(114.));
+    return resize_scale;
+}
+
+
+torch::Tensor xyxy2xywh(const torch::Tensor& x) {
+    auto y = torch::empty_like(x);
+    y.index_put_({"...", 0}, (x.index({"...", 0}) + x.index({"...", 2})).div(2));
+    y.index_put_({"...", 1}, (x.index({"...", 1}) + x.index({"...", 3})).div(2));
+    y.index_put_({"...", 2}, x.index({"...", 2}) - x.index({"...", 0}));
+    y.index_put_({"...", 3}, x.index({"...", 3}) - x.index({"...", 1}));
+    return y;
+}
+
+
+torch::Tensor xywh2xyxy(const torch::Tensor& x) {
+    auto y = torch::empty_like(x);
+    auto dw = x.index({"...", 2}).div(2);
+    auto dh = x.index({"...", 3}).div(2);
+    y.index_put_({"...", 0}, x.index({"...", 0}) - dw);
+    y.index_put_({"...", 1}, x.index({"...", 1}) - dh);
+    y.index_put_({"...", 2}, x.index({"...", 0}) + dw);
+    y.index_put_({"...", 3}, x.index({"...", 1}) + dh);
+    return y;
+}
+
+
+// Reference: https://github.com/pytorch/vision/blob/main/torchvision/csrc/ops/cpu/nms_kernel.cpp
+torch::Tensor nms(const torch::Tensor& bboxes, const torch::Tensor& scores, float iou_threshold) {
+    if (bboxes.numel() == 0)
+        return torch::empty({0}, bboxes.options().dtype(torch::kLong));
+
+    auto x1_t = bboxes.select(1, 0).contiguous();
+    auto y1_t = bboxes.select(1, 1).contiguous();
+    auto x2_t = bboxes.select(1, 2).contiguous();
+    auto y2_t = bboxes.select(1, 3).contiguous();
+
+    torch::Tensor areas_t = (x2_t - x1_t) * (y2_t - y1_t);
+
+    auto order_t = std::get<1>(
+        scores.sort(/*stable=*/true, /*dim=*/0, /* descending=*/true));
+
+    auto ndets = bboxes.size(0);
+    torch::Tensor suppressed_t = torch::zeros({ndets}, bboxes.options().dtype(torch::kByte));
+    torch::Tensor keep_t = torch::zeros({ndets}, bboxes.options().dtype(torch::kLong));
+
+    auto suppressed = suppressed_t.data_ptr<uint8_t>();
+    auto keep = keep_t.data_ptr<int64_t>();
+    auto order = order_t.data_ptr<int64_t>();
+    auto x1 = x1_t.data_ptr<float>();
+    auto y1 = y1_t.data_ptr<float>();
+    auto x2 = x2_t.data_ptr<float>();
+    auto y2 = y2_t.data_ptr<float>();
+    auto areas = areas_t.data_ptr<float>();
+
+    int64_t num_to_keep = 0;
+
+    for (int64_t _i = 0; _i < ndets; _i++) {
+        auto i = order[_i];
+        if (suppressed[i] == 1)
+            continue;
+        keep[num_to_keep++] = i;
+        auto ix1 = x1[i];
+        auto iy1 = y1[i];
+        auto ix2 = x2[i];
+        auto iy2 = y2[i];
+        auto iarea = areas[i];
+
+        for (int64_t _j = _i + 1; _j < ndets; _j++) {
+            auto j = order[_j];
+            if (suppressed[j] == 1)
+                continue;
+            auto xx1 = std::max(ix1, x1[j]);
+            auto yy1 = std::max(iy1, y1[j]);
+            auto xx2 = std::min(ix2, x2[j]);
+            auto yy2 = std::min(iy2, y2[j]);
+
+            auto w = std::max(static_cast<float>(0), xx2 - xx1);
+            auto h = std::max(static_cast<float>(0), yy2 - yy1);
+            auto inter = w * h;
+            auto ovr = inter / (iarea + areas[j] - inter);
+            if (ovr > iou_threshold)
+                suppressed[j] = 1;
+        }
+    }
+    return keep_t.narrow(0, 0, num_to_keep);
+}
+
+
+torch::Tensor non_max_suppression(torch::Tensor& prediction, float conf_thres = 0.25, float iou_thres = 0.45, int max_det = 300) {
+    auto bs = prediction.size(0);
+    auto nc = prediction.size(1) - 4;
+    auto nm = prediction.size(1) - nc - 4;
+    auto mi = 4 + nc;
+    auto xc = prediction.index({Slice(), Slice(4, mi)}).amax(1) > conf_thres;
+
+    prediction = prediction.transpose(-1, -2);
+    prediction.index_put_({"...", Slice({None, 4})}, xywh2xyxy(prediction.index({"...", Slice(None, 4)})));
+
+    std::vector<torch::Tensor> output;
+    for (int i = 0; i < bs; i++) {
+        output.push_back(torch::zeros({0, 6 + nm}, prediction.device()));
+    }
+
+    for (int xi = 0; xi < prediction.size(0); xi++) {
+        auto x = prediction[xi];
+        x = x.index({xc[xi]});
+        auto x_split = x.split({4, nc, nm}, 1);
+        auto box = x_split[0], cls = x_split[1], mask = x_split[2];
+        auto [conf, j] = cls.max(1, true);
+        x = torch::cat({box, conf, j.toType(torch::kFloat), mask}, 1);
+        x = x.index({conf.view(-1) > conf_thres});
+        int n = x.size(0);
+        if (!n) { continue; }
+
+        // NMS
+        auto c = x.index({Slice(), Slice{5, 6}}) * 7680;
+        auto boxes = x.index({Slice(), Slice(None, 4)}) + c;
+        auto scores = x.index({Slice(), 4});
+        auto i = nms(boxes, scores, iou_thres);
+        i = i.index({Slice(None, max_det)});
+        output[xi] = x.index({i});
+    }
+
+    return torch::stack(output);
+}
+
+
+torch::Tensor clip_boxes(torch::Tensor& boxes, const std::vector<int>& shape) {
+    boxes.index_put_({"...", 0}, boxes.index({"...", 0}).clamp(0, shape[1]));
+    boxes.index_put_({"...", 1}, boxes.index({"...", 1}).clamp(0, shape[0]));
+    boxes.index_put_({"...", 2}, boxes.index({"...", 2}).clamp(0, shape[1]));
+    boxes.index_put_({"...", 3}, boxes.index({"...", 3}).clamp(0, shape[0]));
+    return boxes;
+}
+
+
+torch::Tensor scale_boxes(const std::vector<int>& img1_shape, torch::Tensor& boxes, const std::vector<int>& img0_shape) {
+    auto gain = (std::min)((float)img1_shape[0] / img0_shape[0], (float)img1_shape[1] / img0_shape[1]);
+    auto pad0 = std::round((float)(img1_shape[1] - img0_shape[1] * gain) / 2. - 0.1);
+    auto pad1 = std::round((float)(img1_shape[0] - img0_shape[0] * gain) / 2. - 0.1);
+
+    boxes.index_put_({"...", 0}, boxes.index({"...", 0}) - pad0);
+    boxes.index_put_({"...", 2}, boxes.index({"...", 2}) - pad0);
+    boxes.index_put_({"...", 1}, boxes.index({"...", 1}) - pad1);
+    boxes.index_put_({"...", 3}, boxes.index({"...", 3}) - pad1);
+    boxes.index_put_({"...", Slice(None, 4)}, boxes.index({"...", Slice(None, 4)}).div(gain));
+    return boxes;
+}
+
+
+int main() {
+    // Device
+    torch::Device device(torch::cuda::is_available() ? torch::kCUDA : torch::kCPU);
+
+    // Note that in this example the classes are hard-coded
+    std::vector<std::string> classes {"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant",
+        "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra",
+        "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite",
+        "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife",
+        "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair",
+        "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
+        "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};
+
+    try {
+        // Load the model (e.g. yolov8s.torchscript)
+        std::string model_path = "/path/to/yolov8s.torchscript";
+        torch::jit::script::Module yolo_model;
+        yolo_model = torch::jit::load(model_path);
+        yolo_model.eval();
+        yolo_model.to(device, torch::kFloat32);
+
+        // Load image and preprocess
+        cv::Mat image = cv::imread("/path/to/bus.jpg");
+        cv::Mat input_image;
+        letterbox(image, input_image, {640, 640});
+
+        torch::Tensor image_tensor = torch::from_blob(input_image.data, {input_image.rows, input_image.cols, 3}, torch::kByte).to(device);
+        image_tensor = image_tensor.toType(torch::kFloat32).div(255);
+        image_tensor = image_tensor.permute({2, 0, 1});
+        image_tensor = image_tensor.unsqueeze(0);
+        std::vector<torch::jit::IValue> inputs {image_tensor};
+
+        // Inference
+        torch::Tensor output = yolo_model.forward(inputs).toTensor().cpu();
+
+        // NMS
+        auto keep = non_max_suppression(output)[0];
+        auto boxes = keep.index({Slice(), Slice(None, 4)});
+        keep.index_put_({Slice(), Slice(None, 4)}, scale_boxes({input_image.rows, input_image.cols}, boxes, {image.rows, image.cols}));
+
+        // Show the results
+        for (int i = 0; i < keep.size(0); i++) {
+            int x1 = keep[i][0].item().toFloat();
+            int y1 = keep[i][1].item().toFloat();
+            int x2 = keep[i][2].item().toFloat();
+            int y2 = keep[i][3].item().toFloat();
+            float conf = keep[i][4].item().toFloat();
+            int cls = keep[i][5].item().toInt();
+            std::cout << "Rect: [" << x1 << "," << y1 << "," << x2 << "," << y2 << "]  Conf: " << conf << "  Class: " << classes[cls] << std::endl;
+        }
+    } catch (const c10::Error& e) {
+        std::cout << e.msg() << std::endl;
+    }
+
+    return 0;
+}
diff --git a/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc:Zone.Identifier b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-LibTorch-CPP-Inference/main.cc:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
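The hand-rolled `nms` above implements the same greedy suppression as `torchvision.ops.nms`, so the two can be cross-checked from Python. A small worked example with made-up boxes (our values, not from the repository):

```python
import torch
from torchvision.ops import nms

boxes = torch.tensor([[0., 0., 10., 10.],
                      [1., 1., 11., 11.],     # IoU with box 0 = 81/119 ~ 0.68 -> suppressed
                      [20., 20., 30., 30.]])  # disjoint -> kept
scores = torch.tensor([0.9, 0.8, 0.7])

print(nms(boxes, scores, iou_threshold=0.45))  # tensor([0, 2])
```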
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt
new file mode 100755
index 0000000..86232cc
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt
@@ -0,0 +1,96 @@
+cmake_minimum_required(VERSION 3.5)
+
+set(PROJECT_NAME Yolov8OnnxRuntimeCPPInference)
+project(${PROJECT_NAME} VERSION 0.0.1 LANGUAGES CXX)
+
+
+# -------------- Support C++17 for using filesystem ------------------#
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+set(CMAKE_CXX_EXTENSIONS ON)
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
+
+
+# -------------- OpenCV ------------------#
+find_package(OpenCV REQUIRED)
+include_directories(${OpenCV_INCLUDE_DIRS})
+
+
+# -------------- Compile CUDA for FP16 inference if needed ------------------#
+option(USE_CUDA "Enable CUDA support" ON)
+if (NOT APPLE AND USE_CUDA)
+    find_package(CUDA REQUIRED)
+    include_directories(${CUDA_INCLUDE_DIRS})
+    add_definitions(-DUSE_CUDA)
+else ()
+    set(USE_CUDA OFF)
+endif ()
+
+# -------------- ONNXRUNTIME ------------------#
+
+# Set ONNXRUNTIME_VERSION
+set(ONNXRUNTIME_VERSION 1.15.1)
+
+if (WIN32)
+    if (USE_CUDA)
+        set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-win-x64-gpu-${ONNXRUNTIME_VERSION}")
+    else ()
+        set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-win-x64-${ONNXRUNTIME_VERSION}")
+    endif ()
+elseif (LINUX)
+    if (USE_CUDA)
+        set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-linux-x64-gpu-${ONNXRUNTIME_VERSION}")
+    else ()
+        set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}")
+    endif ()
+elseif (APPLE)
+    set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-osx-arm64-${ONNXRUNTIME_VERSION}")
+    # Apple X64 binary
+    # set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-osx-x64-${ONNXRUNTIME_VERSION}")
+    # Apple Universal binary
+    # set(ONNXRUNTIME_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/onnxruntime-osx-universal2-${ONNXRUNTIME_VERSION}")
+endif ()
+
+include_directories(${PROJECT_NAME} ${ONNXRUNTIME_ROOT}/include)
+
+set(PROJECT_SOURCES
+    main.cpp
+    inference.h
+    inference.cpp
+)
+
+add_executable(${PROJECT_NAME} ${PROJECT_SOURCES})
+
+if (WIN32)
+    target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} ${ONNXRUNTIME_ROOT}/lib/onnxruntime.lib)
+    if (USE_CUDA)
+        target_link_libraries(${PROJECT_NAME} ${CUDA_LIBRARIES})
+    endif ()
+elseif (LINUX)
+    target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} ${ONNXRUNTIME_ROOT}/lib/libonnxruntime.so)
+    if (USE_CUDA)
+        target_link_libraries(${PROJECT_NAME} ${CUDA_LIBRARIES})
+    endif ()
+elseif (APPLE)
+    target_link_libraries(${PROJECT_NAME} ${OpenCV_LIBS} ${ONNXRUNTIME_ROOT}/lib/libonnxruntime.dylib)
+endif ()
+
+# For windows system, copy onnxruntime.dll to the same folder of the executable file
+if (WIN32)
+    add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
+                       COMMAND ${CMAKE_COMMAND} -E copy_if_different
+                       "${ONNXRUNTIME_ROOT}/lib/onnxruntime.dll"
+                       $<TARGET_FILE_DIR:${PROJECT_NAME}>)
+endif ()
+
+# Download https://raw.githubusercontent.com/ultralytics/ultralytics/main/ultralytics/cfg/datasets/coco.yaml
+# and put it in the same folder of the executable file
+configure_file(coco.yaml ${CMAKE_CURRENT_BINARY_DIR}/coco.yaml COPYONLY)
+
+# Copy yolov8n.onnx file to the same folder of the executable file
+configure_file(yolov8n.onnx ${CMAKE_CURRENT_BINARY_DIR}/yolov8n.onnx COPYONLY)
+
+# Create folder name images in the same folder of the executable file
+add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
+                   COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/images
+)
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/CMakeLists.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md
new file mode 100755
index 0000000..032cf4a
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md
@@ -0,0 +1,108 @@
+# YOLOv8 OnnxRuntime C++
+
+C++ / ONNX Runtime
+
+This example demonstrates how to perform inference using YOLOv8 in C++ with ONNX Runtime and OpenCV's API.
+
+## Benefits ✨
+
+- Friendly for deployment in the industrial sector.
+- Faster than OpenCV's DNN inference on both CPU and GPU.
+- Supports FP32 and FP16 CUDA acceleration.
+
+## Note :coffee:
+
+1. ~~This repository should also work for YOLOv5, which needs a permute operator for the output of the YOLOv5 model, but this has not been implemented yet.~~ Thanks to a recent Ultralytics release, a `Transpose` op was added to the YOLOv8 model, giving v8 and v5 the same output shape. You can therefore run inference on your YOLOv5/v7/v8 models via this project.
+
+## Exporting YOLOv8 Models 📦
+
+To export YOLOv8 models, use the following Python script:
+
+```python
+from ultralytics import YOLO
+
+# Load a YOLOv8 model
+model = YOLO("yolov8n.pt")
+
+# Export the model
+model.export(format="onnx", opset=12, simplify=True, dynamic=False, imgsz=640)
+```
+
+Alternatively, you can use the following command to export the model in the terminal:
+
+```bash
+yolo export model=yolov8n.pt opset=12 simplify=True dynamic=False format=onnx imgsz=640,640
+```
+
+## Exporting YOLOv8 FP16 Models 📦
+
+```python
+import onnx
+from onnxconverter_common import float16
+
+model = onnx.load(R'YOUR_ONNX_PATH')
+model_fp16 = float16.convert_float_to_float16(model)
+onnx.save(model_fp16, R'YOUR_FP16_ONNX_PATH')
+```
+
+## Download COCO.yaml file 📂
+
+To run the example, you also need to download `coco.yaml`. You can download the file manually from [here](https://raw.githubusercontent.com/ultralytics/ultralytics/main/ultralytics/cfg/datasets/coco.yaml).
+
+## Dependencies ⚙️
+
+| Dependency                           | Version       |
+| ------------------------------------ | ------------- |
+| Onnxruntime (Linux, Windows, macOS)  | >=1.14.1      |
+| OpenCV                               | >=4.0.0       |
+| C++ Standard                         | >=17          |
+| Cmake                                | >=3.5         |
+| Cuda (Optional)                      | >=11.4 \<12.0 |
+| cuDNN (Cuda required)                | =8            |
+
+Note: The dependency on C++17 is due to the usage of the C++17 filesystem feature.
+
+Note (2): Due to ONNX Runtime, we need to use CUDA 11 and cuDNN 8. Keep in mind that this requirement might change in the future.
+
+## Build 🛠️
+
+1. Clone the repository to your local machine.
+2. Navigate to the root directory of the repository.
+3. Create a build directory and navigate to it:
+
+```console
+mkdir build && cd build
+```
+
+4. Run CMake to generate the build files:
+
+```console
+cmake ..
+```
+
+5. Build the project:
+
+```console
+make
+```
+
+6. The built executable should now be located in the `build` directory.
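Before launching the executable, it can be worth confirming that the exported `yolov8n.onnx` is valid and has the expected output shape. A minimal sketch, assuming the `onnx` Python package is installed:

```python
import onnx

# Validate the exported model and print its output shapes.
model = onnx.load('yolov8n.onnx')
onnx.checker.check_model(model)
for out in model.graph.output:
    dims = [d.dim_value for d in out.type.tensor_type.shape.dim]
    print(out.name, dims)  # expect something like: output0 [1, 84, 8400]
```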
+
+## Usage 🚀
+
+```c++
+// Change the parameters as you like.
+// Pay attention to your device and the ONNX model type (fp32 or fp16).
+DL_INIT_PARAM params;
+params.rectConfidenceThreshold = 0.1;
+params.iouThreshold = 0.5;
+params.modelPath = "yolov8n.onnx";
+params.imgSize = { 640, 640 };
+params.cudaEnable = true;
+params.modelType = YOLO_DETECT_V8;
+yoloDetector->CreateSession(params);
+Detector(yoloDetector);
+```
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/README.md:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp
new file mode 100755
index 0000000..3d0427f
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp
@@ -0,0 +1,363 @@
+#include "inference.h"
+#include <regex>
+
+#define benchmark
+#define min(a,b) (((a) < (b)) ? (a) : (b))
+YOLO_V8::YOLO_V8() {
+
+}
+
+
+YOLO_V8::~YOLO_V8() {
+    delete session;
+}
+
+#ifdef USE_CUDA
+namespace Ort
+{
+    template<>
+    struct TypeToTensorType<half> { static constexpr ONNXTensorElementDataType type = ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16; };
+}
+#endif
+
+
+template<typename T>
+char* BlobFromImage(cv::Mat& iImg, T& iBlob) {
+    int channels = iImg.channels();
+    int imgHeight = iImg.rows;
+    int imgWidth = iImg.cols;
+
+    for (int c = 0; c < channels; c++)
+    {
+        for (int h = 0; h < imgHeight; h++)
+        {
+            for (int w = 0; w < imgWidth; w++)
+            {
+                iBlob[c * imgWidth * imgHeight + h * imgWidth + w] = typename std::remove_pointer<T>::type(
+                    (iImg.at<cv::Vec3b>(h, w)[c]) / 255.0f);
+            }
+        }
+    }
+    return RET_OK;
+}
+
+
+char* YOLO_V8::PreProcess(cv::Mat& iImg, std::vector<int> iImgSize, cv::Mat& oImg)
+{
+    if (iImg.channels() == 3)
+    {
+        oImg = iImg.clone();
+        cv::cvtColor(oImg, oImg, cv::COLOR_BGR2RGB);
+    }
+    else
+    {
+        cv::cvtColor(iImg, oImg, cv::COLOR_GRAY2RGB);
+    }
+
+    switch (modelType)
+    {
+    case YOLO_DETECT_V8:
+    case YOLO_POSE:
+    case YOLO_DETECT_V8_HALF:
+    case YOLO_POSE_V8_HALF: // LetterBox
+    {
+        if (iImg.cols >= iImg.rows)
+        {
+            resizeScales = iImg.cols / (float)iImgSize.at(0);
+            cv::resize(oImg, oImg, cv::Size(iImgSize.at(0), int(iImg.rows / resizeScales)));
+        }
+        else
+        {
+            resizeScales = iImg.rows / (float)iImgSize.at(0);
+            cv::resize(oImg, oImg, cv::Size(int(iImg.cols / resizeScales), iImgSize.at(1)));
+        }
+        cv::Mat tempImg = cv::Mat::zeros(iImgSize.at(0), iImgSize.at(1), CV_8UC3);
+        oImg.copyTo(tempImg(cv::Rect(0, 0, oImg.cols, oImg.rows)));
+        oImg = tempImg;
+        break;
+    }
+    case YOLO_CLS: // CenterCrop
+    {
+        int h = iImg.rows;
+        int w = iImg.cols;
+        int m = min(h, w);
+        int top = (h - m) / 2;
+        int left = (w - m) / 2;
+        cv::resize(oImg(cv::Rect(left, top, m, m)), oImg, cv::Size(iImgSize.at(0), iImgSize.at(1)));
+        break;
+    }
+    }
+    return RET_OK;
+}
+
+
+char* YOLO_V8::CreateSession(DL_INIT_PARAM& iParams) {
+    char* Ret = RET_OK;
+    std::regex pattern("[\u4e00-\u9fa5]");
+    bool result = std::regex_search(iParams.modelPath, pattern);
+    if (result)
+    {
+        Ret = "[YOLO_V8]:Model path error. Change your model path to one without Chinese characters.";
+        std::cout << Ret << std::endl;
+        return Ret;
+    }
+    try
+    {
+        rectConfidenceThreshold = iParams.rectConfidenceThreshold;
+        iouThreshold = iParams.iouThreshold;
+        imgSize = iParams.imgSize;
+        modelType = iParams.modelType;
+        env = Ort::Env(ORT_LOGGING_LEVEL_WARNING, "Yolo");
+        Ort::SessionOptions sessionOption;
+        if (iParams.cudaEnable)
+        {
+            cudaEnable = iParams.cudaEnable;
+            OrtCUDAProviderOptions cudaOption;
+            cudaOption.device_id = 0;
+            sessionOption.AppendExecutionProvider_CUDA(cudaOption);
+        }
+        sessionOption.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_ALL);
+        sessionOption.SetIntraOpNumThreads(iParams.intraOpNumThreads);
+        sessionOption.SetLogSeverityLevel(iParams.logSeverityLevel);
+
+#ifdef _WIN32
+        int ModelPathSize = MultiByteToWideChar(CP_UTF8, 0, iParams.modelPath.c_str(), static_cast<int>(iParams.modelPath.length()), nullptr, 0);
+        wchar_t* wide_cstr = new wchar_t[ModelPathSize + 1];
+        MultiByteToWideChar(CP_UTF8, 0, iParams.modelPath.c_str(), static_cast<int>(iParams.modelPath.length()), wide_cstr, ModelPathSize);
+        wide_cstr[ModelPathSize] = L'\0';
+        const wchar_t* modelPath = wide_cstr;
+#else
+        const char* modelPath = iParams.modelPath.c_str();
+#endif // _WIN32
+
+        session = new Ort::Session(env, modelPath, sessionOption);
+        Ort::AllocatorWithDefaultOptions allocator;
+        size_t inputNodesNum = session->GetInputCount();
+        for (size_t i = 0; i < inputNodesNum; i++)
+        {
+            Ort::AllocatedStringPtr input_node_name = session->GetInputNameAllocated(i, allocator);
+            char* temp_buf = new char[50];
+            strcpy(temp_buf, input_node_name.get());
+            inputNodeNames.push_back(temp_buf);
+        }
+        size_t OutputNodesNum = session->GetOutputCount();
+        for (size_t i = 0; i < OutputNodesNum; i++)
+        {
+            Ort::AllocatedStringPtr output_node_name = session->GetOutputNameAllocated(i, allocator);
+            char* temp_buf = new char[10];
+            strcpy(temp_buf, output_node_name.get());
+            outputNodeNames.push_back(temp_buf);
+        }
+        options = Ort::RunOptions{ nullptr };
+        WarmUpSession();
+        return RET_OK;
+    }
+    catch (const std::exception& e)
+    {
+        const char* str1 = "[YOLO_V8]:";
+        const char* str2 = e.what();
+        std::string result = std::string(str1) + std::string(str2);
+        char* merged = new char[result.length() + 1];
+        std::strcpy(merged, result.c_str());
+        std::cout << merged << std::endl;
+        delete[] merged;
+        return "[YOLO_V8]:Create session failed.";
+    }
+
+}
+
+
+char* YOLO_V8::RunSession(cv::Mat& iImg, std::vector<DL_RESULT>& oResult) {
+#ifdef benchmark
+    clock_t starttime_1 = clock();
+#endif // benchmark
+
+    char* Ret = RET_OK;
+    cv::Mat processedImg;
+    PreProcess(iImg, imgSize, processedImg);
+    if (modelType < 4)
+    {
+        float* blob = new float[processedImg.total() * 3];
+        BlobFromImage(processedImg, blob);
+        std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
+        TensorProcess(starttime_1, iImg, blob, inputNodeDims, oResult);
+    }
+    else
+    {
+#ifdef USE_CUDA
+        half* blob = new half[processedImg.total() * 3];
+        BlobFromImage(processedImg, blob);
+        std::vector<int64_t> inputNodeDims = { 1, 3, imgSize.at(0), imgSize.at(1) };
+        TensorProcess(starttime_1, iImg, blob, inputNodeDims, oResult);
+#endif
+    }
+
+    return Ret;
+}
outputTensor.front().GetTypeInfo(); + auto tensor_info = typeInfo.GetTensorTypeAndShapeInfo(); + std::vector outputNodeDims = tensor_info.GetShape(); + auto output = outputTensor.front().GetTensorMutableData::type>(); + delete blob; + switch (modelType) + { + case YOLO_DETECT_V8: + case YOLO_DETECT_V8_HALF: + { + int strideNum = outputNodeDims[1];//8400 + int signalResultNum = outputNodeDims[2];//84 + std::vector class_ids; + std::vector confidences; + std::vector boxes; + cv::Mat rawData; + if (modelType == YOLO_DETECT_V8) + { + // FP32 + rawData = cv::Mat(strideNum, signalResultNum, CV_32F, output); + } + else + { + // FP16 + rawData = cv::Mat(strideNum, signalResultNum, CV_16F, output); + rawData.convertTo(rawData, CV_32F); + } + //Note: + //ultralytics add transpose operator to the output of yolov8 model.which make yolov8/v5/v7 has same shape + //https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt + //rowData = rowData.t(); + + float* data = (float*)rawData.data; + + for (int i = 0; i < strideNum; ++i) + { + float* classesScores = data + 4; + cv::Mat scores(1, this->classes.size(), CV_32FC1, classesScores); + cv::Point class_id; + double maxClassScore; + cv::minMaxLoc(scores, 0, &maxClassScore, 0, &class_id); + if (maxClassScore > rectConfidenceThreshold) + { + confidences.push_back(maxClassScore); + class_ids.push_back(class_id.x); + float x = data[0]; + float y = data[1]; + float w = data[2]; + float h = data[3]; + + int left = int((x - 0.5 * w) * resizeScales); + int top = int((y - 0.5 * h) * resizeScales); + + int width = int(w * resizeScales); + int height = int(h * resizeScales); + + boxes.push_back(cv::Rect(left, top, width, height)); + } + data += signalResultNum; + } + std::vector nmsResult; + cv::dnn::NMSBoxes(boxes, confidences, rectConfidenceThreshold, iouThreshold, nmsResult); + for (int i = 0; i < nmsResult.size(); ++i) + { + int idx = nmsResult[i]; + DL_RESULT result; + result.classId = class_ids[idx]; + result.confidence = confidences[idx]; + result.box = boxes[idx]; + oResult.push_back(result); + } + +#ifdef benchmark + clock_t starttime_4 = clock(); + double pre_process_time = (double)(starttime_2 - starttime_1) / CLOCKS_PER_SEC * 1000; + double process_time = (double)(starttime_3 - starttime_2) / CLOCKS_PER_SEC * 1000; + double post_process_time = (double)(starttime_4 - starttime_3) / CLOCKS_PER_SEC * 1000; + if (cudaEnable) + { + std::cout << "[YOLO_V8(CUDA)]: " << pre_process_time << "ms pre-process, " << process_time << "ms inference, " << post_process_time << "ms post-process." << std::endl; + } + else + { + std::cout << "[YOLO_V8(CPU)]: " << pre_process_time << "ms pre-process, " << process_time << "ms inference, " << post_process_time << "ms post-process." << std::endl; + } +#endif // benchmark + + break; + } + case YOLO_CLS: + { + DL_RESULT result; + for (int i = 0; i < this->classes.size(); i++) + { + result.classId = i; + result.confidence = output[i]; + oResult.push_back(result); + } + break; + } + default: + std::cout << "[YOLO_V8]: " << "Not support model type." 
<< std::endl; + } + return RET_OK; + +} + + +char* YOLO_V8::WarmUpSession() { + clock_t starttime_1 = clock(); + cv::Mat iImg = cv::Mat(cv::Size(imgSize.at(0), imgSize.at(1)), CV_8UC3); + cv::Mat processedImg; + PreProcess(iImg, imgSize, processedImg); + if (modelType < 4) + { + float* blob = new float[iImg.total() * 3]; + BlobFromImage(processedImg, blob); + std::vector YOLO_input_node_dims = { 1, 3, imgSize.at(0), imgSize.at(1) }; + Ort::Value input_tensor = Ort::Value::CreateTensor( + Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU), blob, 3 * imgSize.at(0) * imgSize.at(1), + YOLO_input_node_dims.data(), YOLO_input_node_dims.size()); + auto output_tensors = session->Run(options, inputNodeNames.data(), &input_tensor, 1, outputNodeNames.data(), + outputNodeNames.size()); + delete[] blob; + clock_t starttime_4 = clock(); + double post_process_time = (double)(starttime_4 - starttime_1) / CLOCKS_PER_SEC * 1000; + if (cudaEnable) + { + std::cout << "[YOLO_V8(CUDA)]: " << "Cuda warm-up cost " << post_process_time << " ms. " << std::endl; + } + } + else + { +#ifdef USE_CUDA + half* blob = new half[iImg.total() * 3]; + BlobFromImage(processedImg, blob); + std::vector YOLO_input_node_dims = { 1,3,imgSize.at(0),imgSize.at(1) }; + Ort::Value input_tensor = Ort::Value::CreateTensor(Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU), blob, 3 * imgSize.at(0) * imgSize.at(1), YOLO_input_node_dims.data(), YOLO_input_node_dims.size()); + auto output_tensors = session->Run(options, inputNodeNames.data(), &input_tensor, 1, outputNodeNames.data(), outputNodeNames.size()); + delete[] blob; + clock_t starttime_4 = clock(); + double post_process_time = (double)(starttime_4 - starttime_1) / CLOCKS_PER_SEC * 1000; + if (cudaEnable) + { + std::cout << "[YOLO_V8(CUDA)]: " << "Cuda warm-up cost " << post_process_time << " ms. 
" << std::endl; + } +#endif + } + return RET_OK; +} diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.cpp:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h new file mode 100755 index 0000000..3174ae9 --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h @@ -0,0 +1,93 @@ +#pragma once + +#define RET_OK nullptr + +#ifdef _WIN32 +#include +#include +#include +#endif + +#include +#include +#include +#include +#include "onnxruntime_cxx_api.h" + +#ifdef USE_CUDA +#include +#endif + + +enum MODEL_TYPE +{ + //FLOAT32 MODEL + YOLO_DETECT_V8 = 1, + YOLO_POSE = 2, + YOLO_CLS = 3, + + //FLOAT16 MODEL + YOLO_DETECT_V8_HALF = 4, + YOLO_POSE_V8_HALF = 5, +}; + + +typedef struct _DL_INIT_PARAM +{ + std::string modelPath; + MODEL_TYPE modelType = YOLO_DETECT_V8; + std::vector imgSize = { 640, 640 }; + float rectConfidenceThreshold = 0.6; + float iouThreshold = 0.5; + int keyPointsNum = 2;//Note:kpt number for pose + bool cudaEnable = false; + int logSeverityLevel = 3; + int intraOpNumThreads = 1; +} DL_INIT_PARAM; + + +typedef struct _DL_RESULT +{ + int classId; + float confidence; + cv::Rect box; + std::vector keyPoints; +} DL_RESULT; + + +class YOLO_V8 +{ +public: + YOLO_V8(); + + ~YOLO_V8(); + +public: + char* CreateSession(DL_INIT_PARAM& iParams); + + char* RunSession(cv::Mat& iImg, std::vector& oResult); + + char* WarmUpSession(); + + template + char* TensorProcess(clock_t& starttime_1, cv::Mat& iImg, N& blob, std::vector& inputNodeDims, + std::vector& oResult); + + char* PreProcess(cv::Mat& iImg, std::vector iImgSize, cv::Mat& oImg); + + std::vector classes{}; + +private: + Ort::Env env; + Ort::Session* session; + bool cudaEnable; + Ort::RunOptions options; + std::vector inputNodeNames; + std::vector outputNodeNames; + + MODEL_TYPE modelType; + std::vector imgSize; + float rectConfidenceThreshold; + float iouThreshold; + float resizeScales;//letterbox scale +}; diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp new file mode 100755 index 0000000..6e4ef1d --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp @@ -0,0 +1,193 @@ +#include +#include +#include "inference.h" +#include +#include +#include + +void Detector(YOLO_V8*& p) { + std::filesystem::path current_path = std::filesystem::current_path(); + std::filesystem::path imgs_path = current_path / "images"; + for (auto& i : std::filesystem::directory_iterator(imgs_path)) + { + if (i.path().extension() == ".jpg" || i.path().extension() == ".png" || i.path().extension() == ".jpeg") + { + std::string img_path = i.path().string(); + cv::Mat img = cv::imread(img_path); + std::vector res; + p->RunSession(img, res); + + for (auto& re : res) + { + cv::RNG rng(cv::getTickCount()); + cv::Scalar color(rng.uniform(0, 256), rng.uniform(0, 256), rng.uniform(0, 
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/inference.h:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp
new file mode 100755
index 0000000..6e4ef1d
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp
@@ -0,0 +1,193 @@
+#include <iostream>
+#include <iomanip>
+#include "inference.h"
+#include <filesystem>
+#include <fstream>
+#include <sstream>
+#include <random>
+
+void Detector(YOLO_V8*& p) {
+    std::filesystem::path current_path = std::filesystem::current_path();
+    std::filesystem::path imgs_path = current_path / "images";
+    for (auto& i : std::filesystem::directory_iterator(imgs_path))
+    {
+        if (i.path().extension() == ".jpg" || i.path().extension() == ".png" || i.path().extension() == ".jpeg")
+        {
+            std::string img_path = i.path().string();
+            cv::Mat img = cv::imread(img_path);
+            std::vector<DL_RESULT> res;
+            p->RunSession(img, res);
+
+            for (auto& re : res)
+            {
+                cv::RNG rng(cv::getTickCount());
+                cv::Scalar color(rng.uniform(0, 256), rng.uniform(0, 256), rng.uniform(0, 256));
+
+                cv::rectangle(img, re.box, color, 3);
+
+                float confidence = floor(100 * re.confidence) / 100;
+                std::cout << std::fixed << std::setprecision(2);
+                std::string label = p->classes[re.classId] + " " +
+                    std::to_string(confidence).substr(0, std::to_string(confidence).size() - 4);
+
+                cv::rectangle(
+                    img,
+                    cv::Point(re.box.x, re.box.y - 25),
+                    cv::Point(re.box.x + label.length() * 15, re.box.y),
+                    color,
+                    cv::FILLED
+                );
+
+                cv::putText(
+                    img,
+                    label,
+                    cv::Point(re.box.x, re.box.y - 5),
+                    cv::FONT_HERSHEY_SIMPLEX,
+                    0.75,
+                    cv::Scalar(0, 0, 0),
+                    2
+                );
+            }
+            std::cout << "Press any key to exit" << std::endl;
+            cv::imshow("Result of Detection", img);
+            cv::waitKey(0);
+            cv::destroyAllWindows();
+        }
+    }
+}
+
+
+void Classifier(YOLO_V8*& p)
+{
+    std::filesystem::path current_path = std::filesystem::current_path();
+    std::filesystem::path imgs_path = current_path; // / "images"
+    std::random_device rd;
+    std::mt19937 gen(rd());
+    std::uniform_int_distribution<int> dis(0, 255);
+    for (auto& i : std::filesystem::directory_iterator(imgs_path))
+    {
+        if (i.path().extension() == ".jpg" || i.path().extension() == ".png")
+        {
+            std::string img_path = i.path().string();
+            //std::cout << img_path << std::endl;
+            cv::Mat img = cv::imread(img_path);
+            std::vector<DL_RESULT> res;
+            char* ret = p->RunSession(img, res);
+
+            float positionY = 50;
+            for (int i = 0; i < res.size(); i++)
+            {
+                int r = dis(gen);
+                int g = dis(gen);
+                int b = dis(gen);
+                cv::putText(img, std::to_string(i) + ":", cv::Point(10, positionY), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(b, g, r), 2);
+                cv::putText(img, std::to_string(res.at(i).confidence), cv::Point(70, positionY), cv::FONT_HERSHEY_SIMPLEX, 1, cv::Scalar(b, g, r), 2);
+                positionY += 50;
+            }
+
+            cv::imshow("TEST_CLS", img);
+            cv::waitKey(0);
+            cv::destroyAllWindows();
+            //cv::imwrite("E:\\output\\" + std::to_string(k) + ".png", img);
+        }
+    }
+}
+
+
+
+int ReadCocoYaml(YOLO_V8*& p) {
+    // Open the YAML file
+    std::ifstream file("coco.yaml");
+    if (!file.is_open())
+    {
+        std::cerr << "Failed to open file" << std::endl;
+        return 1;
+    }
+
+    // Read the file line by line
+    std::string line;
+    std::vector<std::string> lines;
+    while (std::getline(file, line))
+    {
+        lines.push_back(line);
+    }
+
+    // Find the start and end of the names section
+    std::size_t start = 0;
+    std::size_t end = 0;
+    for (std::size_t i = 0; i < lines.size(); i++)
+    {
+        if (lines[i].find("names:") != std::string::npos)
+        {
+            start = i + 1;
+        }
+        else if (start > 0 && lines[i].find(':') == std::string::npos)
+        {
+            end = i;
+            break;
+        }
+    }
+
+    // Extract the names
+    std::vector<std::string> names;
+    for (std::size_t i = start; i < end; i++)
+    {
+        std::stringstream ss(lines[i]);
+        std::string name;
+        std::getline(ss, name, ':'); // Extract the number before the delimiter
+        std::getline(ss, name);      // Extract the string after the delimiter
+        names.push_back(name);
+    }
+
+    p->classes = names;
+    return 0;
+}
+
+
+void DetectTest()
+{
+    YOLO_V8* yoloDetector = new YOLO_V8;
+    ReadCocoYaml(yoloDetector);
+    DL_INIT_PARAM params;
+    params.rectConfidenceThreshold = 0.1;
+    params.iouThreshold = 0.5;
+    params.modelPath = "yolov8n.onnx";
+    params.imgSize = { 640, 640 };
+#ifdef USE_CUDA
+    params.cudaEnable = true;
+
+    // GPU FP32 inference
+    params.modelType = YOLO_DETECT_V8;
+    // GPU FP16 inference
+    // Note: requires an FP16 ONNX model
+    //params.modelType = YOLO_DETECT_V8_HALF;
+
+#else
+    // CPU inference
+    params.modelType = YOLO_DETECT_V8;
+    params.cudaEnable = false;
+
+#endif
+    yoloDetector->CreateSession(params);
+    Detector(yoloDetector);
+}
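+
+// Note: for brevity, the test functions above and below leak the heap-allocated
+// YOLO_V8 instance; in longer-lived code, prefer std::unique_ptr<YOLO_V8> so the
+// ONNX Runtime session is released deterministically.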
+
+
+void ClsTest()
+{
+    YOLO_V8* yoloDetector = new YOLO_V8;
+    std::string model_path = "cls.onnx";
+    ReadCocoYaml(yoloDetector);
+    DL_INIT_PARAM params{ model_path, YOLO_CLS, {224, 224} };
+    yoloDetector->CreateSession(params);
+    Classifier(yoloDetector);
+}
+
+
+int main()
+{
+    //DetectTest();
+    ClsTest();
+}
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-CPP/main.cpp:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml
new file mode 100755
index 0000000..101f72e
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "yolov8-rs"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+clap = { version = "4.2.4", features = ["derive"] }
+image = { version = "0.24.7", default-features = false, features = ["jpeg", "png", "webp-encoder"] }
+imageproc = { version = "0.23.0", default-features = false }
+ndarray = { version = "0.15.6" }
+ort = { version = "1.16.3", default-features = false, features = ["load-dynamic", "copy-dylibs", "half"] }
+rusttype = { version = "0.9", default-features = false }
+anyhow = { version = "1.0.75" }
+regex = { version = "1.5.4" }
+rand = { version = "0.8.5" }
+chrono = { version = "0.4.30" }
+half = { version = "2.3.1" }
+dirs = { version = "5.0.1" }
+ureq = { version = "2.9.1" }
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/Cargo.toml:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/README.md b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/README.md
new file mode 100755
index 0000000..8961d9c
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/README.md
@@ -0,0 +1,221 @@
+# YOLOv8-ONNXRuntime-Rust for All the Key YOLO Tasks
+
+This repository provides a Rust demo for performing YOLOv8 tasks like `Classification`, `Segmentation`, `Detection` and `Pose Detection` using ONNXRuntime.
+
+## Features
+
+- Supports the `Classification`, `Segmentation`, `Detection` and `Pose (Keypoints) Detection` tasks.
+- Supports `FP16` & `FP32` ONNX models.
+- Supports the `CPU`, `CUDA` and `TensorRT` execution providers to accelerate computation.
+- Supports dynamic input shapes (`batch`, `width`, `height`).
+
+## Installation
+
+### 1. Install Rust
+
+Please follow the official Rust installation guide. (https://www.rust-lang.org/tools/install)
+
+### 2. Install ONNXRuntime
+
+This repository uses the `ort` crate, an ONNXRuntime wrapper for Rust. (https://docs.rs/ort/latest/ort/)
+
+You can follow the instructions in the `ort` docs or simply do this:
+
+- step 1: Download ONNXRuntime (https://github.com/microsoft/onnxruntime/releases)
+- step 2: Set the library path environment variable for linking.
+
+On Ubuntu, you can do it like this:
+
+```
+vim ~/.bashrc
+
+# Add the path of the ONNXRuntime lib
+export LD_LIBRARY_PATH=/home/qweasd/Documents/onnxruntime-linux-x64-gpu-1.16.3/lib${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
+
+source ~/.bashrc
+```
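+
+On Windows, the equivalent (session-scoped) step in `cmd` is to add the ONNX Runtime `lib` folder to `PATH`; the install location below is just an assumed example:
+
+```
+set PATH=%PATH%;C:\onnxruntime\lib
+```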
+
+### 3. \[Optional\] Install CUDA & CuDNN & TensorRT
+
+- CUDA execution provider requires CUDA v11.6+.
+- TensorRT execution provider requires CUDA v11.4+ and TensorRT v8.4+.
+
+## Get Started
+
+### 1. Export the YOLOv8 ONNX Models
+
+```bash
+pip install -U ultralytics
+
+# export onnx model with dynamic shapes
+yolo export model=yolov8m.pt format=onnx simplify dynamic
+yolo export model=yolov8m-cls.pt format=onnx simplify dynamic
+yolo export model=yolov8m-pose.pt format=onnx simplify dynamic
+yolo export model=yolov8m-seg.pt format=onnx simplify dynamic
+
+
+# export onnx model with constant shapes
+yolo export model=yolov8m.pt format=onnx simplify
+yolo export model=yolov8m-cls.pt format=onnx simplify
+yolo export model=yolov8m-pose.pt format=onnx simplify
+yolo export model=yolov8m-seg.pt format=onnx simplify
+```
+
+### 2. Run Inference
+
+It will perform inference with the ONNX model on the source image.
+
+```
+cargo run --release -- --model <MODEL> --source <SOURCE>
+```
+
+Set `--cuda` to use the CUDA execution provider to speed up inference.
+
+```
+cargo run --release -- --cuda --model <MODEL> --source <SOURCE>
+```
+
+Set `--trt` to use the TensorRT execution provider; you can also set `--fp16` at the same time to use the TensorRT FP16 engine.
+
+```
+cargo run --release -- --trt --fp16 --model <MODEL> --source <SOURCE>
+```
+
+Set `--device_id` to select which device to run on. If you have only one GPU and set `device_id` to 1, the program will not panic; `ort` automatically falls back to the `CPU` EP.
+
+```
+cargo run --release -- --cuda --device_id 0 --model <MODEL> --source <SOURCE>
+```
+
+Set `--batch` to do multi-batch-size inference.
+
+If you're using `--trt`, you can also set `--batch-min` and `--batch-max` to explicitly specify the min/max/opt batch sizes for dynamic batch input (https://onnxruntime.ai/docs/execution-providers/TensorRT-ExecutionProvider.html#explicit-shape-range-for-dynamic-shape-input). (Note that the ONNX model should be exported with dynamic shapes.)
+
+```
+cargo run --release -- --cuda --batch 2 --model <MODEL> --source <SOURCE>
+```
+
+Set `--height` and `--width` to do dynamic image size inference. (Note that the ONNX model should be exported with dynamic shapes.)
+
+```
+cargo run --release -- --cuda --width 480 --height 640 --model <MODEL> --source <SOURCE>
+```
+
+Set `--profile` to check the time consumed in each stage. (Note that the model usually needs 1~3 dry runs to warm up. Make sure to run enough times to evaluate the result.)
+
+```
+cargo run --release -- --trt --fp16 --profile --model <MODEL> --source <SOURCE>
+```
+
+Results: (yolov8m.onnx, batch=1, 3 times, trt, fp16, RTX 3060Ti)
+
+```
+==> 0
+[Model Preprocess]: 12.75788ms
+[ORT H2D]: 237.118µs
+[ORT Inference]: 507.895469ms
+[ORT D2H]: 191.655µs
+[Model Inference]: 508.34589ms
+[Model Postprocess]: 1.061122ms
+==> 1
+[Model Preprocess]: 13.658655ms
+[ORT H2D]: 209.975µs
+[ORT Inference]: 5.12372ms
+[ORT D2H]: 182.389µs
+[Model Inference]: 5.530022ms
+[Model Postprocess]: 1.04851ms
+==> 2
+[Model Preprocess]: 12.475332ms
+[ORT H2D]: 246.127µs
+[ORT Inference]: 5.048432ms
+[ORT D2H]: 187.117µs
+[Model Inference]: 5.493119ms
+[Model Postprocess]: 1.040906ms
+```
+
+And also:
+
+`--conf`: confidence threshold \[default: 0.3\]
+
+`--iou`: iou threshold in NMS \[default: 0.45\]
+
+`--kconf`: confidence threshold of keypoint \[default: 0.55\]
+
+`--plot`: plot inference result with random RGB color and save
+
+You can check out all CLI arguments by:
+
+```
+git clone https://github.com/ultralytics/ultralytics
+cd ultralytics/examples/YOLOv8-ONNXRuntime-Rust
+cargo run --release -- --help
+```
+
+## Examples
+
+### Classification
+
+Running a dynamic-shape ONNX model on `CPU` with image size `--height 224 --width 224`. The plotted image is saved in the `runs` directory.
+
+```
+cargo run --release -- --model ../assets/weights/yolov8m-cls-dyn.onnx --source ../assets/images/dog.jpg --height 224 --width 224 --plot --profile
+```
+
+You will see output like:
+
+```
+Summary:
+> Task: Classify (Ultralytics 8.0.217)
+> EP: Cpu
+> Dtype: Float32
+> Batch: 1 (Dynamic), Height: 224 (Dynamic), Width: 224 (Dynamic)
+> nc: 1000 nk: 0, nm: 0, conf: 0.3, kconf: 0.55, iou: 0.45
+
+[Model Preprocess]: 16.363477ms
+[ORT H2D]: 50.722µs
+[ORT Inference]: 16.295808ms
+[ORT D2H]: 8.37µs
+[Model Inference]: 16.367046ms
+[Model Postprocess]: 3.527µs
+[
+    YOLOResult {
+        Probs(top5): Some([(208, 0.6950566), (209, 0.13823675), (178, 0.04849795), (215, 0.019029364), (212, 0.016506357)]),
+        Bboxes: None,
+        Keypoints: None,
+        Masks: None,
+    },
+]
+```
+
+![2023-11-25-22-02-02-156623351](https://github.com/jamjamjon/ultralytics/assets/51357717/ef75c2ae-c5ab-44cc-9d9e-e60b51e39662)
+
+### Object Detection
+
+Using the `CUDA` EP with dynamic image size `--height 640 --width 480`:
+
+```
+cargo run --release -- --cuda --model ../assets/weights/yolov8m-dynamic.onnx --source ../assets/images/bus.jpg --plot --height 640 --width 480
+```
+
+![det](https://github.com/jamjamjon/ultralytics/assets/51357717/5d89a19d-0c96-4a59-875c-defab6887a2c)
+
+### Pose Detection
+
+Using the `TensorRT` EP:
+
+```
+cargo run --release -- --trt --model ../assets/weights/yolov8m-pose.onnx --source ../assets/images/bus.jpg --plot
+```
+
+![2023-11-25-22-31-45-127054025](https://github.com/jamjamjon/ultralytics/assets/51357717/157b5ba7-bfcf-47cf-bee7-68b62e0de1c4)
+
+### Instance Segmentation
+
+Using the `TensorRT` EP and an FP16 model (`--fp16`):
+
+```
+cargo run --release -- --trt --fp16 --model ../assets/weights/yolov8m-seg.onnx --source ../assets/images/0172.jpg --plot
+```
+
+![seg](https://github.com/jamjamjon/ultralytics/assets/51357717/cf046f4f-9533-478a-adc7-4de22443a641)
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs
new file mode 100755
index 0000000..2ba0dd4
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs
@@ -0,0 +1,87 @@
+use clap::Parser;
+
+use crate::YOLOTask;
+
+#[derive(Parser, Clone)]
+#[command(author, version, about, long_about = None)]
+pub struct Args {
+    /// ONNX model path
+    #[arg(long, required = true)]
+    pub model: String,
+
+    /// input path
+    #[arg(long, required = true)]
+    pub source: String,
+
+    /// device id
+    #[arg(long, default_value_t = 0)]
+    pub device_id: u32,
+
+    /// using TensorRT EP
+    #[arg(long)]
+    pub trt: bool,
+
+    /// using CUDA EP
+    #[arg(long)]
+    pub cuda: bool,
+
+    /// input batch size
+    #[arg(long, default_value_t = 1)]
+    pub batch: u32,
+
+    /// trt input min_batch size
+    #[arg(long, default_value_t = 1)]
+    pub batch_min: u32,
+
+    /// trt input max_batch size
+    #[arg(long, default_value_t = 32)]
+    pub batch_max: u32,
+
+    /// using TensorRT --fp16
+    #[arg(long)]
+    pub fp16: bool,
+
+    /// specify YOLO task
+    #[arg(long, value_enum)]
+    pub task: Option<YOLOTask>,
+
+    /// num_classes
+    #[arg(long)]
+    pub nc: Option<u32>,
+
+    /// num_keypoints
+    #[arg(long)]
+    pub nk: Option<u32>,
+
+    /// num_masks
+    #[arg(long)]
+    pub nm: Option<u32>,
+
+    /// input image width
+    #[arg(long)]
+    pub width: Option<u32>,
+
+    /// input image height
+    #[arg(long)]
+    pub height: Option<u32>,
+
+    /// confidence threshold
+    #[arg(long, required = false, default_value_t = 0.3)]
+    pub conf: f32,
+
+    /// iou threshold in NMS
+    #[arg(long, required = false, default_value_t = 0.45)]
+    pub iou: f32,
+
+    /// confidence threshold of keypoint
+    #[arg(long, required = false, default_value_t = 0.55)]
+    pub kconf: f32,
+
+    /// plot inference result and save
+    #[arg(long)]
+    pub plot: bool,
+
+    /// check time consumed in each stage
+    #[arg(long)]
+    pub profile: bool,
+}
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/cli.rs:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs
new file mode 100755
index 0000000..1af7f7c
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs
@@ -0,0 +1,119 @@
+#![allow(clippy::type_complexity)]
+
+use std::io::{Read, Write};
+
+pub mod cli;
+pub mod model;
+pub mod ort_backend;
+pub mod yolo_result;
+pub use crate::cli::Args;
+pub use crate::model::YOLOv8;
+pub use crate::ort_backend::{Batch, OrtBackend, OrtConfig, OrtEP, YOLOTask};
+pub use crate::yolo_result::{Bbox, Embedding, Point2, YOLOResult};
+
+pub fn non_max_suppression(
+    xs: &mut Vec<(Bbox, Option<Vec<Point2>>, Option<Vec<f32>>)>,
+    iou_threshold: f32,
+) {
+    xs.sort_by(|b1, b2| b2.0.confidence().partial_cmp(&b1.0.confidence()).unwrap());
+
+    let mut current_index = 0;
+    for index in 0..xs.len() {
+        let mut drop = false;
+        for prev_index in 0..current_index {
+            let iou = xs[prev_index].0.iou(&xs[index].0);
+            if iou > iou_threshold {
+                drop = true;
+                break;
+            }
+        }
+        if !drop {
+            xs.swap(current_index, index);
+            current_index += 1;
+        }
+    }
+    xs.truncate(current_index);
+}
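+
+// Example (hypothetical values): two heavily overlapping boxes whose IoU exceeds
+// the 0.45 threshold, so the lower-confidence one is dropped:
+//
+//   let mut dets = vec![
+//       (Bbox::new(0., 0., 10., 10., 0, 0.9), None, None),
+//       (Bbox::new(1., 1., 10., 10., 0, 0.8), None, None),
+//   ];
+//   non_max_suppression(&mut dets, 0.45);
+//   assert_eq!(dets.len(), 1);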
+
+pub fn gen_time_string(delimiter: &str) -> String {
+    let offset = chrono::FixedOffset::east_opt(8 * 60 * 60).unwrap(); // Beijing
+    let t_now = chrono::Utc::now().with_timezone(&offset);
+    let fmt = format!(
+        "%Y{}%m{}%d{}%H{}%M{}%S{}%f",
+        delimiter, delimiter, delimiter, delimiter, delimiter, delimiter
+    );
+    t_now.format(&fmt).to_string()
+}
+
+pub const SKELETON: [(usize, usize); 16] = [
+    (0, 1),
+    (0, 2),
+    (1, 3),
+    (2, 4),
+    (5, 6),
+    (5, 11),
+    (6, 12),
+    (11, 12),
+    (5, 7),
+    (6, 8),
+    (7, 9),
+    (8, 10),
+    (11, 13),
+    (12, 14),
+    (13, 15),
+    (14, 16),
+];
+
+pub fn check_font(font: &str) -> rusttype::Font<'static> {
+    // check then load font
+
+    // ultralytics font path
+    let font_path_config = match dirs::config_dir() {
+        Some(mut d) => {
+            d.push("Ultralytics");
+            d.push(font);
+            d
+        }
+        None => panic!("Unsupported operating system. Only Linux, macOS and Windows are supported."),
+    };
+
+    // current font path
+    let font_path_current = std::path::PathBuf::from(font);
+
+    // check font
+    let font_path = if font_path_config.exists() {
+        font_path_config
+    } else if font_path_current.exists() {
+        font_path_current
+    } else {
+        println!("Downloading font...");
+        let source_url = "https://ultralytics.com/assets/Arial.ttf";
+        let resp = ureq::get(source_url)
+            .timeout(std::time::Duration::from_secs(500))
+            .call()
+            .unwrap_or_else(|err| panic!("> Failed to download font: {source_url}: {err:?}"));
+
+        // read to buffer
+        let mut buffer = vec![];
+        let total_size = resp
+            .header("Content-Length")
+            .and_then(|s| s.parse::<u64>().ok())
+            .unwrap();
+        let _reader = resp
+            .into_reader()
+            .take(total_size)
+            .read_to_end(&mut buffer)
+            .unwrap();
+
+        // save
+        let _path = std::fs::File::create(font).unwrap();
+        let mut writer = std::io::BufWriter::new(_path);
+        writer.write_all(&buffer).unwrap();
+        println!("Font saved at: {:?}", font_path_current.display());
+        font_path_current
+    };
+
+    // load font
+    let buffer = std::fs::read(font_path).unwrap();
+    rusttype::Font::try_from_vec(buffer).unwrap()
+}
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/lib.rs:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs
new file mode 100755
index 0000000..8dd1567
--- /dev/null
+++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs
@@ -0,0 +1,28 @@
+use clap::Parser;
+
+use yolov8_rs::{Args, YOLOv8};
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let args = Args::parse();
+
+    // 1. load image
+    let x = image::io::Reader::open(&args.source)?
+        .with_guessed_format()?
+        .decode()?;
+
+    // 2. the model supports dynamic batch inference, so the input should be a Vec<DynamicImage>
+    let xs = vec![x];
+
+    // You can test `--batch 2` with this
+    // let xs = vec![x.clone(), x];
+
+    // 3. build yolov8 model
+    let mut model = YOLOv8::new(args)?;
+    model.summary(); // model info
+
+    // 4. 
run + let ys = model.run(&xs)?; + println!("{:?}", ys); + + Ok(()) +} diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/main.rs:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs new file mode 100755 index 0000000..1c0e5e4 --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs @@ -0,0 +1,642 @@ +#![allow(clippy::type_complexity)] + +use anyhow::Result; +use image::{DynamicImage, GenericImageView, ImageBuffer}; +use ndarray::{s, Array, Axis, IxDyn}; +use rand::{thread_rng, Rng}; +use std::path::PathBuf; + +use crate::{ + check_font, gen_time_string, non_max_suppression, Args, Batch, Bbox, Embedding, OrtBackend, + OrtConfig, OrtEP, Point2, YOLOResult, YOLOTask, SKELETON, +}; + +pub struct YOLOv8 { + // YOLOv8 model for all yolo-tasks + engine: OrtBackend, + nc: u32, + nk: u32, + nm: u32, + height: u32, + width: u32, + batch: u32, + task: YOLOTask, + conf: f32, + kconf: f32, + iou: f32, + names: Vec, + color_palette: Vec<(u8, u8, u8)>, + profile: bool, + plot: bool, +} + +impl YOLOv8 { + pub fn new(config: Args) -> Result { + // execution provider + let ep = if config.trt { + OrtEP::Trt(config.device_id) + } else if config.cuda { + OrtEP::Cuda(config.device_id) + } else { + OrtEP::Cpu + }; + + // batch + let batch = Batch { + opt: config.batch, + min: config.batch_min, + max: config.batch_max, + }; + + // build ort engine + let ort_args = OrtConfig { + ep, + batch, + f: config.model, + task: config.task, + trt_fp16: config.fp16, + image_size: (config.height, config.width), + }; + let engine = OrtBackend::build(ort_args)?; + + // get batch, height, width, tasks, nc, nk, nm + let (batch, height, width, task) = ( + engine.batch(), + engine.height(), + engine.width(), + engine.task(), + ); + let nc = engine.nc().or(config.nc).unwrap_or_else(|| { + panic!("Failed to get num_classes, make it explicit with `--nc`"); + }); + let (nk, nm) = match task { + YOLOTask::Pose => { + let nk = engine.nk().or(config.nk).unwrap_or_else(|| { + panic!("Failed to get num_keypoints, make it explicit with `--nk`"); + }); + (nk, 0) + } + YOLOTask::Segment => { + let nm = engine.nm().or(config.nm).unwrap_or_else(|| { + panic!("Failed to get num_masks, make it explicit with `--nm`"); + }); + (0, nm) + } + _ => (0, 0), + }; + + // class names + let names = engine.names().unwrap_or(vec!["Unknown".to_string()]); + + // color palette + let mut rng = thread_rng(); + let color_palette: Vec<_> = names + .iter() + .map(|_| { + ( + rng.gen_range(0..=255), + rng.gen_range(0..=255), + rng.gen_range(0..=255), + ) + }) + .collect(); + + Ok(Self { + engine, + names, + conf: config.conf, + kconf: config.kconf, + iou: config.iou, + color_palette, + profile: config.profile, + plot: config.plot, + nc, + nk, + nm, + height, + width, + batch, + task, + }) + } + + pub fn scale_wh(&self, w0: f32, h0: f32, w1: f32, h1: f32) -> (f32, f32, f32) { + let r = (w1 / w0).min(h1 / h0); + (r, (w0 * r).round(), (h0 * r).round()) + } + + pub fn preprocess(&mut self, xs: &Vec) -> Result> { + let mut ys = + Array::ones((xs.len(), 3, self.height() as usize, self.width() as usize)).into_dyn(); + ys.fill(144.0 / 255.0); + for (idx, x) in xs.iter().enumerate() { + let img = match 
self.task() { + YOLOTask::Classify => x.resize_exact( + self.width(), + self.height(), + image::imageops::FilterType::Triangle, + ), + _ => { + let (w0, h0) = x.dimensions(); + let w0 = w0 as f32; + let h0 = h0 as f32; + let (_, w_new, h_new) = + self.scale_wh(w0, h0, self.width() as f32, self.height() as f32); // f32 round + x.resize_exact( + w_new as u32, + h_new as u32, + if let YOLOTask::Segment = self.task() { + image::imageops::FilterType::CatmullRom + } else { + image::imageops::FilterType::Triangle + }, + ) + } + }; + + for (x, y, rgb) in img.pixels() { + let x = x as usize; + let y = y as usize; + let [r, g, b, _] = rgb.0; + ys[[idx, 0, y, x]] = (r as f32) / 255.0; + ys[[idx, 1, y, x]] = (g as f32) / 255.0; + ys[[idx, 2, y, x]] = (b as f32) / 255.0; + } + } + + Ok(ys) + } + + pub fn run(&mut self, xs: &Vec) -> Result> { + // pre-process + let t_pre = std::time::Instant::now(); + let xs_ = self.preprocess(xs)?; + if self.profile { + println!("[Model Preprocess]: {:?}", t_pre.elapsed()); + } + + // run + let t_run = std::time::Instant::now(); + let ys = self.engine.run(xs_, self.profile)?; + if self.profile { + println!("[Model Inference]: {:?}", t_run.elapsed()); + } + + // post-process + let t_post = std::time::Instant::now(); + let ys = self.postprocess(ys, xs)?; + if self.profile { + println!("[Model Postprocess]: {:?}", t_post.elapsed()); + } + + // plot and save + if self.plot { + self.plot_and_save(&ys, xs, Some(&SKELETON)); + } + Ok(ys) + } + + pub fn postprocess( + &self, + xs: Vec>, + xs0: &[DynamicImage], + ) -> Result> { + if let YOLOTask::Classify = self.task() { + let mut ys = Vec::new(); + let preds = &xs[0]; + for batch in preds.axis_iter(Axis(0)) { + ys.push(YOLOResult::new( + Some(Embedding::new(batch.into_owned())), + None, + None, + None, + )); + } + Ok(ys) + } else { + const CXYWH_OFFSET: usize = 4; // cxcywh + const KPT_STEP: usize = 3; // xyconf + let preds = &xs[0]; + let protos = { + if xs.len() > 1 { + Some(&xs[1]) + } else { + None + } + }; + let mut ys = Vec::new(); + for (idx, anchor) in preds.axis_iter(Axis(0)).enumerate() { + // [bs, 4 + nc + nm, anchors] + // input image + let width_original = xs0[idx].width() as f32; + let height_original = xs0[idx].height() as f32; + let ratio = (self.width() as f32 / width_original) + .min(self.height() as f32 / height_original); + + // save each result + let mut data: Vec<(Bbox, Option>, Option>)> = Vec::new(); + for pred in anchor.axis_iter(Axis(1)) { + // split preds for different tasks + let bbox = pred.slice(s![0..CXYWH_OFFSET]); + let clss = pred.slice(s![CXYWH_OFFSET..CXYWH_OFFSET + self.nc() as usize]); + let kpts = { + if let YOLOTask::Pose = self.task() { + Some(pred.slice(s![pred.len() - KPT_STEP * self.nk() as usize..])) + } else { + None + } + }; + let coefs = { + if let YOLOTask::Segment = self.task() { + Some(pred.slice(s![pred.len() - self.nm() as usize..]).to_vec()) + } else { + None + } + }; + + // confidence and id + let (id, &confidence) = clss + .into_iter() + .enumerate() + .reduce(|max, x| if x.1 > max.1 { x } else { max }) + .unwrap(); // definitely will not panic! 
+ + // confidence filter + if confidence < self.conf { + continue; + } + + // bbox re-scale + let cx = bbox[0] / ratio; + let cy = bbox[1] / ratio; + let w = bbox[2] / ratio; + let h = bbox[3] / ratio; + let x = cx - w / 2.; + let y = cy - h / 2.; + let y_bbox = Bbox::new( + x.max(0.0f32).min(width_original), + y.max(0.0f32).min(height_original), + w, + h, + id, + confidence, + ); + + // kpts + let y_kpts = { + if let Some(kpts) = kpts { + let mut kpts_ = Vec::new(); + // rescale + for i in 0..self.nk() as usize { + let kx = kpts[KPT_STEP * i] / ratio; + let ky = kpts[KPT_STEP * i + 1] / ratio; + let kconf = kpts[KPT_STEP * i + 2]; + if kconf < self.kconf { + kpts_.push(Point2::default()); + } else { + kpts_.push(Point2::new_with_conf( + kx.max(0.0f32).min(width_original), + ky.max(0.0f32).min(height_original), + kconf, + )); + } + } + Some(kpts_) + } else { + None + } + }; + + // data merged + data.push((y_bbox, y_kpts, coefs)); + } + + // nms + non_max_suppression(&mut data, self.iou); + + // decode + let mut y_bboxes: Vec = Vec::new(); + let mut y_kpts: Vec> = Vec::new(); + let mut y_masks: Vec> = Vec::new(); + for elem in data.into_iter() { + if let Some(kpts) = elem.1 { + y_kpts.push(kpts) + } + + // decode masks + if let Some(coefs) = elem.2 { + let proto = protos.unwrap().slice(s![idx, .., .., ..]); + let (nm, nh, nw) = proto.dim(); + + // coefs * proto -> mask + let coefs = Array::from_shape_vec((1, nm), coefs)?; // (n, nm) + let proto = proto.to_owned().into_shape((nm, nh * nw))?; // (nm, nh*nw) + let mask = coefs.dot(&proto).into_shape((nh, nw, 1))?; // (nh, nw, n) + + // build image from ndarray + let mask_im: ImageBuffer, Vec> = + match ImageBuffer::from_raw(nw as u32, nh as u32, mask.into_raw_vec()) { + Some(image) => image, + None => panic!("can not create image from ndarray"), + }; + let mut mask_im = image::DynamicImage::from(mask_im); // -> dyn + + // rescale masks + let (_, w_mask, h_mask) = + self.scale_wh(width_original, height_original, nw as f32, nh as f32); + let mask_cropped = mask_im.crop(0, 0, w_mask as u32, h_mask as u32); + let mask_original = mask_cropped.resize_exact( + // resize_to_fill + width_original as u32, + height_original as u32, + match self.task() { + YOLOTask::Segment => image::imageops::FilterType::CatmullRom, + _ => image::imageops::FilterType::Triangle, + }, + ); + + // crop-mask with bbox + let mut mask_original_cropped = mask_original.into_luma8(); + for y in 0..height_original as usize { + for x in 0..width_original as usize { + if x < elem.0.xmin() as usize + || x > elem.0.xmax() as usize + || y < elem.0.ymin() as usize + || y > elem.0.ymax() as usize + { + mask_original_cropped.put_pixel( + x as u32, + y as u32, + image::Luma([0u8]), + ); + } + } + } + y_masks.push(mask_original_cropped.into_raw()); + } + y_bboxes.push(elem.0); + } + + // save each result + let y = YOLOResult { + probs: None, + bboxes: if !y_bboxes.is_empty() { + Some(y_bboxes) + } else { + None + }, + keypoints: if !y_kpts.is_empty() { + Some(y_kpts) + } else { + None + }, + masks: if !y_masks.is_empty() { + Some(y_masks) + } else { + None + }, + }; + ys.push(y); + } + + Ok(ys) + } + } + + pub fn plot_and_save( + &self, + ys: &[YOLOResult], + xs0: &[DynamicImage], + skeletons: Option<&[(usize, usize)]>, + ) { + // check font then load + let font = check_font("Arial.ttf"); + for (_idb, (img0, y)) in xs0.iter().zip(ys.iter()).enumerate() { + let mut img = img0.to_rgb8(); + + // draw for classifier + if let Some(probs) = y.probs() { + for (i, k) in 
probs.topk(5).iter().enumerate() { + let legend = format!("{} {:.2}%", self.names[k.0], k.1); + let scale = 32; + let legend_size = img.width().max(img.height()) / scale; + let x = img.width() / 20; + let y = img.height() / 20 + i as u32 * legend_size; + imageproc::drawing::draw_text_mut( + &mut img, + image::Rgb([0, 255, 0]), + x as i32, + y as i32, + rusttype::Scale::uniform(legend_size as f32 - 1.), + &font, + &legend, + ); + } + } + + // draw bboxes & keypoints + if let Some(bboxes) = y.bboxes() { + for (_idx, bbox) in bboxes.iter().enumerate() { + // rect + imageproc::drawing::draw_hollow_rect_mut( + &mut img, + imageproc::rect::Rect::at(bbox.xmin() as i32, bbox.ymin() as i32) + .of_size(bbox.width() as u32, bbox.height() as u32), + image::Rgb(self.color_palette[bbox.id()].into()), + ); + + // text + let legend = format!("{} {:.2}%", self.names[bbox.id()], bbox.confidence()); + let scale = 40; + let legend_size = img.width().max(img.height()) / scale; + imageproc::drawing::draw_text_mut( + &mut img, + image::Rgb(self.color_palette[bbox.id()].into()), + bbox.xmin() as i32, + (bbox.ymin() - legend_size as f32) as i32, + rusttype::Scale::uniform(legend_size as f32 - 1.), + &font, + &legend, + ); + } + } + + // draw kpts + if let Some(keypoints) = y.keypoints() { + for kpts in keypoints.iter() { + for kpt in kpts.iter() { + // filter + if kpt.confidence() < self.kconf { + continue; + } + + // draw point + imageproc::drawing::draw_filled_circle_mut( + &mut img, + (kpt.x() as i32, kpt.y() as i32), + 2, + image::Rgb([0, 255, 0]), + ); + } + + // draw skeleton if has + if let Some(skeletons) = skeletons { + for &(idx1, idx2) in skeletons.iter() { + let kpt1 = &kpts[idx1]; + let kpt2 = &kpts[idx2]; + if kpt1.confidence() < self.kconf || kpt2.confidence() < self.kconf { + continue; + } + imageproc::drawing::draw_line_segment_mut( + &mut img, + (kpt1.x(), kpt1.y()), + (kpt2.x(), kpt2.y()), + image::Rgb([233, 14, 57]), + ); + } + } + } + } + + // draw mask + if let Some(masks) = y.masks() { + for (mask, _bbox) in masks.iter().zip(y.bboxes().unwrap().iter()) { + let mask_nd: ImageBuffer, Vec> = + match ImageBuffer::from_vec(img.width(), img.height(), mask.to_vec()) { + Some(image) => image, + None => panic!("can not crate image from ndarray"), + }; + + for _x in 0..img.width() { + for _y in 0..img.height() { + let mask_p = imageproc::drawing::Canvas::get_pixel(&mask_nd, _x, _y); + if mask_p.0[0] > 0 { + let mut img_p = imageproc::drawing::Canvas::get_pixel(&img, _x, _y); + // img_p.0[2] = self.color_palette[bbox.id()].2 / 2; + // img_p.0[1] = self.color_palette[bbox.id()].1 / 2; + // img_p.0[0] = self.color_palette[bbox.id()].0 / 2; + img_p.0[2] /= 2; + img_p.0[1] = 255 - (255 - img_p.0[2]) / 2; + img_p.0[0] /= 2; + imageproc::drawing::Canvas::draw_pixel(&mut img, _x, _y, img_p) + } + } + } + } + } + + // mkdir and save + let mut runs = PathBuf::from("runs"); + if !runs.exists() { + std::fs::create_dir_all(&runs).unwrap(); + } + runs.push(gen_time_string("-")); + let saveout = format!("{}.jpg", runs.to_str().unwrap()); + let _ = img.save(saveout); + } + } + + pub fn summary(&self) { + println!( + "\nSummary:\n\ + > Task: {:?}{}\n\ + > EP: {:?} {}\n\ + > Dtype: {:?}\n\ + > Batch: {} ({}), Height: {} ({}), Width: {} ({})\n\ + > nc: {} nk: {}, nm: {}, conf: {}, kconf: {}, iou: {}\n\ + ", + self.task(), + match self.engine.author().zip(self.engine.version()) { + Some((author, ver)) => format!(" ({} {})", author, ver), + None => String::from(""), + }, + self.engine.ep(), + if let OrtEP::Cpu = 
self.engine.ep() { + "" + } else { + "(May still fall back to CPU)" + }, + self.engine.dtype(), + self.batch(), + if self.engine.is_batch_dynamic() { + "Dynamic" + } else { + "Const" + }, + self.height(), + if self.engine.is_height_dynamic() { + "Dynamic" + } else { + "Const" + }, + self.width(), + if self.engine.is_width_dynamic() { + "Dynamic" + } else { + "Const" + }, + self.nc(), + self.nk(), + self.nm(), + self.conf, + self.kconf, + self.iou, + ); + } + + pub fn engine(&self) -> &OrtBackend { + &self.engine + } + + pub fn conf(&self) -> f32 { + self.conf + } + + pub fn set_conf(&mut self, val: f32) { + self.conf = val; + } + + pub fn conf_mut(&mut self) -> &mut f32 { + &mut self.conf + } + + pub fn kconf(&self) -> f32 { + self.kconf + } + + pub fn iou(&self) -> f32 { + self.iou + } + + pub fn task(&self) -> &YOLOTask { + &self.task + } + + pub fn batch(&self) -> u32 { + self.batch + } + + pub fn width(&self) -> u32 { + self.width + } + + pub fn height(&self) -> u32 { + self.height + } + + pub fn nc(&self) -> u32 { + self.nc + } + + pub fn nk(&self) -> u32 { + self.nk + } + + pub fn nm(&self) -> u32 { + self.nm + } + + pub fn names(&self) -> &Vec { + &self.names + } +} diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/model.rs:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs new file mode 100755 index 0000000..5be93bd --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs @@ -0,0 +1,534 @@ +use anyhow::Result; +use clap::ValueEnum; +use half::f16; +use ndarray::{Array, CowArray, IxDyn}; +use ort::execution_providers::{CUDAExecutionProviderOptions, TensorRTExecutionProviderOptions}; +use ort::tensor::TensorElementDataType; +use ort::{Environment, ExecutionProvider, Session, SessionBuilder, Value}; +use regex::Regex; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] +pub enum YOLOTask { + // YOLO tasks + Classify, + Detect, + Pose, + Segment, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum OrtEP { + // ONNXRuntime execution provider + Cpu, + Cuda(u32), + Trt(u32), +} + +#[derive(Debug)] +pub struct Batch { + pub opt: u32, + pub min: u32, + pub max: u32, +} + +impl Default for Batch { + fn default() -> Self { + Self { + opt: 1, + min: 1, + max: 1, + } + } +} + +#[derive(Debug, Default)] +pub struct OrtInputs { + // ONNX model inputs attrs + pub shapes: Vec>, + pub dtypes: Vec, + pub names: Vec, + pub sizes: Vec>, +} + +impl OrtInputs { + pub fn new(session: &Session) -> Self { + let mut shapes = Vec::new(); + let mut dtypes = Vec::new(); + let mut names = Vec::new(); + for i in session.inputs.iter() { + let shape: Vec = i + .dimensions() + .map(|x| if let Some(x) = x { x as i32 } else { -1i32 }) + .collect(); + shapes.push(shape); + dtypes.push(i.input_type); + names.push(i.name.clone()); + } + Self { + shapes, + dtypes, + names, + ..Default::default() + } + } +} + +#[derive(Debug)] +pub struct OrtConfig { + // ORT config + pub f: String, + pub task: Option, + pub ep: OrtEP, + pub trt_fp16: bool, + pub batch: Batch, + pub image_size: (Option, Option), +} + +#[derive(Debug)] +pub struct OrtBackend { + // ORT engine + session: Session, + task: 
YOLOTask, + ep: OrtEP, + batch: Batch, + inputs: OrtInputs, +} + +impl OrtBackend { + pub fn build(args: OrtConfig) -> Result { + // build env & session + let env = Environment::builder() + .with_name("YOLOv8") + .with_log_level(ort::LoggingLevel::Verbose) + .build()? + .into_arc(); + let session = SessionBuilder::new(&env)?.with_model_from_file(&args.f)?; + + // get inputs + let mut inputs = OrtInputs::new(&session); + + // batch size + let mut batch = args.batch; + let batch = if inputs.shapes[0][0] == -1 { + batch + } else { + assert_eq!( + inputs.shapes[0][0] as u32, batch.opt, + "Expected batch size: {}, got {}. Try using `--batch {}`.", + inputs.shapes[0][0] as u32, batch.opt, inputs.shapes[0][0] as u32 + ); + batch.opt = inputs.shapes[0][0] as u32; + batch + }; + + // input size: height and width + let height = if inputs.shapes[0][2] == -1 { + match args.image_size.0 { + Some(height) => height, + None => panic!("Failed to get model height. Make it explicit with `--height`"), + } + } else { + inputs.shapes[0][2] as u32 + }; + let width = if inputs.shapes[0][3] == -1 { + match args.image_size.1 { + Some(width) => width, + None => panic!("Failed to get model width. Make it explicit with `--width`"), + } + } else { + inputs.shapes[0][3] as u32 + }; + inputs.sizes.push(vec![height, width]); + + // build provider + let (ep, provider) = match args.ep { + OrtEP::Cuda(device_id) => Self::set_ep_cuda(device_id), + OrtEP::Trt(device_id) => Self::set_ep_trt(device_id, args.trt_fp16, &batch, &inputs), + _ => (OrtEP::Cpu, ExecutionProvider::CPU(Default::default())), + }; + + // build session again with the new provider + let session = SessionBuilder::new(&env)? + // .with_optimization_level(ort::GraphOptimizationLevel::Level3)? + .with_execution_providers([provider])? + .with_model_from_file(args.f)?; + + // task: using given one or guessing + let task = match args.task { + Some(task) => task, + None => match session.metadata() { + Err(_) => panic!("No metadata found. Try making it explicit by `--task`"), + Ok(metadata) => match metadata.custom("task") { + Err(_) => panic!("Can not get custom value. Try making it explicit by `--task`"), + Ok(value) => match value { + None => panic!("No correspoing value of `task` found in metadata. Make it explicit by `--task`"), + Some(task) => match task.as_str() { + "classify" => YOLOTask::Classify, + "detect" => YOLOTask::Detect, + "pose" => YOLOTask::Pose, + "segment" => YOLOTask::Segment, + x => todo!("{:?} is not supported for now!", x), + }, + }, + }, + }, + }; + + Ok(Self { + session, + task, + ep, + batch, + inputs, + }) + } + + pub fn fetch_inputs_from_session( + session: &Session, + ) -> (Vec>, Vec, Vec) { + // get inputs attrs from ONNX model + let mut shapes = Vec::new(); + let mut dtypes = Vec::new(); + let mut names = Vec::new(); + for i in session.inputs.iter() { + let shape: Vec = i + .dimensions() + .map(|x| if let Some(x) = x { x as i32 } else { -1i32 }) + .collect(); + shapes.push(shape); + dtypes.push(i.input_type); + names.push(i.name.clone()); + } + (shapes, dtypes, names) + } + + pub fn set_ep_cuda(device_id: u32) -> (OrtEP, ExecutionProvider) { + // set CUDA + if ExecutionProvider::CUDA(Default::default()).is_available() { + ( + OrtEP::Cuda(device_id), + ExecutionProvider::CUDA(CUDAExecutionProviderOptions { + device_id, + ..Default::default() + }), + ) + } else { + println!("> CUDA is not available! 
Using CPU."); + (OrtEP::Cpu, ExecutionProvider::CPU(Default::default())) + } + } + + pub fn set_ep_trt( + device_id: u32, + fp16: bool, + batch: &Batch, + inputs: &OrtInputs, + ) -> (OrtEP, ExecutionProvider) { + // set TensorRT + if ExecutionProvider::TensorRT(Default::default()).is_available() { + let (height, width) = (inputs.sizes[0][0], inputs.sizes[0][1]); + + // dtype match checking + if inputs.dtypes[0] == TensorElementDataType::Float16 && !fp16 { + panic!( + "Dtype mismatch! Expected: Float32, got: {:?}. You should use `--fp16`", + inputs.dtypes[0] + ); + } + + // dynamic shape: input_tensor_1:dim_1xdim_2x...,input_tensor_2:dim_3xdim_4x...,... + let mut opt_string = String::new(); + let mut min_string = String::new(); + let mut max_string = String::new(); + for name in inputs.names.iter() { + let s_opt = format!("{}:{}x3x{}x{},", name, batch.opt, height, width); + let s_min = format!("{}:{}x3x{}x{},", name, batch.min, height, width); + let s_max = format!("{}:{}x3x{}x{},", name, batch.max, height, width); + opt_string.push_str(s_opt.as_str()); + min_string.push_str(s_min.as_str()); + max_string.push_str(s_max.as_str()); + } + let _ = opt_string.pop(); + let _ = min_string.pop(); + let _ = max_string.pop(); + ( + OrtEP::Trt(device_id), + ExecutionProvider::TensorRT(TensorRTExecutionProviderOptions { + device_id, + fp16_enable: fp16, + timing_cache_enable: true, + profile_min_shapes: min_string, + profile_max_shapes: max_string, + profile_opt_shapes: opt_string, + ..Default::default() + }), + ) + } else { + println!("> TensorRT is not available! Try using CUDA..."); + Self::set_ep_cuda(device_id) + } + } + + pub fn fetch_from_metadata(&self, key: &str) -> Option { + // fetch value from onnx model file by key + match self.session.metadata() { + Err(_) => None, + Ok(metadata) => match metadata.custom(key) { + Err(_) => None, + Ok(value) => value, + }, + } + } + + pub fn run(&self, xs: Array, profile: bool) -> Result>> { + // ORT inference + match self.dtype() { + TensorElementDataType::Float16 => self.run_fp16(xs, profile), + TensorElementDataType::Float32 => self.run_fp32(xs, profile), + _ => todo!(), + } + } + + pub fn run_fp16(&self, xs: Array, profile: bool) -> Result>> { + // f32->f16 + let t = std::time::Instant::now(); + let xs = xs.mapv(f16::from_f32); + if profile { + println!("[ORT f32->f16]: {:?}", t.elapsed()); + } + + // h2d + let t = std::time::Instant::now(); + let xs = CowArray::from(xs); + let xs = vec![Value::from_array(self.session.allocator(), &xs)?]; + if profile { + println!("[ORT H2D]: {:?}", t.elapsed()); + } + + // run + let t = std::time::Instant::now(); + let ys = self.session.run(xs)?; + if profile { + println!("[ORT Inference]: {:?}", t.elapsed()); + } + + // d2h + Ok(ys + .iter() + .map(|x| { + // d2h + let t = std::time::Instant::now(); + let x = x.try_extract::<_>().unwrap().view().clone().into_owned(); + if profile { + println!("[ORT D2H]: {:?}", t.elapsed()); + } + + // f16->f32 + let t_ = std::time::Instant::now(); + let x = x.mapv(f16::to_f32); + if profile { + println!("[ORT f16->f32]: {:?}", t_.elapsed()); + } + x + }) + .collect::>>()) + } + + pub fn run_fp32(&self, xs: Array, profile: bool) -> Result>> { + // h2d + let t = std::time::Instant::now(); + let xs = CowArray::from(xs); + let xs = vec![Value::from_array(self.session.allocator(), &xs)?]; + if profile { + println!("[ORT H2D]: {:?}", t.elapsed()); + } + + // run + let t = std::time::Instant::now(); + let ys = self.session.run(xs)?; + if profile { + println!("[ORT Inference]: {:?}", 
t.elapsed()); + } + + // d2h + Ok(ys + .iter() + .map(|x| { + let t = std::time::Instant::now(); + let x = x.try_extract::<_>().unwrap().view().clone().into_owned(); + if profile { + println!("[ORT D2H]: {:?}", t.elapsed()); + } + x + }) + .collect::>>()) + } + + pub fn output_shapes(&self) -> Vec> { + let mut shapes = Vec::new(); + for o in &self.session.outputs { + let shape: Vec<_> = o + .dimensions() + .map(|x| if let Some(x) = x { x as i32 } else { -1i32 }) + .collect(); + shapes.push(shape); + } + shapes + } + + pub fn output_dtypes(&self) -> Vec { + let mut dtypes = Vec::new(); + self.session + .outputs + .iter() + .for_each(|x| dtypes.push(x.output_type)); + dtypes + } + + pub fn input_shapes(&self) -> &Vec> { + &self.inputs.shapes + } + + pub fn input_names(&self) -> &Vec { + &self.inputs.names + } + + pub fn input_dtypes(&self) -> &Vec { + &self.inputs.dtypes + } + + pub fn dtype(&self) -> TensorElementDataType { + self.input_dtypes()[0] + } + + pub fn height(&self) -> u32 { + self.inputs.sizes[0][0] + } + + pub fn width(&self) -> u32 { + self.inputs.sizes[0][1] + } + + pub fn is_height_dynamic(&self) -> bool { + self.input_shapes()[0][2] == -1 + } + + pub fn is_width_dynamic(&self) -> bool { + self.input_shapes()[0][3] == -1 + } + + pub fn batch(&self) -> u32 { + self.batch.opt + } + + pub fn is_batch_dynamic(&self) -> bool { + self.input_shapes()[0][0] == -1 + } + + pub fn ep(&self) -> &OrtEP { + &self.ep + } + + pub fn task(&self) -> YOLOTask { + self.task.clone() + } + + pub fn names(&self) -> Option> { + // class names, metadata parsing + // String format: `{0: 'person', 1: 'bicycle', 2: 'sports ball', ..., 27: "yellow_lady's_slipper"}` + match self.fetch_from_metadata("names") { + Some(names) => { + let re = Regex::new(r#"(['"])([-()\w '"]+)(['"])"#).unwrap(); + let mut names_ = vec![]; + for (_, [_, name, _]) in re.captures_iter(&names).map(|x| x.extract()) { + names_.push(name.to_string()); + } + Some(names_) + } + None => None, + } + } + + pub fn nk(&self) -> Option { + // num_keypoints, metadata parsing: String `nk` in onnx model: `[17, 3]` + match self.fetch_from_metadata("kpt_shape") { + None => None, + Some(kpt_string) => { + let re = Regex::new(r"([0-9]+), ([0-9]+)").unwrap(); + let caps = re.captures(&kpt_string).unwrap(); + Some(caps.get(1).unwrap().as_str().parse::().unwrap()) + } + } + } + + pub fn nc(&self) -> Option { + // num_classes + match self.names() { + // by names + Some(names) => Some(names.len() as u32), + None => match self.task() { + // by task calculation + YOLOTask::Classify => Some(self.output_shapes()[0][1] as u32), + YOLOTask::Detect => { + if self.output_shapes()[0][1] == -1 { + None + } else { + // cxywhclss + Some(self.output_shapes()[0][1] as u32 - 4) + } + } + YOLOTask::Pose => { + match self.nk() { + None => None, + Some(nk) => { + if self.output_shapes()[0][1] == -1 { + None + } else { + // cxywhclss3*kpt + Some(self.output_shapes()[0][1] as u32 - 4 - 3 * nk) + } + } + } + } + YOLOTask::Segment => { + if self.output_shapes()[0][1] == -1 { + None + } else { + // cxywhclssnm + Some((self.output_shapes()[0][1] - self.output_shapes()[1][1]) as u32 - 4) + } + } + }, + } + } + + pub fn nm(&self) -> Option { + // num_masks + match self.task() { + YOLOTask::Segment => Some(self.output_shapes()[1][1] as u32), + _ => None, + } + } + + pub fn na(&self) -> Option { + // num_anchors + match self.task() { + YOLOTask::Segment | YOLOTask::Detect | YOLOTask::Pose => { + if self.output_shapes()[0][2] == -1 { + None + } else { + 
Some(self.output_shapes()[0][2] as u32) + } + } + _ => None, + } + } + + pub fn author(&self) -> Option { + self.fetch_from_metadata("author") + } + + pub fn version(&self) -> Option { + self.fetch_from_metadata("version") + } +} diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/ort_backend.rs:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs new file mode 100755 index 0000000..2fcc6d8 --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs @@ -0,0 +1,235 @@ +use ndarray::{Array, Axis, IxDyn}; + +#[derive(Clone, PartialEq, Default)] +pub struct YOLOResult { + // YOLO tasks results of an image + pub probs: Option, + pub bboxes: Option>, + pub keypoints: Option>>, + pub masks: Option>>, +} + +impl std::fmt::Debug for YOLOResult { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("YOLOResult") + .field( + "Probs(top5)", + &format_args!("{:?}", self.probs().map(|probs| probs.topk(5))), + ) + .field("Bboxes", &self.bboxes) + .field("Keypoints", &self.keypoints) + .field( + "Masks", + &format_args!("{:?}", self.masks().map(|masks| masks.len())), + ) + .finish() + } +} + +impl YOLOResult { + pub fn new( + probs: Option, + bboxes: Option>, + keypoints: Option>>, + masks: Option>>, + ) -> Self { + Self { + probs, + bboxes, + keypoints, + masks, + } + } + + pub fn probs(&self) -> Option<&Embedding> { + self.probs.as_ref() + } + + pub fn keypoints(&self) -> Option<&Vec>> { + self.keypoints.as_ref() + } + + pub fn masks(&self) -> Option<&Vec>> { + self.masks.as_ref() + } + + pub fn bboxes(&self) -> Option<&Vec> { + self.bboxes.as_ref() + } + + pub fn bboxes_mut(&mut self) -> Option<&mut Vec> { + self.bboxes.as_mut() + } +} + +#[derive(Debug, PartialEq, Clone, Default)] +pub struct Point2 { + // A point2d with x, y, conf + x: f32, + y: f32, + confidence: f32, +} + +impl Point2 { + pub fn new_with_conf(x: f32, y: f32, confidence: f32) -> Self { + Self { x, y, confidence } + } + + pub fn new(x: f32, y: f32) -> Self { + Self { + x, + y, + ..Default::default() + } + } + + pub fn x(&self) -> f32 { + self.x + } + + pub fn y(&self) -> f32 { + self.y + } + + pub fn confidence(&self) -> f32 { + self.confidence + } +} + +#[derive(Debug, Clone, PartialEq, Default)] +pub struct Embedding { + // An float32 n-dims tensor + data: Array, +} + +impl Embedding { + pub fn new(data: Array) -> Self { + Self { data } + } + + pub fn data(&self) -> &Array { + &self.data + } + + pub fn topk(&self, k: usize) -> Vec<(usize, f32)> { + let mut probs = self + .data + .iter() + .enumerate() + .map(|(a, b)| (a, *b)) + .collect::>(); + probs.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap()); + let mut topk = Vec::new(); + for &(id, confidence) in probs.iter().take(k) { + topk.push((id, confidence)); + } + topk + } + + pub fn norm(&self) -> Array { + let std_ = self.data.mapv(|x| x * x).sum_axis(Axis(0)).mapv(f32::sqrt); + self.data.clone() / std_ + } + + pub fn top1(&self) -> (usize, f32) { + self.topk(1)[0] + } +} + +#[derive(Debug, Clone, PartialEq, Default)] +pub struct Bbox { + // a bounding box around an object + xmin: f32, + ymin: f32, + width: f32, + height: f32, + id: usize, + 
confidence: f32, +} + +impl Bbox { + pub fn new_from_xywh(xmin: f32, ymin: f32, width: f32, height: f32) -> Self { + Self { + xmin, + ymin, + width, + height, + ..Default::default() + } + } + + pub fn new(xmin: f32, ymin: f32, width: f32, height: f32, id: usize, confidence: f32) -> Self { + Self { + xmin, + ymin, + width, + height, + id, + confidence, + } + } + + pub fn width(&self) -> f32 { + self.width + } + + pub fn height(&self) -> f32 { + self.height + } + + pub fn xmin(&self) -> f32 { + self.xmin + } + + pub fn ymin(&self) -> f32 { + self.ymin + } + + pub fn xmax(&self) -> f32 { + self.xmin + self.width + } + + pub fn ymax(&self) -> f32 { + self.ymin + self.height + } + + pub fn tl(&self) -> Point2 { + Point2::new(self.xmin, self.ymin) + } + + pub fn br(&self) -> Point2 { + Point2::new(self.xmax(), self.ymax()) + } + + pub fn cxcy(&self) -> Point2 { + Point2::new(self.xmin + self.width / 2., self.ymin + self.height / 2.) + } + + pub fn id(&self) -> usize { + self.id + } + + pub fn confidence(&self) -> f32 { + self.confidence + } + + pub fn area(&self) -> f32 { + self.width * self.height + } + + pub fn intersection_area(&self, another: &Bbox) -> f32 { + let l = self.xmin.max(another.xmin); + let r = (self.xmin + self.width).min(another.xmin + another.width); + let t = self.ymin.max(another.ymin); + let b = (self.ymin + self.height).min(another.ymin + another.height); + (r - l + 1.).max(0.) * (b - t + 1.).max(0.) + } + + pub fn union(&self, another: &Bbox) -> f32 { + self.area() + another.area() - self.intersection_area(another) + } + + pub fn iou(&self, another: &Bbox) -> f32 { + self.intersection_area(another) / self.union(another) + } +} diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime-Rust/src/yolo_result.rs:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime/README.md b/ultralytics/examples/YOLOv8-ONNXRuntime/README.md new file mode 100755 index 0000000..b206b2e --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime/README.md @@ -0,0 +1,43 @@ +# YOLOv8 - ONNX Runtime + +This project implements YOLOv8 using ONNX Runtime. + +## Installation + +To run this project, you need to install the required dependencies. The following instructions will guide you through the installation process. + +### Installing Required Dependencies + +You can install the required dependencies by running the following command: + +```bash +pip install -r requirements.txt +``` + +### Installing `onnxruntime-gpu` + +If you have an NVIDIA GPU and want to leverage GPU acceleration, you can install the onnxruntime-gpu package using the following command: + +```bash +pip install onnxruntime-gpu +``` + +Note: Make sure you have the appropriate GPU drivers installed on your system. 
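+
+To verify that the GPU package is actually usable, you can list the available execution providers (a quick sanity check; this assumes nothing beyond a working `onnxruntime` install):
+
+```python
+import onnxruntime as ort
+
+# 'CUDAExecutionProvider' appears only when the GPU build and drivers are set up correctly
+print(ort.get_available_providers())
+```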
+ +### Installing `onnxruntime` (CPU version) + +If you don't have an NVIDIA GPU or prefer to use the CPU version of onnxruntime, you can install the onnxruntime package using the following command: + +```bash +pip install onnxruntime +``` + +### Usage + +After successfully installing the required packages, you can run the YOLOv8 implementation using the following command: + +```bash +python main.py --model yolov8n.onnx --img image.jpg --conf-thres 0.5 --iou-thres 0.5 +``` + +Make sure to replace yolov8n.onnx with the path to your YOLOv8 ONNX model file, image.jpg with the path to your input image, and adjust the confidence threshold (conf-thres) and IoU threshold (iou-thres) values as needed. diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime/README.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime/README.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime/main.py b/ultralytics/examples/YOLOv8-ONNXRuntime/main.py new file mode 100755 index 0000000..ec76871 --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime/main.py @@ -0,0 +1,228 @@ +import argparse + +import cv2 +import numpy as np +import onnxruntime as ort +import torch + +from ultralytics.utils import ASSETS, yaml_load +from ultralytics.utils.checks import check_requirements, check_yaml + + +class YOLOv8: + """YOLOv8 object detection model class for handling inference and visualization.""" + + def __init__(self, onnx_model, input_image, confidence_thres, iou_thres): + """ + Initializes an instance of the YOLOv8 class. + + Args: + onnx_model: Path to the ONNX model. + input_image: Path to the input image. + confidence_thres: Confidence threshold for filtering detections. + iou_thres: IoU (Intersection over Union) threshold for non-maximum suppression. + """ + self.onnx_model = onnx_model + self.input_image = input_image + self.confidence_thres = confidence_thres + self.iou_thres = iou_thres + + # Load the class names from the COCO dataset + self.classes = yaml_load(check_yaml('coco128.yaml'))['names'] + + # Generate a color palette for the classes + self.color_palette = np.random.uniform(0, 255, size=(len(self.classes), 3)) + + def draw_detections(self, img, box, score, class_id): + """ + Draws bounding boxes and labels on the input image based on the detected objects. + + Args: + img: The input image to draw detections on. + box: Detected bounding box. + score: Corresponding detection score. + class_id: Class ID for the detected object. 
+ + Returns: + None + """ + + # Extract the coordinates of the bounding box + x1, y1, w, h = box + + # Retrieve the color for the class ID + color = self.color_palette[class_id] + + # Draw the bounding box on the image + cv2.rectangle(img, (int(x1), int(y1)), (int(x1 + w), int(y1 + h)), color, 2) + + # Create the label text with class name and score + label = f'{self.classes[class_id]}: {score:.2f}' + + # Calculate the dimensions of the label text + (label_width, label_height), _ = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) + + # Calculate the position of the label text + label_x = x1 + label_y = y1 - 10 if y1 - 10 > label_height else y1 + 10 + + # Draw a filled rectangle as the background for the label text + cv2.rectangle(img, (label_x, label_y - label_height), (label_x + label_width, label_y + label_height), color, + cv2.FILLED) + + # Draw the label text on the image + cv2.putText(img, label, (label_x, label_y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA) + + def preprocess(self): + """ + Preprocesses the input image before performing inference. + + Returns: + image_data: Preprocessed image data ready for inference. + """ + # Read the input image using OpenCV + self.img = cv2.imread(self.input_image) + + # Get the height and width of the input image + self.img_height, self.img_width = self.img.shape[:2] + + # Convert the image color space from BGR to RGB + img = cv2.cvtColor(self.img, cv2.COLOR_BGR2RGB) + + # Resize the image to match the input shape + img = cv2.resize(img, (self.input_width, self.input_height)) + + # Normalize the image data by dividing it by 255.0 + image_data = np.array(img) / 255.0 + + # Transpose the image to have the channel dimension as the first dimension + image_data = np.transpose(image_data, (2, 0, 1)) # Channel first + + # Expand the dimensions of the image data to match the expected input shape + image_data = np.expand_dims(image_data, axis=0).astype(np.float32) + + # Return the preprocessed image data + return image_data + + def postprocess(self, input_image, output): + """ + Performs post-processing on the model's output to extract bounding boxes, scores, and class IDs. + + Args: + input_image (numpy.ndarray): The input image. + output (numpy.ndarray): The output of the model. + + Returns: + numpy.ndarray: The input image with detections drawn on it. 
+ """ + + # Transpose and squeeze the output to match the expected shape + outputs = np.transpose(np.squeeze(output[0])) + + # Get the number of rows in the outputs array + rows = outputs.shape[0] + + # Lists to store the bounding boxes, scores, and class IDs of the detections + boxes = [] + scores = [] + class_ids = [] + + # Calculate the scaling factors for the bounding box coordinates + x_factor = self.img_width / self.input_width + y_factor = self.img_height / self.input_height + + # Iterate over each row in the outputs array + for i in range(rows): + # Extract the class scores from the current row + classes_scores = outputs[i][4:] + + # Find the maximum score among the class scores + max_score = np.amax(classes_scores) + + # If the maximum score is above the confidence threshold + if max_score >= self.confidence_thres: + # Get the class ID with the highest score + class_id = np.argmax(classes_scores) + + # Extract the bounding box coordinates from the current row + x, y, w, h = outputs[i][0], outputs[i][1], outputs[i][2], outputs[i][3] + + # Calculate the scaled coordinates of the bounding box + left = int((x - w / 2) * x_factor) + top = int((y - h / 2) * y_factor) + width = int(w * x_factor) + height = int(h * y_factor) + + # Add the class ID, score, and box coordinates to the respective lists + class_ids.append(class_id) + scores.append(max_score) + boxes.append([left, top, width, height]) + + # Apply non-maximum suppression to filter out overlapping bounding boxes + indices = cv2.dnn.NMSBoxes(boxes, scores, self.confidence_thres, self.iou_thres) + + # Iterate over the selected indices after non-maximum suppression + for i in indices: + # Get the box, score, and class ID corresponding to the index + box = boxes[i] + score = scores[i] + class_id = class_ids[i] + + # Draw the detection on the input image + self.draw_detections(input_image, box, score, class_id) + + # Return the modified input image + return input_image + + def main(self): + """ + Performs inference using an ONNX model and returns the output image with drawn detections. + + Returns: + output_img: The output image with drawn detections. + """ + # Create an inference session using the ONNX model and specify execution providers + session = ort.InferenceSession(self.onnx_model, providers=['CUDAExecutionProvider', 'CPUExecutionProvider']) + + # Get the model inputs + model_inputs = session.get_inputs() + + # Store the shape of the input for later use + input_shape = model_inputs[0].shape + self.input_width = input_shape[2] + self.input_height = input_shape[3] + + # Preprocess the image data + img_data = self.preprocess() + + # Run inference using the preprocessed image data + outputs = session.run(None, {model_inputs[0].name: img_data}) + + # Perform post-processing on the outputs to obtain output image. 
+ return self.postprocess(self.img, outputs) # output image + + +if __name__ == '__main__': + # Create an argument parser to handle command-line arguments + parser = argparse.ArgumentParser() + parser.add_argument('--model', type=str, default='yolov8n.onnx', help='Input your ONNX model.') + parser.add_argument('--img', type=str, default=str(ASSETS / 'bus.jpg'), help='Path to input image.') + parser.add_argument('--conf-thres', type=float, default=0.5, help='Confidence threshold') + parser.add_argument('--iou-thres', type=float, default=0.5, help='NMS IoU threshold') + args = parser.parse_args() + + # Check the requirements and select the appropriate backend (CPU or GPU) + check_requirements('onnxruntime-gpu' if torch.cuda.is_available() else 'onnxruntime') + + # Create an instance of the YOLOv8 class with the specified arguments + detection = YOLOv8(args.model, args.img, args.conf_thres, args.iou_thres) + + # Perform object detection and obtain the output image + output_image = detection.main() + + # Display the output image in a window + cv2.namedWindow('Output', cv2.WINDOW_NORMAL) + cv2.imshow('Output', output_image) + + # Wait for a key press to exit + cv2.waitKey(0) diff --git a/ultralytics/examples/YOLOv8-ONNXRuntime/main.py:Zone.Identifier b/ultralytics/examples/YOLOv8-ONNXRuntime/main.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-ONNXRuntime/main.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md new file mode 100755 index 0000000..c9076fa --- /dev/null +++ b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md @@ -0,0 +1,19 @@ +# YOLOv8 - OpenCV + +An implementation of YOLOv8 using OpenCV with models in ONNX format. + +Simply clone and run: + +```bash +pip install -r requirements.txt +python main.py --model yolov8n.onnx --img image.jpg +``` + +If you start from scratch: + +```bash +pip install ultralytics +yolo export model=yolov8n.pt imgsz=640 format=onnx opset=12 ``` + +_\*Make sure to include "opset=12"_ diff --git a/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/README.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py new file mode 100755 index 0000000..78b0b08 --- /dev/null +++ b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py @@ -0,0 +1,117 @@ +import argparse + +import cv2.dnn +import numpy as np + +from ultralytics.utils import ASSETS, yaml_load +from ultralytics.utils.checks import check_yaml + +CLASSES = yaml_load(check_yaml('coco128.yaml'))['names'] +colors = np.random.uniform(0, 255, size=(len(CLASSES), 3)) + + +def draw_bounding_box(img, class_id, confidence, x, y, x_plus_w, y_plus_h): + """ + Draws bounding boxes on the input image based on the provided arguments. + + Args: + img (numpy.ndarray): The input image to draw the bounding box on. + class_id (int): Class ID of the detected object. + confidence (float): Confidence score of the detected object. + x (int): X-coordinate of the top-left corner of the bounding box. + y (int): Y-coordinate of the top-left corner of the bounding box. 
+ x_plus_w (int): X-coordinate of the bottom-right corner of the bounding box. + y_plus_h (int): Y-coordinate of the bottom-right corner of the bounding box. + """ + label = f'{CLASSES[class_id]} ({confidence:.2f})' + color = colors[class_id] + cv2.rectangle(img, (x, y), (x_plus_w, y_plus_h), color, 2) + cv2.putText(img, label, (x - 10, y - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2) + + +def main(onnx_model, input_image): + """ + Main function to load ONNX model, perform inference, draw bounding boxes, and display the output image. + + Args: + onnx_model (str): Path to the ONNX model. + input_image (str): Path to the input image. + + Returns: + list: List of dictionaries containing detection information such as class_id, class_name, confidence, etc. + """ + # Load the ONNX model + model: cv2.dnn.Net = cv2.dnn.readNetFromONNX(onnx_model) + + # Read the input image + original_image: np.ndarray = cv2.imread(input_image) + [height, width, _] = original_image.shape + + # Prepare a square image for inference + length = max((height, width)) + image = np.zeros((length, length, 3), np.uint8) + image[0:height, 0:width] = original_image + + # Calculate scale factor + scale = length / 640 + + # Preprocess the image and prepare blob for model + blob = cv2.dnn.blobFromImage(image, scalefactor=1 / 255, size=(640, 640), swapRB=True) + model.setInput(blob) + + # Perform inference + outputs = model.forward() + + # Prepare output array + outputs = np.array([cv2.transpose(outputs[0])]) + rows = outputs.shape[1] + + boxes = [] + scores = [] + class_ids = [] + + # Iterate through output to collect bounding boxes, confidence scores, and class IDs + for i in range(rows): + classes_scores = outputs[0][i][4:] + (minScore, maxScore, minClassLoc, (x, maxClassIndex)) = cv2.minMaxLoc(classes_scores) + if maxScore >= 0.25: + box = [ + outputs[0][i][0] - (0.5 * outputs[0][i][2]), outputs[0][i][1] - (0.5 * outputs[0][i][3]), + outputs[0][i][2], outputs[0][i][3]] + boxes.append(box) + scores.append(maxScore) + class_ids.append(maxClassIndex) + + # Apply NMS (Non-maximum suppression) + result_boxes = cv2.dnn.NMSBoxes(boxes, scores, 0.25, 0.45, 0.5) + + detections = [] + + # Iterate through NMS results to draw bounding boxes and labels + for i in range(len(result_boxes)): + index = result_boxes[i] + box = boxes[index] + detection = { + 'class_id': class_ids[index], + 'class_name': CLASSES[class_ids[index]], + 'confidence': scores[index], + 'box': box, + 'scale': scale} + detections.append(detection) + draw_bounding_box(original_image, class_ids[index], scores[index], round(box[0] * scale), round(box[1] * scale), + round((box[0] + box[2]) * scale), round((box[1] + box[3]) * scale)) + + # Display the image with bounding boxes + cv2.imshow('image', original_image) + cv2.waitKey(0) + cv2.destroyAllWindows() + + return detections + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--model', default='yolov8n.onnx', help='Input your ONNX model.') + parser.add_argument('--img', default=str(ASSETS / 'bus.jpg'), help='Path to input image.') + args = parser.parse_args() + main(args.model, args.img) diff --git a/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py:Zone.Identifier b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-OpenCV-ONNX-Python/main.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-Region-Counter/readme.md 
b/ultralytics/examples/YOLOv8-Region-Counter/readme.md new file mode 100755 index 0000000..2acf0a5 --- /dev/null +++ b/ultralytics/examples/YOLOv8-Region-Counter/readme.md @@ -0,0 +1,123 @@ +# Regions Counting Using YOLOv8 (Inference on Video) + +- Region counting is a method employed to tally the objects within a specified area, allowing for more sophisticated analyses when multiple regions are considered. These regions can be adjusted interactively using a Left Mouse Click, and the counting process occurs in real time. +- Regions can be adjusted to suit the user's preferences and requirements. + +
+*(Demo images: YOLOv8 region counting visual 1 and YOLOv8 region counting visual 2)*
+ +## Table of Contents + +- [Step 1: Install the Required Libraries](#step-1-install-the-required-libraries) +- [Step 2: Run the Region Counting Using Ultralytics YOLOv8](#step-2-run-the-region-counting-using-ultralytics-yolov8) +- [Usage Options](#usage-options) +- [FAQ](#faq) + +## Step 1: Install the Required Libraries + +Clone the repository, install dependencies and `cd` to this local directory for commands in Step 2. + +```bash +# Clone ultralytics repo +git clone https://github.com/ultralytics/ultralytics + +# cd to local directory +cd ultralytics/examples/YOLOv8-Region-Counter +``` + +## Step 2: Run the Region Counting Using Ultralytics YOLOv8 + +Here are the basic commands for running the inference: + +### Note + +After the video begins playing, you can freely move the region anywhere within the video by simply clicking and dragging using the left mouse button. + +```bash +# If you want to save results +python yolov8_region_counter.py --source "path/to/video.mp4" --save-img --view-img + +# If you want to run the model on CPU +python yolov8_region_counter.py --source "path/to/video.mp4" --save-img --view-img --device cpu + +# If you want to change the model file +python yolov8_region_counter.py --source "path/to/video.mp4" --save-img --weights "path/to/model.pt" + +# If you want to detect specific classes (e.g., the first and third class) +python yolov8_region_counter.py --source "path/to/video.mp4" --classes 0 2 --weights "path/to/model.pt" + +# If you don't want to save results +python yolov8_region_counter.py --source "path/to/video.mp4" --view-img +``` + +## Usage Options + +- `--source`: Specifies the path to the video file you want to run inference on. +- `--device`: Specifies the device: `cpu` or `0` (first GPU). +- `--save-img`: Flag to save the detection results as images. +- `--weights`: Specifies a different YOLOv8 model file (e.g., `yolov8n.pt`, `yolov8s.pt`, `yolov8m.pt`, `yolov8l.pt`, `yolov8x.pt`). +- `--classes`: Specifies the classes to be detected. +- `--line-thickness`: Specifies the bounding box thickness. +- `--region-thickness`: Specifies the region box thickness. +- `--track-thickness`: Specifies the track line thickness. + +## FAQ + +**1. What Does Region Counting Involve?** + +Region counting is a computational method utilized to ascertain the quantity of objects within a specific area in recorded video or real-time streams. This technique finds frequent application in image processing, computer vision, and pattern recognition, facilitating the analysis and segmentation of objects or features based on their spatial relationships. + +**2. Is Friendly Region Plotting Supported by the Region Counter?** + +The Region Counter offers the capability to create regions in various formats, such as polygons and rectangles. 
You have the flexibility to modify region attributes, including coordinates, colors, and other details, as demonstrated in the following code: + +```python +from shapely.geometry import Polygon + +counting_regions = [ + { + "name": "YOLOv8 Polygon Region", + "polygon": Polygon( + [(50, 80), (250, 20), (450, 80), (400, 350), (100, 350)] + ), # Polygon with five points (Pentagon) + "counts": 0, + "dragging": False, + "region_color": (255, 42, 4), # BGR Value + "text_color": (255, 255, 255), # Region Text Color + }, + { + "name": "YOLOv8 Rectangle Region", + "polygon": Polygon( + [(200, 250), (440, 250), (440, 550), (200, 550)] + ), # Rectangle with four points + "counts": 0, + "dragging": False, + "region_color": (37, 255, 225), # BGR Value + "text_color": (0, 0, 0), # Region Text Color + }, +] +``` + +**3. Why Combine Region Counting with YOLOv8?** + +YOLOv8 specializes in the detection and tracking of objects in video streams. Region counting complements this by enabling object counting within designated areas, making it a valuable application of YOLOv8. + +**4. How Can I Troubleshoot Issues?** + +To gain more insights during inference, you can include the `--debug` flag in your command: + +```bash +python yolov8_region_counter.py --source "path to video file" --debug +``` + +**5. Can I Employ Other YOLO Versions?** + +Certainly, you have the flexibility to specify different YOLO model weights using the `--weights` option. + +**6. Where Can I Access Additional Information?** + +For a comprehensive guide on using YOLOv8 with Object Tracking, please refer to [Multi-Object Tracking with Ultralytics YOLO](https://docs.ultralytics.com/modes/track/). diff --git a/ultralytics/examples/YOLOv8-Region-Counter/readme.md:Zone.Identifier b/ultralytics/examples/YOLOv8-Region-Counter/readme.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-Region-Counter/readme.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py b/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py new file mode 100755 index 0000000..5379fd3 --- /dev/null +++ b/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py @@ -0,0 +1,218 @@ +import argparse +from collections import defaultdict +from pathlib import Path + +import cv2 +import numpy as np +from shapely.geometry import Polygon +from shapely.geometry.point import Point + +from ultralytics import YOLO +from ultralytics.utils.files import increment_path +from ultralytics.utils.plotting import Annotator, colors + +track_history = defaultdict(list) + +current_region = None +counting_regions = [ + { + 'name': 'YOLOv8 Polygon Region', + 'polygon': Polygon([(50, 80), (250, 20), (450, 80), (400, 350), (100, 350)]), # Polygon points + 'counts': 0, + 'dragging': False, + 'region_color': (255, 42, 4), # BGR Value + 'text_color': (255, 255, 255) # Region Text Color + }, + { + 'name': 'YOLOv8 Rectangle Region', + 'polygon': Polygon([(200, 250), (440, 250), (440, 550), (200, 550)]), # Polygon points + 'counts': 0, + 'dragging': False, + 'region_color': (37, 255, 225), # BGR Value + 'text_color': (0, 0, 0), # Region Text Color + }, ] + + +def mouse_callback(event, x, y, flags, param): + """Mouse call back event.""" + global current_region + + # Mouse left button down event + if event == cv2.EVENT_LBUTTONDOWN: + for region in counting_regions: + if region['polygon'].contains(Point((x, y))): + current_region = region + 
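+                # Record the grab point so mouse-move events can translate the polygon by the cursor delta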
current_region['dragging'] = True + current_region['offset_x'] = x + current_region['offset_y'] = y + + # Mouse move event + elif event == cv2.EVENT_MOUSEMOVE: + if current_region is not None and current_region['dragging']: + dx = x - current_region['offset_x'] + dy = y - current_region['offset_y'] + current_region['polygon'] = Polygon([ + (p[0] + dx, p[1] + dy) for p in current_region['polygon'].exterior.coords]) + current_region['offset_x'] = x + current_region['offset_y'] = y + + # Mouse left button up event + elif event == cv2.EVENT_LBUTTONUP: + if current_region is not None and current_region['dragging']: + current_region['dragging'] = False + + +def run( + weights='yolov8n.pt', + source=None, + device='cpu', + view_img=False, + save_img=False, + exist_ok=False, + classes=None, + line_thickness=2, + track_thickness=2, + region_thickness=2, +): + """ + Run Region counting on a video using YOLOv8 and ByteTrack. + + Supports movable region for real time counting inside specific area. + Supports multiple regions counting. + Regions can be Polygons or rectangle in shape + + Args: + weights (str): Model weights path. + source (str): Video file path. + device (str): processing device cpu, 0, 1 + view_img (bool): Show results. + save_img (bool): Save results. + exist_ok (bool): Overwrite existing files. + classes (list): classes to detect and track + line_thickness (int): Bounding box thickness. + track_thickness (int): Tracking line thickness + region_thickness (int): Region thickness. + """ + vid_frame_count = 0 + + # Check source path + if not Path(source).exists(): + raise FileNotFoundError(f"Source path '{source}' does not exist.") + + # Setup Model + model = YOLO(f'{weights}') + model.to('cuda') if device == '0' else model.to('cpu') + + # Extract classes names + names = model.model.names + + # Video setup + videocapture = cv2.VideoCapture(source) + frame_width, frame_height = int(videocapture.get(3)), int(videocapture.get(4)) + fps, fourcc = int(videocapture.get(5)), cv2.VideoWriter_fourcc(*'mp4v') + + # Output setup + save_dir = increment_path(Path('ultralytics_rc_output') / 'exp', exist_ok) + save_dir.mkdir(parents=True, exist_ok=True) + video_writer = cv2.VideoWriter(str(save_dir / f'{Path(source).stem}.mp4'), fourcc, fps, (frame_width, frame_height)) + + # Iterate over video frames + while videocapture.isOpened(): + success, frame = videocapture.read() + if not success: + break + vid_frame_count += 1 + + # Extract the results + results = model.track(frame, persist=True, classes=classes) + + if results[0].boxes.id is not None: + boxes = results[0].boxes.xyxy.cpu() + track_ids = results[0].boxes.id.int().cpu().tolist() + clss = results[0].boxes.cls.cpu().tolist() + + annotator = Annotator(frame, line_width=line_thickness, example=str(names)) + + for box, track_id, cls in zip(boxes, track_ids, clss): + annotator.box_label(box, str(names[cls]), color=colors(cls, True)) + bbox_center = (box[0] + box[2]) / 2, (box[1] + box[3]) / 2 # Bbox center + + track = track_history[track_id] # Tracking Lines plot + track.append((float(bbox_center[0]), float(bbox_center[1]))) + if len(track) > 30: + track.pop(0) + points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) + cv2.polylines(frame, [points], isClosed=False, color=colors(cls, True), thickness=track_thickness) + + # Check if detection inside region + for region in counting_regions: + if region['polygon'].contains(Point((bbox_center[0], bbox_center[1]))): + region['counts'] += 1 + + # Draw regions (Polygons/Rectangles) + for region in 
counting_regions: + region_label = str(region['counts']) + region_color = region['region_color'] + region_text_color = region['text_color'] + + polygon_coords = np.array(region['polygon'].exterior.coords, dtype=np.int32) + centroid_x, centroid_y = int(region['polygon'].centroid.x), int(region['polygon'].centroid.y) + + text_size, _ = cv2.getTextSize(region_label, + cv2.FONT_HERSHEY_SIMPLEX, + fontScale=0.7, + thickness=line_thickness) + text_x = centroid_x - text_size[0] // 2 + text_y = centroid_y + text_size[1] // 2 + cv2.rectangle(frame, (text_x - 5, text_y - text_size[1] - 5), (text_x + text_size[0] + 5, text_y + 5), + region_color, -1) + cv2.putText(frame, region_label, (text_x, text_y), cv2.FONT_HERSHEY_SIMPLEX, 0.7, region_text_color, + line_thickness) + cv2.polylines(frame, [polygon_coords], isClosed=True, color=region_color, thickness=region_thickness) + + if view_img: + if vid_frame_count == 1: + cv2.namedWindow('Ultralytics YOLOv8 Region Counter Movable') + cv2.setMouseCallback('Ultralytics YOLOv8 Region Counter Movable', mouse_callback) + cv2.imshow('Ultralytics YOLOv8 Region Counter Movable', frame) + + if save_img: + video_writer.write(frame) + + for region in counting_regions: # Reinitialize count for each region + region['counts'] = 0 + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + + del vid_frame_count + video_writer.release() + videocapture.release() + cv2.destroyAllWindows() + + +def parse_opt(): + """Parse command line arguments.""" + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default='yolov8n.pt', help='initial weights path') + parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + parser.add_argument('--source', type=str, required=True, help='video file path') + parser.add_argument('--view-img', action='store_true', help='show results') + parser.add_argument('--save-img', action='store_true', help='save results') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --classes 0, or --classes 0 2 3') + parser.add_argument('--line-thickness', type=int, default=2, help='bounding box thickness') + parser.add_argument('--track-thickness', type=int, default=2, help='Tracking line thickness') + parser.add_argument('--region-thickness', type=int, default=4, help='Region thickness') + + return parser.parse_args() + + +def main(opt): + """Main function.""" + run(**vars(opt)) + + +if __name__ == '__main__': + opt = parse_opt() + main(opt) diff --git a/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py:Zone.Identifier b/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-Region-Counter/yolov8_region_counter.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md new file mode 100755 index 0000000..f24df30 --- /dev/null +++ b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md @@ -0,0 +1,69 @@ +# YOLOv8 with SAHI (Inference on Video) + +[SAHI](https://docs.ultralytics.com/guides/sahi-tiled-inference/) is designed to optimize object detection algorithms for large-scale and high-resolution imagery. 
It partitions images into manageable slices, performs object detection on each slice, and then stitches the results back together. This tutorial will guide you through the process of running YOLOv8 inference on video files with the aid of SAHI. + +## Table of Contents + +- [Step 1: Install the Required Libraries](#step-1-install-the-required-libraries) +- [Step 2: Run the Inference with SAHI using Ultralytics YOLOv8](#step-2-run-the-inference-with-sahi-using-ultralytics-yolov8) +- [Usage Options](#usage-options) +- [FAQ](#faq) + +## Step 1: Install the Required Libraries + +Clone the repository, install dependencies and `cd` to this local directory for commands in Step 2. + +```bash +# Clone ultralytics repo +git clone https://github.com/ultralytics/ultralytics + +# Install dependencies +pip install sahi ultralytics + +# cd to local directory +cd ultralytics/examples/YOLOv8-SAHI-Inference-Video +``` + +## Step 2: Run the Inference with SAHI using Ultralytics YOLOv8 + +Here are the basic commands for running the inference: + +```bash +# If you want to save results +python yolov8_sahi.py --source "path/to/video.mp4" --save-img + +# If you want to change the model file +python yolov8_sahi.py --source "path/to/video.mp4" --save-img --weights "yolov8n.pt" +``` + +## Usage Options + +- `--source`: Specifies the path to the video file you want to run inference on. +- `--save-img`: Flag to save the detection results as images. +- `--weights`: Specifies a different YOLOv8 model file (e.g., `yolov8n.pt`, `yolov8s.pt`, `yolov8m.pt`, `yolov8l.pt`, `yolov8x.pt`). + +## FAQ + +**1. What is SAHI?** + +SAHI stands for Slicing Aided Hyper Inference. It is a library designed to optimize object detection algorithms for large-scale and high-resolution images. The library source code is available on [GitHub](https://github.com/obss/sahi). + +**2. Why use SAHI with YOLOv8?** + +SAHI can handle large-scale images by slicing them into smaller, more manageable sizes without compromising the detection quality. This makes it a great companion to YOLOv8, especially when working with high-resolution videos. + +**3. How do I debug issues?** + +You can add the `--debug` flag to your command to print out more information during inference: + +```bash +python yolov8_sahi.py --source "path to video file" --debug +``` + +**4. Can I use other YOLO versions?** + +Yes, you can specify different YOLO model weights using the `--weights` option. + +**5. Where can I find more information?** + +For a full guide to YOLOv8 with SAHI see [https://docs.ultralytics.com/guides/sahi-tiled-inference](https://docs.ultralytics.com/guides/sahi-tiled-inference/). 
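+
+As a reference, the slicing pipeline in `yolov8_sahi.py` reduces to a few calls if you want to drive SAHI from your own Python code. A minimal single-image sketch (the model path, image path, and thresholds below are placeholder assumptions):
+
+```python
+import cv2
+from sahi import AutoDetectionModel
+from sahi.predict import get_sliced_prediction
+
+# Wrap a YOLOv8 checkpoint with SAHI (same call as in yolov8_sahi.py)
+detection_model = AutoDetectionModel.from_pretrained(
+    model_type='yolov8', model_path='yolov8n.pt', confidence_threshold=0.3, device='cpu')
+
+# Slice the image into 512x512 tiles with 20% overlap, detect per tile, then merge results
+image = cv2.imread('image.jpg')  # placeholder input
+results = get_sliced_prediction(image, detection_model, slice_height=512, slice_width=512,
+                                overlap_height_ratio=0.2, overlap_width_ratio=0.2)
+
+for pred in results.object_prediction_list:
+    print(pred.category.name, pred.bbox.minx, pred.bbox.miny, pred.bbox.maxx, pred.bbox.maxy)
+```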
diff --git a/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md:Zone.Identifier b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/readme.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py new file mode 100755 index 0000000..7ab8441 --- /dev/null +++ b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py @@ -0,0 +1,111 @@ +import argparse +from pathlib import Path + +import cv2 +from sahi import AutoDetectionModel +from sahi.predict import get_sliced_prediction +from sahi.utils.yolov8 import download_yolov8s_model + +from ultralytics.utils.files import increment_path + + +def run(weights='yolov8n.pt', source='test.mp4', view_img=False, save_img=False, exist_ok=False): + """ + Run object detection on a video using YOLOv8 and SAHI. + + Args: + weights (str): Model weights path. + source (str): Video file path. + view_img (bool): Show results. + save_img (bool): Save results. + exist_ok (bool): Overwrite existing files. + """ + + # Check source path + if not Path(source).exists(): + raise FileNotFoundError(f"Source path '{source}' does not exist.") + + yolov8_model_path = f'models/{weights}' + download_yolov8s_model(yolov8_model_path) + detection_model = AutoDetectionModel.from_pretrained(model_type='yolov8', + model_path=yolov8_model_path, + confidence_threshold=0.3, + device='cpu') + + # Video setup + videocapture = cv2.VideoCapture(source) + frame_width, frame_height = int(videocapture.get(3)), int(videocapture.get(4)) + fps, fourcc = int(videocapture.get(5)), cv2.VideoWriter_fourcc(*'mp4v') + + # Output setup + save_dir = increment_path(Path('ultralytics_results_with_sahi') / 'exp', exist_ok) + save_dir.mkdir(parents=True, exist_ok=True) + video_writer = cv2.VideoWriter(str(save_dir / f'{Path(source).stem}.mp4'), fourcc, fps, (frame_width, frame_height)) + + while videocapture.isOpened(): + success, frame = videocapture.read() + if not success: + break + + results = get_sliced_prediction(frame, + detection_model, + slice_height=512, + slice_width=512, + overlap_height_ratio=0.2, + overlap_width_ratio=0.2) + object_prediction_list = results.object_prediction_list + + boxes_list = [] + clss_list = [] + for ind, _ in enumerate(object_prediction_list): + boxes = object_prediction_list[ind].bbox.minx, object_prediction_list[ind].bbox.miny, \ + object_prediction_list[ind].bbox.maxx, object_prediction_list[ind].bbox.maxy + clss = object_prediction_list[ind].category.name + boxes_list.append(boxes) + clss_list.append(clss) + + for box, cls in zip(boxes_list, clss_list): + x1, y1, x2, y2 = box + cv2.rectangle(frame, (int(x1), int(y1)), (int(x2), int(y2)), (56, 56, 255), 2) + label = str(cls) + t_size = cv2.getTextSize(label, 0, fontScale=0.6, thickness=1)[0] + cv2.rectangle(frame, (int(x1), int(y1) - t_size[1] - 3), (int(x1) + t_size[0], int(y1) + 3), (56, 56, 255), + -1) + cv2.putText(frame, + label, (int(x1), int(y1) - 2), + 0, + 0.6, [255, 255, 255], + thickness=1, + lineType=cv2.LINE_AA) + + if view_img: + cv2.imshow(Path(source).stem, frame) + if save_img: + video_writer.write(frame) + + if cv2.waitKey(1) & 0xFF == ord('q'): + break + video_writer.release() + videocapture.release() + cv2.destroyAllWindows() + + +def parse_opt(): + """Parse command line arguments.""" + parser = 
argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default='yolov8n.pt', help='initial weights path') + parser.add_argument('--source', type=str, required=True, help='video file path') + parser.add_argument('--view-img', action='store_true', help='show results') + parser.add_argument('--save-img', action='store_true', help='save results') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + return parser.parse_args() + + +def main(opt): + """Main function.""" + run(**vars(opt)) + + +if __name__ == '__main__': + opt = parse_opt() + main(opt) diff --git a/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py:Zone.Identifier b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-SAHI-Inference-Video/yolov8_sahi.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md new file mode 100755 index 0000000..9327f1f --- /dev/null +++ b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md @@ -0,0 +1,63 @@ +# YOLOv8-Segmentation-ONNXRuntime-Python Demo + +This repository provides a Python demo for performing segmentation with YOLOv8 using ONNX Runtime, highlighting the interoperability of YOLOv8 models without the need for the full PyTorch stack. + +## Features + +- **Framework Agnostic**: Runs segmentation inference purely on ONNX Runtime without importing PyTorch. +- **Efficient Inference**: Supports both FP32 and FP16 precision for ONNX models, catering to different computational needs. +- **Ease of Use**: Utilizes simple command-line arguments for model execution. +- **Broad Compatibility**: Leverages Numpy and OpenCV for image processing, ensuring broad compatibility with various environments. + +## Installation + +Install the required packages using pip. You will need `ultralytics` for exporting YOLOv8-seg ONNX model and using some utility functions, `onnxruntime-gpu` for GPU-accelerated inference, and `opencv-python` for image processing. + +```bash +pip install ultralytics +pip install onnxruntime-gpu # For GPU support +# pip install onnxruntime # Use this instead if you don't have an NVIDIA GPU +pip install numpy +pip install opencv-python +``` + +## Getting Started + +### 1. Export the YOLOv8 ONNX Model + +Export the YOLOv8 segmentation model to ONNX format using the provided `ultralytics` package. + +```bash +yolo export model=yolov8s-seg.pt imgsz=640 format=onnx opset=12 simplify +``` + +### 2. Run Inference + +Perform inference with the exported ONNX model on your images. + +```bash +python main.py --model-path --source +``` + +### Example Output + +After running the command, you should see segmentation results similar to this: + +Segmentation Demo + +## Advanced Usage + +For more advanced usage, including real-time video processing, please refer to the `main.py` script's command-line arguments. + +## Contributing + +We welcome contributions to improve this demo! Please submit issues and pull requests for bug reports, feature requests, or submitting a new algorithm enhancement. + +## License + +This project is licensed under the AGPL-3.0 License - see the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for details. 
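+
+## Programmatic Usage
+
+The demo's `YOLOv8Seg` class can also be driven directly from Python rather than via the CLI. A minimal sketch mirroring the bottom of `main.py` (the model and image paths below are placeholders for your own files):
+
+```python
+import cv2
+
+from main import YOLOv8Seg  # the class defined in this demo's main.py
+
+model = YOLOv8Seg('yolov8s-seg.onnx')  # placeholder: your exported ONNX model
+img = cv2.imread('bus.jpg')  # placeholder input image
+
+# Returns xyxy boxes with scores and class ids, polygon segments, and boolean masks
+boxes, segments, masks = model(img, conf_threshold=0.25, iou_threshold=0.45)
+if len(boxes) > 0:
+    model.draw_and_visualize(img, boxes, segments, vis=False, save=True)  # writes demo.jpg
+```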
+ +## Acknowledgments + +- The YOLOv8-Segmentation-ONNXRuntime-Python demo is contributed by GitHub user [jamjamjon](https://github.com/jamjamjon). +- Thanks to the ONNX Runtime community for providing a robust and efficient inference engine. diff --git a/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md:Zone.Identifier b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/README.md:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py new file mode 100755 index 0000000..b13eab3 --- /dev/null +++ b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py @@ -0,0 +1,321 @@ +import argparse + +import cv2 +import numpy as np +import onnxruntime as ort + +from ultralytics.utils import ASSETS, yaml_load +from ultralytics.utils.checks import check_yaml +from ultralytics.utils.plotting import Colors + + +class YOLOv8Seg: + """YOLOv8 segmentation model.""" + + def __init__(self, onnx_model): + """ + Initialization. + + Args: + onnx_model (str): Path to the ONNX model. + """ + + # Build Ort session + self.session = ort.InferenceSession(onnx_model, + providers=['CUDAExecutionProvider', 'CPUExecutionProvider'] + if ort.get_device() == 'GPU' else ['CPUExecutionProvider']) + + # Numpy dtype: support both FP32 and FP16 onnx model + self.ndtype = np.half if self.session.get_inputs()[0].type == 'tensor(float16)' else np.single + + # Get model width and height(YOLOv8-seg only has one input) + self.model_height, self.model_width = [x.shape for x in self.session.get_inputs()][0][-2:] + + # Load COCO class names + self.classes = yaml_load(check_yaml('coco128.yaml'))['names'] + + # Create color palette + self.color_palette = Colors() + + def __call__(self, im0, conf_threshold=0.4, iou_threshold=0.45, nm=32): + """ + The whole pipeline: pre-process -> inference -> post-process. + + Args: + im0 (Numpy.ndarray): original input image. + conf_threshold (float): confidence threshold for filtering predictions. + iou_threshold (float): iou threshold for NMS. + nm (int): the number of masks. + + Returns: + boxes (List): list of bounding boxes. + segments (List): list of segments. + masks (np.ndarray): [N, H, W], output masks. + """ + + # Pre-process + im, ratio, (pad_w, pad_h) = self.preprocess(im0) + + # Ort inference + preds = self.session.run(None, {self.session.get_inputs()[0].name: im}) + + # Post-process + boxes, segments, masks = self.postprocess(preds, + im0=im0, + ratio=ratio, + pad_w=pad_w, + pad_h=pad_h, + conf_threshold=conf_threshold, + iou_threshold=iou_threshold, + nm=nm) + return boxes, segments, masks + + def preprocess(self, img): + """ + Pre-processes the input image. + + Args: + img (Numpy.ndarray): image about to be processed. + + Returns: + img_process (Numpy.ndarray): image preprocessed for inference. + ratio (tuple): width, height ratios in letterbox. + pad_w (float): width padding in letterbox. + pad_h (float): height padding in letterbox. 
+ """ + + # Resize and pad input image using letterbox() (Borrowed from Ultralytics) + shape = img.shape[:2] # original image shape + new_shape = (self.model_height, self.model_width) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + ratio = r, r + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) + pad_w, pad_h = (new_shape[1] - new_unpad[0]) / 2, (new_shape[0] - new_unpad[1]) / 2 # wh padding + if shape[::-1] != new_unpad: # resize + img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + top, bottom = int(round(pad_h - 0.1)), int(round(pad_h + 0.1)) + left, right = int(round(pad_w - 0.1)), int(round(pad_w + 0.1)) + img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=(114, 114, 114)) + + # Transforms: HWC to CHW -> BGR to RGB -> div(255) -> contiguous -> add axis(optional) + img = np.ascontiguousarray(np.einsum('HWC->CHW', img)[::-1], dtype=self.ndtype) / 255.0 + img_process = img[None] if len(img.shape) == 3 else img + return img_process, ratio, (pad_w, pad_h) + + def postprocess(self, preds, im0, ratio, pad_w, pad_h, conf_threshold, iou_threshold, nm=32): + """ + Post-process the prediction. + + Args: + preds (Numpy.ndarray): predictions come from ort.session.run(). + im0 (Numpy.ndarray): [h, w, c] original input image. + ratio (tuple): width, height ratios in letterbox. + pad_w (float): width padding in letterbox. + pad_h (float): height padding in letterbox. + conf_threshold (float): conf threshold. + iou_threshold (float): iou threshold. + nm (int): the number of masks. + + Returns: + boxes (List): list of bounding boxes. + segments (List): list of segments. + masks (np.ndarray): [N, H, W], output masks. + """ + x, protos = preds[0], preds[1] # Two outputs: predictions and protos + + # Transpose the first output: (Batch_size, xywh_conf_cls_nm, Num_anchors) -> (Batch_size, Num_anchors, xywh_conf_cls_nm) + x = np.einsum('bcn->bnc', x) + + # Predictions filtering by conf-threshold + x = x[np.amax(x[..., 4:-nm], axis=-1) > conf_threshold] + + # Create a new matrix which merge these(box, score, cls, nm) into one + # For more details about `numpy.c_()`: https://numpy.org/doc/1.26/reference/generated/numpy.c_.html + x = np.c_[x[..., :4], np.amax(x[..., 4:-nm], axis=-1), np.argmax(x[..., 4:-nm], axis=-1), x[..., -nm:]] + + # NMS filtering + x = x[cv2.dnn.NMSBoxes(x[:, :4], x[:, 4], conf_threshold, iou_threshold)] + + # Decode and return + if len(x) > 0: + + # Bounding boxes format change: cxcywh -> xyxy + x[..., [0, 1]] -= x[..., [2, 3]] / 2 + x[..., [2, 3]] += x[..., [0, 1]] + + # Rescales bounding boxes from model shape(model_height, model_width) to the shape of original image + x[..., :4] -= [pad_w, pad_h, pad_w, pad_h] + x[..., :4] /= min(ratio) + + # Bounding boxes boundary clamp + x[..., [0, 2]] = x[:, [0, 2]].clip(0, im0.shape[1]) + x[..., [1, 3]] = x[:, [1, 3]].clip(0, im0.shape[0]) + + # Process masks + masks = self.process_mask(protos[0], x[:, 6:], x[:, :4], im0.shape) + + # Masks -> Segments(contours) + segments = self.masks2segments(masks) + return x[..., :6], segments, masks # boxes, segments, masks + else: + return [], [], [] + + @staticmethod + def masks2segments(masks): + """ + It takes a list of masks(n,h,w) and returns a list of segments(n,xy) (Borrowed from + https://github.com/ultralytics/ultralytics/blob/465df3024f44fa97d4fad9986530d5a13cdabdca/ultralytics/utils/ops.py#L750) + + Args: + masks (numpy.ndarray): the output of the model, which is a tensor of shape (batch_size, 160, 160). 
+ + Returns: + segments (List): list of segment masks. + """ + segments = [] + for x in masks.astype('uint8'): + c = cv2.findContours(x, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[0] # CHAIN_APPROX_SIMPLE + if c: + c = np.array(c[np.array([len(x) for x in c]).argmax()]).reshape(-1, 2) + else: + c = np.zeros((0, 2)) # no segments found + segments.append(c.astype('float32')) + return segments + + @staticmethod + def crop_mask(masks, boxes): + """ + It takes a mask and a bounding box, and returns a mask that is cropped to the bounding box. (Borrowed from + https://github.com/ultralytics/ultralytics/blob/465df3024f44fa97d4fad9986530d5a13cdabdca/ultralytics/utils/ops.py#L599) + + Args: + masks (Numpy.ndarray): [n, h, w] tensor of masks. + boxes (Numpy.ndarray): [n, 4] tensor of bbox coordinates in relative point form. + + Returns: + (Numpy.ndarray): The masks are being cropped to the bounding box. + """ + n, h, w = masks.shape + x1, y1, x2, y2 = np.split(boxes[:, :, None], 4, 1) + r = np.arange(w, dtype=x1.dtype)[None, None, :] + c = np.arange(h, dtype=x1.dtype)[None, :, None] + return masks * ((r >= x1) * (r < x2) * (c >= y1) * (c < y2)) + + def process_mask(self, protos, masks_in, bboxes, im0_shape): + """ + Takes the output of the mask head, and applies the mask to the bounding boxes. This produces masks of higher quality + but is slower. (Borrowed from https://github.com/ultralytics/ultralytics/blob/465df3024f44fa97d4fad9986530d5a13cdabdca/ultralytics/utils/ops.py#L618) + + Args: + protos (numpy.ndarray): [mask_dim, mask_h, mask_w]. + masks_in (numpy.ndarray): [n, mask_dim], n is number of masks after nms. + bboxes (numpy.ndarray): bboxes re-scaled to original image shape. + im0_shape (tuple): the size of the input image (h,w,c). + + Returns: + (numpy.ndarray): The upsampled masks. + """ + c, mh, mw = protos.shape + masks = np.matmul(masks_in, protos.reshape((c, -1))).reshape((-1, mh, mw)).transpose(1, 2, 0) # HWN + masks = np.ascontiguousarray(masks) + masks = self.scale_mask(masks, im0_shape) # re-scale mask from P3 shape to original input image shape + masks = np.einsum('HWN -> NHW', masks) # HWN -> NHW + masks = self.crop_mask(masks, bboxes) + return np.greater(masks, 0.5) + + @staticmethod + def scale_mask(masks, im0_shape, ratio_pad=None): + """ + Takes a mask, and resizes it to the original image size. (Borrowed from + https://github.com/ultralytics/ultralytics/blob/465df3024f44fa97d4fad9986530d5a13cdabdca/ultralytics/utils/ops.py#L305) + + Args: + masks (np.ndarray): resized and padded masks/images, [h, w, num]/[h, w, 3]. + im0_shape (tuple): the original image shape. + ratio_pad (tuple): the ratio of the padding to the original image. + + Returns: + masks (np.ndarray): The masks that are being returned. 
+ """ + im1_shape = masks.shape[:2] + if ratio_pad is None: # calculate from im0_shape + gain = min(im1_shape[0] / im0_shape[0], im1_shape[1] / im0_shape[1]) # gain = old / new + pad = (im1_shape[1] - im0_shape[1] * gain) / 2, (im1_shape[0] - im0_shape[0] * gain) / 2 # wh padding + else: + pad = ratio_pad[1] + + # Calculate tlbr of mask + top, left = int(round(pad[1] - 0.1)), int(round(pad[0] - 0.1)) # y, x + bottom, right = int(round(im1_shape[0] - pad[1] + 0.1)), int(round(im1_shape[1] - pad[0] + 0.1)) + if len(masks.shape) < 2: + raise ValueError(f'"len of masks shape" should be 2 or 3, but got {len(masks.shape)}') + masks = masks[top:bottom, left:right] + masks = cv2.resize(masks, (im0_shape[1], im0_shape[0]), + interpolation=cv2.INTER_LINEAR) # INTER_CUBIC would be better + if len(masks.shape) == 2: + masks = masks[:, :, None] + return masks + + def draw_and_visualize(self, im, bboxes, segments, vis=False, save=True): + """ + Draw and visualize results. + + Args: + im (np.ndarray): original image, shape [h, w, c]. + bboxes (numpy.ndarray): [n, 4], n is number of bboxes. + segments (List): list of segment masks. + vis (bool): imshow using OpenCV. + save (bool): save image annotated. + + Returns: + None + """ + + # Draw rectangles and polygons + im_canvas = im.copy() + for (*box, conf, cls_), segment in zip(bboxes, segments): + # draw contour and fill mask + cv2.polylines(im, np.int32([segment]), True, (255, 255, 255), 2) # white borderline + cv2.fillPoly(im_canvas, np.int32([segment]), self.color_palette(int(cls_), bgr=True)) + + # draw bbox rectangle + cv2.rectangle(im, (int(box[0]), int(box[1])), (int(box[2]), int(box[3])), + self.color_palette(int(cls_), bgr=True), 1, cv2.LINE_AA) + cv2.putText(im, f'{self.classes[cls_]}: {conf:.3f}', (int(box[0]), int(box[1] - 9)), + cv2.FONT_HERSHEY_SIMPLEX, 0.7, self.color_palette(int(cls_), bgr=True), 2, cv2.LINE_AA) + + # Mix image + im = cv2.addWeighted(im_canvas, 0.3, im, 0.7, 0) + + # Show image + if vis: + cv2.imshow('demo', im) + cv2.waitKey(0) + cv2.destroyAllWindows() + + # Save image + if save: + cv2.imwrite('demo.jpg', im) + + +if __name__ == '__main__': + # Create an argument parser to handle command-line arguments + parser = argparse.ArgumentParser() + parser.add_argument('--model', type=str, required=True, help='Path to ONNX model') + parser.add_argument('--source', type=str, default=str(ASSETS / 'bus.jpg'), help='Path to input image') + parser.add_argument('--conf', type=float, default=0.25, help='Confidence threshold') + parser.add_argument('--iou', type=float, default=0.45, help='NMS IoU threshold') + args = parser.parse_args() + + # Build model + model = YOLOv8Seg(args.model) + + # Read image by OpenCV + img = cv2.imread(args.source) + + # Inference + boxes, segments, _ = model(img, conf_threshold=args.conf, iou_threshold=args.iou) + + # Draw bboxes and polygons + if len(boxes) > 0: + model.draw_and_visualize(img, boxes, segments, vis=False, save=True) diff --git a/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py:Zone.Identifier b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/YOLOv8-Segmentation-ONNXRuntime-Python/main.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/hub.ipynb b/ultralytics/examples/hub.ipynb new file mode 100755 index 0000000..5d8be2a --- /dev/null +++ b/ultralytics/examples/hub.ipynb @@ -0,0 +1,106 @@ +{ + "nbformat": 4, + 
"nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Ultralytics HUB", + "provenance": [] + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "FIzICjaph_Wy" + }, + "source": [ + "\n", + "\n", + "\n", + "
\n", + "\n", + "[ไธญๆ–‡](https://docs.ultralytics.com/zh/) | [ํ•œ๊ตญ์–ด](https://docs.ultralytics.com/ko/) | [ๆ—ฅๆœฌ่ชž](https://docs.ultralytics.com/ja/) | [ะ ัƒััะบะธะน](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Franรงais](https://docs.ultralytics.com/fr/) | [Espaรฑol](https://docs.ultralytics.com/es/) | [Portuguรชs](https://docs.ultralytics.com/pt/) | [เคนเคฟเคจเฅเคฆเฅ€](https://docs.ultralytics.com/hi/) | [ุงู„ุนุฑุจูŠุฉ](https://docs.ultralytics.com/ar/)\n", + "\n", + " \n", + " \"CI\n", + " \n", + " \"Open\n", + "\n", + "Welcome to the [Ultralytics](https://ultralytics.com/) HUB notebook!\n", + "\n", + "This notebook allows you to train [YOLOv5](https://github.com/ultralytics/yolov5) and [YOLOv8](https://github.com/ultralytics/ultralytics) ๐Ÿš€ models using [HUB](https://hub.ultralytics.com/). Please browse the HUB Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eRQ2ow94MiOv" + }, + "source": [ + "# Setup\n", + "\n", + "Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) and check software and hardware." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "FyDnXd-n4c7Y", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "01e34b44-a26f-4dbc-a5a1-6e29bca01a1b" + }, + "source": [ + "%pip install ultralytics # install\n", + "from ultralytics import YOLO, checks, hub\n", + "checks() # checks" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Ultralytics YOLOv8.0.210 ๐Ÿš€ Python-3.10.12 torch-2.0.1+cu118 CUDA:0 (Tesla T4, 15102MiB)\n", + "Setup complete โœ… (2 CPUs, 12.7 GB RAM, 24.4/78.2 GB disk)\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cQ9BwaAqxAm4" + }, + "source": [ + "# Start\n", + "\n", + "Login with your [API key](https://hub.ultralytics.com/settings?tab=api+keys), select your YOLO ๐Ÿš€ model and start training!" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "XSlZaJ9Iw_iZ" + }, + "source": [ + "hub.login('API_KEY') # use your API key\n", + "\n", + "model = YOLO('https://hub.ultralytics.com/MODEL_ID') # use your model URL\n", + "results = model.train() # train model" + ], + "execution_count": null, + "outputs": [] + } + ] +} diff --git a/ultralytics/examples/hub.ipynb:Zone.Identifier b/ultralytics/examples/hub.ipynb:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/hub.ipynb:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/examples/tutorial.ipynb b/ultralytics/examples/tutorial.ipynb new file mode 100755 index 0000000..d3bbafe --- /dev/null +++ b/ultralytics/examples/tutorial.ipynb @@ -0,0 +1,616 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "YOLOv8 Tutorial", + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "accelerator": "GPU" + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "t6MPjfT5NrKQ" + }, + "source": [ + "
\n", + "\n", + " \n", + " \n", + "\n", + " [ไธญๆ–‡](https://docs.ultralytics.com/zh/) | [ํ•œ๊ตญ์–ด](https://docs.ultralytics.com/ko/) | [ๆ—ฅๆœฌ่ชž](https://docs.ultralytics.com/ja/) | [ะ ัƒััะบะธะน](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Franรงais](https://docs.ultralytics.com/fr/) | [Espaรฑol](https://docs.ultralytics.com/es/) | [Portuguรชs](https://docs.ultralytics.com/pt/) | [เคนเคฟเคจเฅเคฆเฅ€](https://docs.ultralytics.com/hi/) | [ุงู„ุนุฑุจูŠุฉ](https://docs.ultralytics.com/ar/)\n", + "\n", + " \"Run\n", + " \"Open\n", + " \"Open\n", + "\n", + "Welcome to the Ultralytics YOLOv8 ๐Ÿš€ notebook! YOLOv8 is the latest version of the YOLO (You Only Look Once) AI models developed by Ultralytics. This notebook serves as the starting point for exploring the various resources available to help you get started with YOLOv8 and understand its features and capabilities.\n", + "\n", + "YOLOv8 models are fast, accurate, and easy to use, making them ideal for various object detection and image segmentation tasks. They can be trained on large datasets and run on diverse hardware platforms, from CPUs to GPUs.\n", + "\n", + "We hope that the resources in this notebook will help you get the most out of YOLOv8. Please browse the YOLOv8 Docs for details, raise an issue on GitHub for support, and join our Discord community for questions and discussions!\n", + "\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7mGmQbAO5pQb" + }, + "source": [ + "# Setup\n", + "\n", + "Pip install `ultralytics` and [dependencies](https://github.com/ultralytics/ultralytics/blob/main/requirements.txt) and check software and hardware." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "wbvMlHd_QwMG", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "27ca383c-0a97-4679-f1c5-ba843f033de7" + }, + "source": [ + "%pip install ultralytics\n", + "import ultralytics\n", + "ultralytics.checks()" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "Ultralytics YOLOv8.0.145 ๐Ÿš€ Python-3.10.6 torch-2.0.1+cu118 CUDA:0 (Tesla T4, 15102MiB)\n", + "Setup complete โœ… (2 CPUs, 12.7 GB RAM, 24.2/78.2 GB disk)\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "4JnkELT0cIJg" + }, + "source": [ + "# 1. Predict\n", + "\n", + "YOLOv8 may be used directly in the Command Line Interface (CLI) with a `yolo` command for a variety of tasks and modes and accepts additional arguments, i.e. `imgsz=640`. See a full list of available `yolo` [arguments](https://docs.ultralytics.com/usage/cfg/) and other details in the [YOLOv8 Predict Docs](https://docs.ultralytics.com/modes/train/).\n" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "zR9ZbuQCH7FX", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "64489d1f-e71a-44b5-92f6-2088781ca096" + }, + "source": [ + "# Run inference on an image with YOLOv8n\n", + "!yolo predict model=yolov8n.pt source='https://ultralytics.com/images/zidane.jpg'" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Downloading https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8n.pt to 'yolov8n.pt'...\n", + "100% 6.23M/6.23M [00:00<00:00, 77.2MB/s]\n", + "Ultralytics YOLOv8.0.145 ๐Ÿš€ Python-3.10.6 torch-2.0.1+cu118 CUDA:0 (Tesla T4, 15102MiB)\n", + "YOLOv8n summary (fused): 168 layers, 3151904 parameters, 0 gradients\n", + "\n", + "Downloading https://ultralytics.com/images/zidane.jpg to 'zidane.jpg'...\n", + "100% 165k/165k [00:00<00:00, 7.46MB/s]\n", + "image 1/1 /content/zidane.jpg: 384x640 2 persons, 1 tie, 365.8ms\n", + "Speed: 13.7ms preprocess, 365.8ms inference, 431.7ms postprocess per image at shape (1, 3, 384, 640)\n", + "Results saved to \u001b[1mruns/detect/predict\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "hkAzDWJ7cWTr" + }, + "source": [ + "        \n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0eq1SMWl6Sfn" + }, + "source": [ + "# 2. Val\n", + "Validate a model's accuracy on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset's `val` or `test` splits. The latest YOLOv8 [models](https://github.com/ultralytics/ultralytics#models) are downloaded automatically the first time they are used. See [YOLOv8 Val Docs](https://docs.ultralytics.com/modes/val/) for more information." 
+ ] + }, + { + "cell_type": "code", + "metadata": { + "id": "WQPtK1QYVaD_" + }, + "source": [ + "# Download COCO val\n", + "import torch\n", + "torch.hub.download_url_to_file('https://ultralytics.com/assets/coco2017val.zip', 'tmp.zip') # download (780M - 5000 images)\n", + "!unzip -q tmp.zip -d datasets && rm tmp.zip # unzip" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "X58w8JLpMnjH", + "outputId": "e3aacd98-ceca-49b7-e112-a0c25979ad6c", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "source": [ + "# Validate YOLOv8n on COCO8 val\n", + "!yolo val model=yolov8n.pt data=coco8.yaml" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Ultralytics YOLOv8.0.145 ๐Ÿš€ Python-3.10.6 torch-2.0.1+cu118 CUDA:0 (Tesla T4, 15102MiB)\n", + "YOLOv8n summary (fused): 168 layers, 3151904 parameters, 0 gradients\n", + "\n", + "Dataset 'coco8.yaml' images not found โš ๏ธ, missing path '/content/datasets/coco8/images/val'\n", + "Downloading https://ultralytics.com/assets/coco8.zip to '/content/datasets/coco8.zip'...\n", + "100% 433k/433k [00:00<00:00, 12.4MB/s]\n", + "Unzipping /content/datasets/coco8.zip to /content/datasets...\n", + "Dataset download success โœ… (0.7s), saved to \u001b[1m/content/datasets\u001b[0m\n", + "\n", + "Downloading https://ultralytics.com/assets/Arial.ttf to '/root/.config/Ultralytics/Arial.ttf'...\n", + "100% 755k/755k [00:00<00:00, 17.5MB/s]\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /content/datasets/coco8/labels/val... 4 images, 0 backgrounds, 0 corrupt: 100% 4/4 [00:00<00:00, 276.04it/s]\n", + "\u001b[34m\u001b[1mval: \u001b[0mNew cache created: /content/datasets/coco8/labels/val.cache\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100% 1/1 [00:03<00:00, 3.84s/it]\n", + " all 4 17 0.621 0.833 0.888 0.63\n", + " person 4 10 0.721 0.5 0.519 0.269\n", + " dog 4 1 0.37 1 0.995 0.597\n", + " horse 4 2 0.751 1 0.995 0.631\n", + " elephant 4 2 0.505 0.5 0.828 0.394\n", + " umbrella 4 1 0.564 1 0.995 0.995\n", + " potted plant 4 1 0.814 1 0.995 0.895\n", + "Speed: 0.3ms preprocess, 78.7ms inference, 0.0ms loss, 65.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/val\u001b[0m\n" + ] + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ZY2VXXXu74w5" + }, + "source": [ + "# 3. Train\n", + "\n", + "

\n", + "\n", + "Train YOLOv8 on [Detect](https://docs.ultralytics.com/tasks/detect/), [Segment](https://docs.ultralytics.com/tasks/segment/), [Classify](https://docs.ultralytics.com/tasks/classify/) and [Pose](https://docs.ultralytics.com/tasks/pose/) datasets. See [YOLOv8 Train Docs](https://docs.ultralytics.com/modes/train/) for more information." + ] + }, + { + "cell_type": "code", + "source": [ + "#@title Select YOLOv8 ๐Ÿš€ logger {run: 'auto'}\n", + "logger = 'Comet' #@param ['Comet', 'TensorBoard']\n", + "\n", + "if logger == 'Comet':\n", + " %pip install -q comet_ml\n", + " import comet_ml; comet_ml.init()\n", + "elif logger == 'TensorBoard':\n", + " %load_ext tensorboard\n", + " %tensorboard --logdir ." + ], + "metadata": { + "id": "ktegpM42AooT" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "1NcFxRcFdJ_O", + "outputId": "b750f2fe-c4d9-4764-b8d5-ed7bd920697b", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "source": [ + "# Train YOLOv8n on COCO8 for 3 epochs\n", + "!yolo train model=yolov8n.pt data=coco8.yaml epochs=3 imgsz=640" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Ultralytics YOLOv8.0.145 ๐Ÿš€ Python-3.10.6 torch-2.0.1+cu118 CUDA:0 (Tesla T4, 15102MiB)\n", + "\u001b[34m\u001b[1mengine/trainer: \u001b[0mtask=detect, mode=train, model=yolov8n.pt, data=coco8.yaml, epochs=3, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=None, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, tracker=botsort.yaml, save_dir=runs/detect/train\n", + "\n", + " from n params module arguments \n", + " 0 -1 1 464 ultralytics.nn.modules.conv.Conv [3, 16, 3, 2] \n", + " 1 -1 1 4672 ultralytics.nn.modules.conv.Conv [16, 32, 3, 2] \n", + " 2 -1 1 7360 ultralytics.nn.modules.block.C2f [32, 32, 1, True] \n", + " 3 -1 1 18560 ultralytics.nn.modules.conv.Conv [32, 64, 3, 2] \n", + " 4 -1 2 49664 ultralytics.nn.modules.block.C2f [64, 64, 2, True] \n", + " 5 -1 1 73984 ultralytics.nn.modules.conv.Conv [64, 128, 3, 2] \n", + " 6 -1 2 197632 ultralytics.nn.modules.block.C2f [128, 128, 2, True] \n", + " 7 -1 1 295424 ultralytics.nn.modules.conv.Conv [128, 256, 3, 2] \n", + " 8 -1 1 460288 ultralytics.nn.modules.block.C2f [256, 256, 1, True] \n", + " 9 -1 1 164608 ultralytics.nn.modules.block.SPPF [256, 256, 5] \n", + " 10 -1 1 0 
torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n", + " 11 [-1, 6] 1 0 ultralytics.nn.modules.conv.Concat [1] \n", + " 12 -1 1 148224 ultralytics.nn.modules.block.C2f [384, 128, 1] \n", + " 13 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest'] \n", + " 14 [-1, 4] 1 0 ultralytics.nn.modules.conv.Concat [1] \n", + " 15 -1 1 37248 ultralytics.nn.modules.block.C2f [192, 64, 1] \n", + " 16 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2] \n", + " 17 [-1, 12] 1 0 ultralytics.nn.modules.conv.Concat [1] \n", + " 18 -1 1 123648 ultralytics.nn.modules.block.C2f [192, 128, 1] \n", + " 19 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2] \n", + " 20 [-1, 9] 1 0 ultralytics.nn.modules.conv.Concat [1] \n", + " 21 -1 1 493056 ultralytics.nn.modules.block.C2f [384, 256, 1] \n", + " 22 [15, 18, 21] 1 897664 ultralytics.nn.modules.head.Detect [80, [64, 128, 256]] \n", + "Model summary: 225 layers, 3157200 parameters, 3157184 gradients\n", + "\n", + "Transferred 355/355 items from pretrained weights\n", + "\u001b[34m\u001b[1mTensorBoard: \u001b[0mStart with 'tensorboard --logdir runs/detect/train', view at http://localhost:6006/\n", + "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", + "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed โœ…\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /content/datasets/coco8/labels/train... 4 images, 0 backgrounds, 0 corrupt: 100% 4/4 [00:00<00:00, 860.11it/s]\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mNew cache created: /content/datasets/coco8/labels/train.cache\n", + "\u001b[34m\u001b[1malbumentations: \u001b[0mBlur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /content/datasets/coco8/labels/val.cache... 4 images, 0 backgrounds, 0 corrupt: 100% 4/4 [00:00\n" + ], + "metadata": { + "id": "Phm9ccmOKye5" + } + }, + { + "cell_type": "markdown", + "source": [ + "## 1. Detection\n", + "\n", + "YOLOv8 _detection_ models have no suffix and are the default YOLOv8 models, i.e. `yolov8n.pt` and are pretrained on COCO. See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for full details.\n" + ], + "metadata": { + "id": "yq26lwpYK1lq" + } + }, + { + "cell_type": "code", + "source": [ + "# Load YOLOv8n, train it on COCO128 for 3 epochs and predict an image with it\n", + "from ultralytics import YOLO\n", + "\n", + "model = YOLO('yolov8n.pt') # load a pretrained YOLOv8n detection model\n", + "model.train(data='coco128.yaml', epochs=3) # train the model\n", + "model('https://ultralytics.com/images/bus.jpg') # predict on an image" + ], + "metadata": { + "id": "8Go5qqS9LbC5" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## 2. Segmentation\n", + "\n", + "YOLOv8 _segmentation_ models use the `-seg` suffix, i.e. `yolov8n-seg.pt` and are pretrained on COCO. 
See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for full details.\n" + ], + "metadata": { + "id": "7ZW58jUzK66B" + } + }, + { + "cell_type": "code", + "source": [ + "# Load YOLOv8n-seg, train it on COCO128-seg for 3 epochs and predict an image with it\n", + "from ultralytics import YOLO\n", + "\n", + "model = YOLO('yolov8n-seg.pt') # load a pretrained YOLOv8n segmentation model\n", + "model.train(data='coco128-seg.yaml', epochs=3) # train the model\n", + "model('https://ultralytics.com/images/bus.jpg') # predict on an image" + ], + "metadata": { + "id": "WFPJIQl_L5HT" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## 3. Classification\n", + "\n", + "YOLOv8 _classification_ models use the `-cls` suffix, i.e. `yolov8n-cls.pt` and are pretrained on ImageNet. See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for full details.\n" + ], + "metadata": { + "id": "ax3p94VNK9zR" + } + }, + { + "cell_type": "code", + "source": [ + "# Load YOLOv8n-cls, train it on mnist160 for 3 epochs and predict an image with it\n", + "from ultralytics import YOLO\n", + "\n", + "model = YOLO('yolov8n-cls.pt') # load a pretrained YOLOv8n classification model\n", + "model.train(data='mnist160', epochs=3) # train the model\n", + "model('https://ultralytics.com/images/bus.jpg') # predict on an image" + ], + "metadata": { + "id": "5q9Zu6zlL5rS" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "## 4. Pose\n", + "\n", + "YOLOv8 _pose_ models use the `-pose` suffix, i.e. `yolov8n-pose.pt` and are pretrained on COCO Keypoints. See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for full details." + ], + "metadata": { + "id": "SpIaFLiO11TG" + } + }, + { + "cell_type": "code", + "source": [ + "# Load YOLOv8n-pose, train it on COCO8-pose for 3 epochs and predict an image with it\n", + "from ultralytics import YOLO\n", + "\n", + "model = YOLO('yolov8n-pose.pt') # load a pretrained YOLOv8n pose model\n", + "model.train(data='coco8-pose.yaml', epochs=3) # train the model\n", + "model('https://ultralytics.com/images/bus.jpg') # predict on an image" + ], + "metadata": { + "id": "si4aKFNg19vX" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "IEijrePND_2I" + }, + "source": [ + "# Appendix\n", + "\n", + "Additional content below.\n",
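+ "\n", + "As a Python-API counterpart to the CLI cells below, here is a minimal multi-model validation sketch (an illustrative addition, not an original cell; `coco8.yaml` is used instead of full COCO to keep runtime small):\n", + "\n", + "```\n", + "from ultralytics import YOLO\n", + "\n", + "# Validate several YOLOv8 sizes and print their mAP50-95\n", + "for size in 'nsml':\n", + "    metrics = YOLO(f'yolov8{size}.pt').val(data='coco8.yaml')\n", + "    print(f'yolov8{size}: mAP50-95 = {metrics.box.map:.3f}')\n", + "```"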
+ ] + }, + { + "cell_type": "code", + "source": [ + "# Pip install from source\n", + "!pip install git+https://github.com/ultralytics/ultralytics@main" + ], + "metadata": { + "id": "pIdE6i8C3LYp" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# Git clone and run tests on main branch\n", + "!git clone https://github.com/ultralytics/ultralytics -b main\n", + "%pip install -qe ultralytics" + ], + "metadata": { + "id": "uRKlwxSJdhd1" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# Run tests (Git clone only)\n", + "!pytest ultralytics/tests" + ], + "metadata": { + "id": "GtPlh7mcCGZX" + }, + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "# Validate multiple models\n", + "for x in 'nsmlx':\n", + " !yolo val model=yolov8{x}.pt data=coco.yaml" + ], + "metadata": { + "id": "Wdc6t_bfzDDk" + }, + "execution_count": null, + "outputs": [] + } + ] +} diff --git a/ultralytics/examples/tutorial.ipynb:Zone.Identifier b/ultralytics/examples/tutorial.ipynb:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/examples/tutorial.ipynb:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/export.py b/ultralytics/export.py new file mode 100755 index 0000000..da71d66 --- /dev/null +++ b/ultralytics/export.py @@ -0,0 +1,11 @@ +from ultralytics import YOLO + +# Load a model +#model = YOLO("yolov8n.yaml") # build a new model from scratch +model = YOLO("yolov8s-seg.pt") # load a pretrained model (recommended for training) + +# Use the model +#model.train(data="coco128.yaml", epochs=3) # train the model +#metrics = model.val() # evaluate model performance on the validation set +results = model("https://ultralytics.com/images/bus.jpg", save=True) # predict on an image +success = model.export(format="onnx") # export the model to ONNX format \ No newline at end of file diff --git a/ultralytics/export.py:Zone.Identifier b/ultralytics/export.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/export.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00000.png b/ultralytics/ours_15000/gt/00000.png new file mode 100755 index 0000000..9ad4971 Binary files /dev/null and b/ultralytics/ours_15000/gt/00000.png differ diff --git a/ultralytics/ours_15000/gt/00000.png:Zone.Identifier b/ultralytics/ours_15000/gt/00000.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00000.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00001.png b/ultralytics/ours_15000/gt/00001.png new file mode 100755 index 0000000..d23acba Binary files /dev/null and b/ultralytics/ours_15000/gt/00001.png differ diff --git a/ultralytics/ours_15000/gt/00001.png:Zone.Identifier b/ultralytics/ours_15000/gt/00001.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00001.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00002.png b/ultralytics/ours_15000/gt/00002.png new file mode 100755 index 0000000..600494b Binary files /dev/null and b/ultralytics/ours_15000/gt/00002.png differ diff --git a/ultralytics/ours_15000/gt/00002.png:Zone.Identifier b/ultralytics/ours_15000/gt/00002.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++
b/ultralytics/ours_15000/gt/00002.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00003.png b/ultralytics/ours_15000/gt/00003.png new file mode 100755 index 0000000..afa812a Binary files /dev/null and b/ultralytics/ours_15000/gt/00003.png differ diff --git a/ultralytics/ours_15000/gt/00003.png:Zone.Identifier b/ultralytics/ours_15000/gt/00003.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00003.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00004.png b/ultralytics/ours_15000/gt/00004.png new file mode 100755 index 0000000..084eb7f Binary files /dev/null and b/ultralytics/ours_15000/gt/00004.png differ diff --git a/ultralytics/ours_15000/gt/00004.png:Zone.Identifier b/ultralytics/ours_15000/gt/00004.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00004.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00005.png b/ultralytics/ours_15000/gt/00005.png new file mode 100755 index 0000000..561c242 Binary files /dev/null and b/ultralytics/ours_15000/gt/00005.png differ diff --git a/ultralytics/ours_15000/gt/00005.png:Zone.Identifier b/ultralytics/ours_15000/gt/00005.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00005.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00006.png b/ultralytics/ours_15000/gt/00006.png new file mode 100755 index 0000000..400a347 Binary files /dev/null and b/ultralytics/ours_15000/gt/00006.png differ diff --git a/ultralytics/ours_15000/gt/00006.png:Zone.Identifier b/ultralytics/ours_15000/gt/00006.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00006.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00007.png b/ultralytics/ours_15000/gt/00007.png new file mode 100755 index 0000000..e2eb913 Binary files /dev/null and b/ultralytics/ours_15000/gt/00007.png differ diff --git a/ultralytics/ours_15000/gt/00007.png:Zone.Identifier b/ultralytics/ours_15000/gt/00007.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00007.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00008.png b/ultralytics/ours_15000/gt/00008.png new file mode 100755 index 0000000..4c5ebdb Binary files /dev/null and b/ultralytics/ours_15000/gt/00008.png differ diff --git a/ultralytics/ours_15000/gt/00008.png:Zone.Identifier b/ultralytics/ours_15000/gt/00008.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00008.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00009.png b/ultralytics/ours_15000/gt/00009.png new file mode 100755 index 0000000..89780c0 Binary files /dev/null and b/ultralytics/ours_15000/gt/00009.png differ diff --git a/ultralytics/ours_15000/gt/00009.png:Zone.Identifier b/ultralytics/ours_15000/gt/00009.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00009.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00010.png b/ultralytics/ours_15000/gt/00010.png new file 
mode 100755 index 0000000..a73a4ae Binary files /dev/null and b/ultralytics/ours_15000/gt/00010.png differ diff --git a/ultralytics/ours_15000/gt/00010.png:Zone.Identifier b/ultralytics/ours_15000/gt/00010.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00010.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00011.png b/ultralytics/ours_15000/gt/00011.png new file mode 100755 index 0000000..91620e2 Binary files /dev/null and b/ultralytics/ours_15000/gt/00011.png differ diff --git a/ultralytics/ours_15000/gt/00011.png:Zone.Identifier b/ultralytics/ours_15000/gt/00011.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00011.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00012.png b/ultralytics/ours_15000/gt/00012.png new file mode 100755 index 0000000..01f6a2a Binary files /dev/null and b/ultralytics/ours_15000/gt/00012.png differ diff --git a/ultralytics/ours_15000/gt/00012.png:Zone.Identifier b/ultralytics/ours_15000/gt/00012.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00012.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00013.png b/ultralytics/ours_15000/gt/00013.png new file mode 100755 index 0000000..22335e1 Binary files /dev/null and b/ultralytics/ours_15000/gt/00013.png differ diff --git a/ultralytics/ours_15000/gt/00013.png:Zone.Identifier b/ultralytics/ours_15000/gt/00013.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00013.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00014.png b/ultralytics/ours_15000/gt/00014.png new file mode 100755 index 0000000..601bcf0 Binary files /dev/null and b/ultralytics/ours_15000/gt/00014.png differ diff --git a/ultralytics/ours_15000/gt/00014.png:Zone.Identifier b/ultralytics/ours_15000/gt/00014.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00014.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00015.png b/ultralytics/ours_15000/gt/00015.png new file mode 100755 index 0000000..070d16a Binary files /dev/null and b/ultralytics/ours_15000/gt/00015.png differ diff --git a/ultralytics/ours_15000/gt/00015.png:Zone.Identifier b/ultralytics/ours_15000/gt/00015.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00015.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00016.png b/ultralytics/ours_15000/gt/00016.png new file mode 100755 index 0000000..4a46965 Binary files /dev/null and b/ultralytics/ours_15000/gt/00016.png differ diff --git a/ultralytics/ours_15000/gt/00016.png:Zone.Identifier b/ultralytics/ours_15000/gt/00016.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00016.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00017.png b/ultralytics/ours_15000/gt/00017.png new file mode 100755 index 0000000..3582549 Binary files /dev/null and b/ultralytics/ours_15000/gt/00017.png differ diff --git a/ultralytics/ours_15000/gt/00017.png:Zone.Identifier 
b/ultralytics/ours_15000/gt/00017.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00017.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00018.png b/ultralytics/ours_15000/gt/00018.png new file mode 100755 index 0000000..8e8863f Binary files /dev/null and b/ultralytics/ours_15000/gt/00018.png differ diff --git a/ultralytics/ours_15000/gt/00018.png:Zone.Identifier b/ultralytics/ours_15000/gt/00018.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00018.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00019.png b/ultralytics/ours_15000/gt/00019.png new file mode 100755 index 0000000..30f7eeb Binary files /dev/null and b/ultralytics/ours_15000/gt/00019.png differ diff --git a/ultralytics/ours_15000/gt/00019.png:Zone.Identifier b/ultralytics/ours_15000/gt/00019.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00019.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00020.png b/ultralytics/ours_15000/gt/00020.png new file mode 100755 index 0000000..e5920d2 Binary files /dev/null and b/ultralytics/ours_15000/gt/00020.png differ diff --git a/ultralytics/ours_15000/gt/00020.png:Zone.Identifier b/ultralytics/ours_15000/gt/00020.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00020.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00021.png b/ultralytics/ours_15000/gt/00021.png new file mode 100755 index 0000000..59f4681 Binary files /dev/null and b/ultralytics/ours_15000/gt/00021.png differ diff --git a/ultralytics/ours_15000/gt/00021.png:Zone.Identifier b/ultralytics/ours_15000/gt/00021.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00021.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00022.png b/ultralytics/ours_15000/gt/00022.png new file mode 100755 index 0000000..5e0a625 Binary files /dev/null and b/ultralytics/ours_15000/gt/00022.png differ diff --git a/ultralytics/ours_15000/gt/00022.png:Zone.Identifier b/ultralytics/ours_15000/gt/00022.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00022.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00023.png b/ultralytics/ours_15000/gt/00023.png new file mode 100755 index 0000000..652829f Binary files /dev/null and b/ultralytics/ours_15000/gt/00023.png differ diff --git a/ultralytics/ours_15000/gt/00023.png:Zone.Identifier b/ultralytics/ours_15000/gt/00023.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00023.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00024.png b/ultralytics/ours_15000/gt/00024.png new file mode 100755 index 0000000..eecb2c2 Binary files /dev/null and b/ultralytics/ours_15000/gt/00024.png differ diff --git a/ultralytics/ours_15000/gt/00024.png:Zone.Identifier b/ultralytics/ours_15000/gt/00024.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00024.png:Zone.Identifier @@ -0,0 +1,2 @@ 
+[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00025.png b/ultralytics/ours_15000/gt/00025.png new file mode 100755 index 0000000..0c75b59 Binary files /dev/null and b/ultralytics/ours_15000/gt/00025.png differ diff --git a/ultralytics/ours_15000/gt/00025.png:Zone.Identifier b/ultralytics/ours_15000/gt/00025.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00025.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00026.png b/ultralytics/ours_15000/gt/00026.png new file mode 100755 index 0000000..19b5a9a Binary files /dev/null and b/ultralytics/ours_15000/gt/00026.png differ diff --git a/ultralytics/ours_15000/gt/00026.png:Zone.Identifier b/ultralytics/ours_15000/gt/00026.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00026.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00027.png b/ultralytics/ours_15000/gt/00027.png new file mode 100755 index 0000000..5574335 Binary files /dev/null and b/ultralytics/ours_15000/gt/00027.png differ diff --git a/ultralytics/ours_15000/gt/00027.png:Zone.Identifier b/ultralytics/ours_15000/gt/00027.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00027.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00028.png b/ultralytics/ours_15000/gt/00028.png new file mode 100755 index 0000000..4e017b9 Binary files /dev/null and b/ultralytics/ours_15000/gt/00028.png differ diff --git a/ultralytics/ours_15000/gt/00028.png:Zone.Identifier b/ultralytics/ours_15000/gt/00028.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00028.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00029.png b/ultralytics/ours_15000/gt/00029.png new file mode 100755 index 0000000..f374874 Binary files /dev/null and b/ultralytics/ours_15000/gt/00029.png differ diff --git a/ultralytics/ours_15000/gt/00029.png:Zone.Identifier b/ultralytics/ours_15000/gt/00029.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00029.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00030.png b/ultralytics/ours_15000/gt/00030.png new file mode 100755 index 0000000..dd91f14 Binary files /dev/null and b/ultralytics/ours_15000/gt/00030.png differ diff --git a/ultralytics/ours_15000/gt/00030.png:Zone.Identifier b/ultralytics/ours_15000/gt/00030.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00030.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00031.png b/ultralytics/ours_15000/gt/00031.png new file mode 100755 index 0000000..7050258 Binary files /dev/null and b/ultralytics/ours_15000/gt/00031.png differ diff --git a/ultralytics/ours_15000/gt/00031.png:Zone.Identifier b/ultralytics/ours_15000/gt/00031.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00031.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00032.png b/ultralytics/ours_15000/gt/00032.png new file mode 100755 index 0000000..4b871a1 Binary files /dev/null and 
b/ultralytics/ours_15000/gt/00032.png differ diff --git a/ultralytics/ours_15000/gt/00032.png:Zone.Identifier b/ultralytics/ours_15000/gt/00032.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00032.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00033.png b/ultralytics/ours_15000/gt/00033.png new file mode 100755 index 0000000..cbad679 Binary files /dev/null and b/ultralytics/ours_15000/gt/00033.png differ diff --git a/ultralytics/ours_15000/gt/00033.png:Zone.Identifier b/ultralytics/ours_15000/gt/00033.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00033.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00034.png b/ultralytics/ours_15000/gt/00034.png new file mode 100755 index 0000000..14cbfc4 Binary files /dev/null and b/ultralytics/ours_15000/gt/00034.png differ diff --git a/ultralytics/ours_15000/gt/00034.png:Zone.Identifier b/ultralytics/ours_15000/gt/00034.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00034.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00035.png b/ultralytics/ours_15000/gt/00035.png new file mode 100755 index 0000000..3498599 Binary files /dev/null and b/ultralytics/ours_15000/gt/00035.png differ diff --git a/ultralytics/ours_15000/gt/00035.png:Zone.Identifier b/ultralytics/ours_15000/gt/00035.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00035.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00036.png b/ultralytics/ours_15000/gt/00036.png new file mode 100755 index 0000000..fc532c4 Binary files /dev/null and b/ultralytics/ours_15000/gt/00036.png differ diff --git a/ultralytics/ours_15000/gt/00036.png:Zone.Identifier b/ultralytics/ours_15000/gt/00036.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00036.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00037.png b/ultralytics/ours_15000/gt/00037.png new file mode 100755 index 0000000..ed68f10 Binary files /dev/null and b/ultralytics/ours_15000/gt/00037.png differ diff --git a/ultralytics/ours_15000/gt/00037.png:Zone.Identifier b/ultralytics/ours_15000/gt/00037.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00037.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt/00038.png b/ultralytics/ours_15000/gt/00038.png new file mode 100755 index 0000000..05ec93c Binary files /dev/null and b/ultralytics/ours_15000/gt/00038.png differ diff --git a/ultralytics/ours_15000/gt/00038.png:Zone.Identifier b/ultralytics/ours_15000/gt/00038.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt/00038.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00000.png b/ultralytics/ours_15000/gt_box/00000.png new file mode 100755 index 0000000..d44a50d Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00000.png differ diff --git a/ultralytics/ours_15000/gt_box/00000.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00000.png:Zone.Identifier new file 
mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00000.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00001.png b/ultralytics/ours_15000/gt_box/00001.png new file mode 100755 index 0000000..bea44ac Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00001.png differ diff --git a/ultralytics/ours_15000/gt_box/00001.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00001.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00001.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00002.png b/ultralytics/ours_15000/gt_box/00002.png new file mode 100755 index 0000000..50c3262 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00002.png differ diff --git a/ultralytics/ours_15000/gt_box/00002.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00002.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00002.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00003.png b/ultralytics/ours_15000/gt_box/00003.png new file mode 100755 index 0000000..7216a99 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00003.png differ diff --git a/ultralytics/ours_15000/gt_box/00003.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00003.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00003.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00004.png b/ultralytics/ours_15000/gt_box/00004.png new file mode 100755 index 0000000..3f7af17 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00004.png differ diff --git a/ultralytics/ours_15000/gt_box/00004.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00004.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00004.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00005.png b/ultralytics/ours_15000/gt_box/00005.png new file mode 100755 index 0000000..8cea88a Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00005.png differ diff --git a/ultralytics/ours_15000/gt_box/00005.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00005.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00005.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00006.png b/ultralytics/ours_15000/gt_box/00006.png new file mode 100755 index 0000000..34badf8 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00006.png differ diff --git a/ultralytics/ours_15000/gt_box/00006.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00006.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00006.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00007.png b/ultralytics/ours_15000/gt_box/00007.png new file mode 100755 index 0000000..a36d46e Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00007.png differ diff --git a/ultralytics/ours_15000/gt_box/00007.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00007.png:Zone.Identifier new file mode 100755 index 
0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00007.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00008.png b/ultralytics/ours_15000/gt_box/00008.png new file mode 100755 index 0000000..d75a2b1 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00008.png differ diff --git a/ultralytics/ours_15000/gt_box/00008.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00008.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00008.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00009.png b/ultralytics/ours_15000/gt_box/00009.png new file mode 100755 index 0000000..8103df7 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00009.png differ diff --git a/ultralytics/ours_15000/gt_box/00009.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00009.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00009.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00010.png b/ultralytics/ours_15000/gt_box/00010.png new file mode 100755 index 0000000..3fe615e Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00010.png differ diff --git a/ultralytics/ours_15000/gt_box/00010.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00010.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00010.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00011.png b/ultralytics/ours_15000/gt_box/00011.png new file mode 100755 index 0000000..b7796bd Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00011.png differ diff --git a/ultralytics/ours_15000/gt_box/00011.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00011.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00011.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00012.png b/ultralytics/ours_15000/gt_box/00012.png new file mode 100755 index 0000000..4a0ed24 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00012.png differ diff --git a/ultralytics/ours_15000/gt_box/00012.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00012.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00012.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00013.png b/ultralytics/ours_15000/gt_box/00013.png new file mode 100755 index 0000000..a27013b Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00013.png differ diff --git a/ultralytics/ours_15000/gt_box/00013.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00013.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00013.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00014.png b/ultralytics/ours_15000/gt_box/00014.png new file mode 100755 index 0000000..cf5359d Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00014.png differ diff --git a/ultralytics/ours_15000/gt_box/00014.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00014.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- 
/dev/null +++ b/ultralytics/ours_15000/gt_box/00014.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00015.png b/ultralytics/ours_15000/gt_box/00015.png new file mode 100755 index 0000000..69aa3e6 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00015.png differ diff --git a/ultralytics/ours_15000/gt_box/00015.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00015.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00015.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00016.png b/ultralytics/ours_15000/gt_box/00016.png new file mode 100755 index 0000000..7fd4014 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00016.png differ diff --git a/ultralytics/ours_15000/gt_box/00016.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00016.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00016.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00017.png b/ultralytics/ours_15000/gt_box/00017.png new file mode 100755 index 0000000..ad269ac Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00017.png differ diff --git a/ultralytics/ours_15000/gt_box/00017.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00017.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00017.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00018.png b/ultralytics/ours_15000/gt_box/00018.png new file mode 100755 index 0000000..6f26e1e Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00018.png differ diff --git a/ultralytics/ours_15000/gt_box/00018.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00018.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00018.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00019.png b/ultralytics/ours_15000/gt_box/00019.png new file mode 100755 index 0000000..2ab01a4 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00019.png differ diff --git a/ultralytics/ours_15000/gt_box/00019.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00019.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00019.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00020.png b/ultralytics/ours_15000/gt_box/00020.png new file mode 100755 index 0000000..57e0b76 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00020.png differ diff --git a/ultralytics/ours_15000/gt_box/00020.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00020.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00020.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00021.png b/ultralytics/ours_15000/gt_box/00021.png new file mode 100755 index 0000000..0236a62 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00021.png differ diff --git a/ultralytics/ours_15000/gt_box/00021.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00021.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ 
b/ultralytics/ours_15000/gt_box/00021.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00022.png b/ultralytics/ours_15000/gt_box/00022.png new file mode 100755 index 0000000..557f603 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00022.png differ diff --git a/ultralytics/ours_15000/gt_box/00022.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00022.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00022.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00023.png b/ultralytics/ours_15000/gt_box/00023.png new file mode 100755 index 0000000..21c480a Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00023.png differ diff --git a/ultralytics/ours_15000/gt_box/00023.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00023.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00023.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00024.png b/ultralytics/ours_15000/gt_box/00024.png new file mode 100755 index 0000000..9bd8b77 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00024.png differ diff --git a/ultralytics/ours_15000/gt_box/00024.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00024.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00024.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00025.png b/ultralytics/ours_15000/gt_box/00025.png new file mode 100755 index 0000000..767ed19 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00025.png differ diff --git a/ultralytics/ours_15000/gt_box/00025.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00025.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00025.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00026.png b/ultralytics/ours_15000/gt_box/00026.png new file mode 100755 index 0000000..f1e3607 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00026.png differ diff --git a/ultralytics/ours_15000/gt_box/00026.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00026.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00026.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00027.png b/ultralytics/ours_15000/gt_box/00027.png new file mode 100755 index 0000000..0723a0c Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00027.png differ diff --git a/ultralytics/ours_15000/gt_box/00027.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00027.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00027.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00028.png b/ultralytics/ours_15000/gt_box/00028.png new file mode 100755 index 0000000..9c95d3d Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00028.png differ diff --git a/ultralytics/ours_15000/gt_box/00028.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00028.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ 
b/ultralytics/ours_15000/gt_box/00028.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00029.png b/ultralytics/ours_15000/gt_box/00029.png new file mode 100755 index 0000000..965a95f Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00029.png differ diff --git a/ultralytics/ours_15000/gt_box/00029.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00029.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00029.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00030.png b/ultralytics/ours_15000/gt_box/00030.png new file mode 100755 index 0000000..ac2b2f2 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00030.png differ diff --git a/ultralytics/ours_15000/gt_box/00030.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00030.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00030.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00031.png b/ultralytics/ours_15000/gt_box/00031.png new file mode 100755 index 0000000..daf64a0 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00031.png differ diff --git a/ultralytics/ours_15000/gt_box/00031.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00031.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00031.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00032.png b/ultralytics/ours_15000/gt_box/00032.png new file mode 100755 index 0000000..516a851 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00032.png differ diff --git a/ultralytics/ours_15000/gt_box/00032.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00032.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00032.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00033.png b/ultralytics/ours_15000/gt_box/00033.png new file mode 100755 index 0000000..09577c3 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00033.png differ diff --git a/ultralytics/ours_15000/gt_box/00033.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00033.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00033.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00034.png b/ultralytics/ours_15000/gt_box/00034.png new file mode 100755 index 0000000..0b6aa01 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00034.png differ diff --git a/ultralytics/ours_15000/gt_box/00034.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00034.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00034.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00035.png b/ultralytics/ours_15000/gt_box/00035.png new file mode 100755 index 0000000..e75e0ab Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00035.png differ diff --git a/ultralytics/ours_15000/gt_box/00035.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00035.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ 
b/ultralytics/ours_15000/gt_box/00035.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00036.png b/ultralytics/ours_15000/gt_box/00036.png new file mode 100755 index 0000000..210310a Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00036.png differ diff --git a/ultralytics/ours_15000/gt_box/00036.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00036.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00036.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00037.png b/ultralytics/ours_15000/gt_box/00037.png new file mode 100755 index 0000000..7bfa789 Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00037.png differ diff --git a/ultralytics/ours_15000/gt_box/00037.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00037.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00037.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_box/00038.png b/ultralytics/ours_15000/gt_box/00038.png new file mode 100755 index 0000000..ab743df Binary files /dev/null and b/ultralytics/ours_15000/gt_box/00038.png differ diff --git a/ultralytics/ours_15000/gt_box/00038.png:Zone.Identifier b/ultralytics/ours_15000/gt_box/00038.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_box/00038.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png new file mode 100755 index 0000000..d44a50d Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00000.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png new file mode 100755 index 0000000..bea44ac Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00001.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png new file mode 100755 index 0000000..50c3262 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png 
differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00002.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png new file mode 100755 index 0000000..7216a99 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00003.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png new file mode 100755 index 0000000..3f7af17 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00004.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png new file mode 100755 index 0000000..8cea88a Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00005.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png new file mode 100755 index 0000000..34badf8 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png:Zone.Identifier new file mode 
100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00006.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png new file mode 100755 index 0000000..a36d46e Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00007.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png new file mode 100755 index 0000000..d75a2b1 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00008.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png new file mode 100755 index 0000000..8103df7 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00009.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png new file mode 100755 index 0000000..3fe615e Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png:Zone.Identifier:Zone.Identifier 
b/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00010.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png new file mode 100755 index 0000000..b7796bd Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00011.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png new file mode 100755 index 0000000..4a0ed24 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00012.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png new file mode 100755 index 0000000..a27013b Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00013.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png new file mode 100755 index 0000000..cf5359d Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ 
b/ultralytics/ours_15000/gt_renders_box/gt_box/00014.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png new file mode 100755 index 0000000..69aa3e6 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00015.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png new file mode 100755 index 0000000..7fd4014 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00016.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png new file mode 100755 index 0000000..ad269ac Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00017.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png new file mode 100755 index 0000000..6f26e1e Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00018.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
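Note on the recurring `*:Zone.Identifier` entries throughout this diff: each one contains only `[ZoneTransfer]` / `ZoneId=3`, which is Windows Mark-of-the-Web metadata (ZoneId 3 marks a file downloaded from the Internet zone). NTFS stores it as an alternate data stream; on a non-NTFS checkout the stream names surface as literal `name:Zone.Identifier` files and were committed alongside the images. The doubled `*:Zone.Identifier:Zone.Identifier` entries (whose first-level companions are empty blobs, index `e69de29`) show the marker being re-applied to the marker files themselves. A hedged cleanup sketch, assuming a POSIX checkout where the stream names exist as ordinary files; the root path is illustrative:

```
# Sketch: remove committed Zone.Identifier artifacts from a working tree.
# Assumes the NTFS alternate-data-stream names were checked out as plain
# files whose names contain ":Zone.Identifier" (root path is illustrative).
import os

root = "ultralytics/ours_15000"
for dirpath, _, filenames in os.walk(root):
    for name in filenames:
        if ":Zone.Identifier" in name:
            os.remove(os.path.join(dirpath, name))
```

diff --git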
a/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png new file mode 100755 index 0000000..2ab01a4 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00019.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png new file mode 100755 index 0000000..57e0b76 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00020.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png new file mode 100755 index 0000000..0236a62 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00021.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png new file mode 100755 index 0000000..557f603 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00022.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png new file mode 100755 index 0000000..21c480a Binary files /dev/null 
and b/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00023.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png new file mode 100755 index 0000000..9bd8b77 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00024.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png new file mode 100755 index 0000000..767ed19 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00025.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png new file mode 100755 index 0000000..f1e3607 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00026.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png new file mode 100755 index 0000000..0723a0c Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png:Zone.Identifier 
b/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00027.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png new file mode 100755 index 0000000..9c95d3d Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00028.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png new file mode 100755 index 0000000..965a95f Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00029.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png new file mode 100755 index 0000000..ac2b2f2 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00030.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png new file mode 100755 index 0000000..daf64a0 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git 
a/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00031.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png new file mode 100755 index 0000000..516a851 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00032.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png new file mode 100755 index 0000000..09577c3 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00033.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png new file mode 100755 index 0000000..0b6aa01 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00034.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png new file mode 100755 index 0000000..e75e0ab Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png:Zone.Identifier:Zone.Identifier new 
file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00035.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png new file mode 100755 index 0000000..210310a Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00036.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png new file mode 100755 index 0000000..7bfa789 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00037.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png b/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png new file mode 100755 index 0000000..ab743df Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png:Zone.Identifier new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png:Zone.Identifier:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/gt_box/00038.png:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png new file mode 100755 index 0000000..60353cc Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00000.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png new file mode 100755 index 0000000..a36d5e4 Binary files /dev/null and 
b/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00001.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png new file mode 100755 index 0000000..1ef88ae Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00002.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png new file mode 100755 index 0000000..0e79d5c Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00003.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png new file mode 100755 index 0000000..42de7f5 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00004.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png new file mode 100755 index 0000000..9f7760b Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00005.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png new file mode 100755 index 0000000..914cb55 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00006.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git 
a/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png new file mode 100755 index 0000000..4e4e7c2 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00007.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png new file mode 100755 index 0000000..4491d95 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00008.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png new file mode 100755 index 0000000..5fa3c3b Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00009.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png new file mode 100755 index 0000000..4eb0c9b Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00010.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png new file mode 100755 index 0000000..2785b9f Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00011.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png new file mode 100755 index 0000000..e94ee52 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png:Zone.Identifier new 
file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00012.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png new file mode 100755 index 0000000..def4597 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00013.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png new file mode 100755 index 0000000..b63df74 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00014.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png new file mode 100755 index 0000000..c41e258 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00015.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png new file mode 100755 index 0000000..e39fffc Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00016.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png new file mode 100755 index 0000000..ed12834 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00017.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png new file mode 100755 index 0000000..d3c9b58 Binary files /dev/null and 
b/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00018.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png new file mode 100755 index 0000000..270cf49 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00019.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png new file mode 100755 index 0000000..8e3801f Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00020.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png new file mode 100755 index 0000000..9acaddf Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00021.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png new file mode 100755 index 0000000..e543af3 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00022.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png new file mode 100755 index 0000000..e6b29ad Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00023.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git 
a/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png new file mode 100755 index 0000000..c4fd6a1 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00024.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png new file mode 100755 index 0000000..c2e4715 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00025.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png new file mode 100755 index 0000000..8e4cdb7 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00026.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png new file mode 100755 index 0000000..dbb293d Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00027.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png new file mode 100755 index 0000000..6326b41 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00028.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png new file mode 100755 index 0000000..59597dd Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png:Zone.Identifier new 
file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00029.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png new file mode 100755 index 0000000..6e87eeb Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00030.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png new file mode 100755 index 0000000..939fe7f Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00031.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png new file mode 100755 index 0000000..35d00cf Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00032.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png new file mode 100755 index 0000000..4fd7e61 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00033.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png new file mode 100755 index 0000000..6ef7bbc Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00034.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png new file mode 100755 index 0000000..f45a2a4 Binary files /dev/null and 
b/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00035.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png new file mode 100755 index 0000000..759b26f Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00036.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png new file mode 100755 index 0000000..314e1c3 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00037.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png b/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png new file mode 100755 index 0000000..b0019b6 Binary files /dev/null and b/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png differ diff --git a/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png:Zone.Identifier b/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/gt_renders_box/renders_box/00038.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_gt/00000.txt b/ultralytics/ours_15000/labels_gt/00000.txt new file mode 100755 index 0000000..2825168 --- /dev/null +++ b/ultralytics/ours_15000/labels_gt/00000.txt @@ -0,0 +1,9 @@ +56 511.4192810058594 332.4375 401.11846923828125 455.7718200683594 +45 424.55279541015625 762.0631103515625 209.660400390625 144.256591796875 +41 268.75335693359375 734.4775390625 97.10980224609375 133.324951171875 +62 1375.654052734375 406.95758056640625 447.1771240234375 774.7940673828125 +77 501.18988037109375 322.69970703125 202.774658203125 125.74966430664062 +0 88.78277587890625 766.2339477539062 177.0216064453125 582.2296142578125 +39 529.0463256835938 600.8639526367188 71.59091186523438 154.260986328125 +57 173.04635620117188 559.041748046875 345.447509765625 450.9307861328125 +15 839.5738525390625 729.031005859375 192.04266357421875 106.47735595703125 diff --git a/ultralytics/ours_15000/labels_gt/00000.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00000.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_gt/00000.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
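The `labels_gt/*.txt` files added here hold one detection per line in the form `class_id x_center y_center width height`. The coordinates are plainly pixel-valued rather than YOLO-normalized (they exceed 1.0), and the class ids are consistent with COCO indices (e.g. `0` person, `56` chair), though that mapping is an inference from the values, not stated in the diff. A minimal reading sketch under those assumptions; `load_labels` and the corner conversion are illustrative, not code from this repo:

```
# Sketch: parse an ours_15000/labels_gt/*.txt file.
# Assumed layout (visible in the hunks above): one detection per line,
# "class_id x_center y_center width height", in pixels (not normalized).

def load_labels(path):
    """Return a list of (class_id, x1, y1, x2, y2) pixel corner boxes."""
    boxes = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 5:  # skip blank or malformed lines
                continue
            cls = int(parts[0])
            cx, cy, w, h = map(float, parts[1:])
            # Center/size -> top-left and bottom-right corners.
            boxes.append((cls, cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2))
    return boxes

print(load_labels("ultralytics/ours_15000/labels_gt/00000.txt")[0])
```

As a worked check against 00000.txt above, its first entry (class 56, center 511.42, 332.44, size 401.12 x 455.77) converts to a box spanning roughly x 310.9 to 712.0 and y 104.6 to 560.3 pixels.

diff --git a/ultralytics/ours_15000/labels_gt/00001.txt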
b/ultralytics/ours_15000/labels_gt/00001.txt
new file mode 100755
index 0000000..f42c7dc
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00001.txt
@@ -0,0 +1,8 @@
+45 380.3346252441406 507.9164123535156 157.1083984375 122.24627685546875
+56 854.879638671875 196.01121520996094 377.02423095703125 378.4043273925781
+39 552.570068359375 405.9953308105469 57.51666259765625 145.16571044921875
+56 689.36962890625 451.8404541015625 272.83026123046875 318.7688293457031
+40 500.9447326660156 533.4939575195312 81.71478271484375 126.422119140625
+77 843.85888671875 139.31216430664062 187.35455322265625 123.88970184326172
+60 393.9997863769531 686.6287231445312 554.4846801757812 714.7930908203125
+73 139.0690460205078 801.7929077148438 278.1380920410156 183.6785888671875
diff --git a/ultralytics/ours_15000/labels_gt/00001.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00001.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00001.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00002.txt b/ultralytics/ours_15000/labels_gt/00002.txt
new file mode 100755
index 0000000..16e312f
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00002.txt
@@ -0,0 +1,6 @@
+45 410.75146484375 415.2247009277344 122.92678833007812 91.70257568359375
+56 1219.775390625 243.55905151367188 395.799560546875 484.71954345703125
+57 446.783447265625 324.46820068359375 875.8935546875 555.3404541015625
+73 123.21436309814453 582.8720092773438 142.24864196777344 91.0616455078125
+77 1211.0689697265625 214.84645080566406 181.0576171875 130.68344116210938
+57 304.1683044433594 556.7301025390625 607.5669555664062 982.3744506835938
diff --git a/ultralytics/ours_15000/labels_gt/00002.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00002.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00002.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00003.txt b/ultralytics/ours_15000/labels_gt/00003.txt
new file mode 100755
index 0000000..4da1bf4
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00003.txt
@@ -0,0 +1,7 @@
+56 537.984619140625 104.16346740722656 247.22317504882812 206.86410522460938
+58 479.4595947265625 256.2663879394531 202.379150390625 446.7165222167969
+39 1446.98876953125 218.94735717773438 66.2655029296875 136.17584228515625
+45 1534.4891357421875 241.72080993652344 113.75732421875 69.06478881835938
+56 1509.798583984375 978.3316650390625 178.9814453125 163.74114990234375
+45 1531.998046875 230.22962951660156 123.3929443359375 93.27597045898438
+57 1495.73583984375 567.513916015625 207.9345703125 451.093505859375
diff --git a/ultralytics/ours_15000/labels_gt/00003.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00003.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00003.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00004.txt b/ultralytics/ours_15000/labels_gt/00004.txt
new file mode 100755
index 0000000..de36cfd
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00004.txt
@@ -0,0 +1,9 @@
+58 225.6078643798828 817.0474243164062 252.49099731445312 452.5406494140625
+65 1266.0582275390625 992.1220703125 52.354736328125 64.290771484375
+45 1192.4674072265625 926.94091796875 109.881591796875 77.95751953125
+41 1328.543701171875 956.1817626953125 73.452392578125 106.571044921875
+58 174.7242431640625 689.6722412109375 349.448486328125 697.358642578125
+39 1093.828857421875 908.369384765625 73.0374755859375 136.822021484375
+57 1403.246826171875 860.028076171875 391.1273193359375 385.597900390625
+58 150.71444702148438 480.65234375 299.59619140625 289.175048828125
+57 1300.4989013671875 860.6617431640625 597.1959838867188 388.1005859375
diff --git a/ultralytics/ours_15000/labels_gt/00004.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00004.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00004.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00005.txt b/ultralytics/ours_15000/labels_gt/00005.txt
new file mode 100755
index 0000000..c5a9031
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00005.txt
@@ -0,0 +1,7 @@
+56 525.0687866210938 404.5985412597656 173.6837158203125 218.50994873046875
+39 1484.3916015625 481.3844299316406 55.6732177734375 169.01751708984375
+45 1551.8255615234375 508.44970703125 89.173583984375 76.29421997070312
+58 385.7151794433594 265.6966552734375 452.08038330078125 279.04376220703125
+60 604.8404541015625 349.25408935546875 273.75482177734375 181.63772583007812
+56 489.0699768066406 137.91238403320312 36.9207763671875 91.0045166015625
+56 335.2917785644531 712.9525146484375 387.00250244140625 679.2771606445312
diff --git a/ultralytics/ours_15000/labels_gt/00005.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00005.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00005.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00006.txt b/ultralytics/ours_15000/labels_gt/00006.txt
new file mode 100755
index 0000000..608905a
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00006.txt
@@ -0,0 +1,6 @@
+58 138.93362426757812 231.56040954589844 256.5150146484375 438.6011047363281
+57 1289.420166015625 670.7376708984375 619.00244140625 757.728515625
+57 1372.94873046875 259.8544921875 452.4678955078125 455.7700500488281
+24 1422.6416015625 461.09375 354.716796875 320.6671142578125
+56 256.140380859375 105.73925018310547 255.86163330078125 209.9776153564453
+41 1242.843994140625 294.73370361328125 67.7633056640625 93.70277404785156
diff --git a/ultralytics/ours_15000/labels_gt/00006.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00006.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00006.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00007.txt b/ultralytics/ours_15000/labels_gt/00007.txt
new file mode 100755
index 0000000..050b845
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00007.txt
@@ -0,0 +1,12 @@
+58 374.0290222167969 861.9793701171875 229.42352294921875 395.7236328125
+56 146.42373657226562 862.7569580078125 287.50738525390625 394.095703125
+39 1240.644775390625 865.550048828125 49.4871826171875 128.22113037109375
+41 1466.269287109375 885.677734375 72.8338623046875 106.5985107421875
+45 1327.665283203125 864.88232421875 115.65185546875 87.51568603515625
+58 288.7549133300781 734.825927734375 354.5235595703125 637.9509887695312
+60 567.4259033203125 576.2108764648438 422.410888671875 279.2039794921875
+56 458.7630615234375 687.2434692382812 229.74325561523438 215.7059326171875
+58 130.54673767089844 590.5625 150.84999084472656 146.180419921875
+60 1348.0296630859375 933.8707275390625 501.432861328125 251.70806884765625
+73 51.46575927734375 234.20774841308594 46.05314636230469 80.16958618164062
+73 61.24396514892578 331.3090515136719 30.916461944580078 88.8651123046875
diff --git a/ultralytics/ours_15000/labels_gt/00007.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00007.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00007.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00008.txt b/ultralytics/ours_15000/labels_gt/00008.txt
new file mode 100755
index 0000000..07486a7
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00008.txt
@@ -0,0 +1,6 @@
+56 451.5390625 734.7484741210938 338.3667297363281 634.6405029296875
+58 647.419189453125 614.5855712890625 189.506103515625 380.7623596191406
+62 61.82577133178711 384.8314208984375 123.53414154052734 269.9192810058594
+39 1502.4755859375 663.6512451171875 52.188720703125 126.98809814453125
+58 537.5816650390625 497.5560302734375 403.24627685546875 627.1126708984375
+73 959.1441040039062 133.77651977539062 122.0977783203125 28.172409057617188
diff --git a/ultralytics/ours_15000/labels_gt/00008.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00008.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00008.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00009.txt b/ultralytics/ours_15000/labels_gt/00009.txt
new file mode 100755
index 0000000..2ec8362
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00009.txt
@@ -0,0 +1,11 @@
+58 185.40713500976562 378.1705627441406 329.01239013671875 661.6659545898438
+65 1123.8450927734375 752.3284912109375 68.164306640625 52.508544921875
+57 1314.704833984375 763.7675170898438 569.2593994140625 559.6209716796875
+39 959.0958251953125 662.8599853515625 58.01025390625 140.66436767578125
+45 1071.66748046875 679.7269287109375 118.3934326171875 92.190673828125
+63 1145.338623046875 595.7103271484375 194.1015625 71.03607177734375
+62 74.65950775146484 386.87469482421875 149.20887756347656 285.8712463378906
+41 1197.238037109375 715.0230712890625 63.6959228515625 73.07415771484375
+41 1190.6336669921875 728.5533447265625 70.976806640625 96.68701171875
+39 971.331298828125 663.90380859375 82.93072509765625 145.718017578125
+56 1264.52197265625 903.313720703125 583.013671875 305.50506591796875
diff --git a/ultralytics/ours_15000/labels_gt/00009.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00009.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00009.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00010.txt b/ultralytics/ours_15000/labels_gt/00010.txt
new file mode 100755
index 0000000..4f3216d
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00010.txt
@@ -0,0 +1,8 @@
+56 418.7566223144531 106.73284149169922 250.88531494140625 209.1630401611328
+58 344.033447265625 229.84373474121094 199.03982543945312 435.1431579589844
+56 1395.743896484375 852.6063842773438 405.8131103515625 397.9835205078125
+57 1406.9716796875 356.40509033203125 384.2989501953125 683.945068359375
+41 1458.5357666015625 172.41888427734375 80.440673828125 94.32075500488281
+39 1239.6820068359375 142.55941772460938 48.37890625 125.00003814697266
+45 1325.31640625 156.25282287597656 117.051513671875 80.32046508789062
+45 1326.248779296875 167.26351928710938 107.964111328125 61.094970703125
diff --git a/ultralytics/ours_15000/labels_gt/00010.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00010.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00010.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00011.txt b/ultralytics/ours_15000/labels_gt/00011.txt
new file mode 100755
index 0000000..49fdca5
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00011.txt
@@ -0,0 +1 @@
+58 1135.7216796875 178.5731964111328 280.2962646484375 356.4311218261719
diff --git a/ultralytics/ours_15000/labels_gt/00011.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00011.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00011.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00012.txt b/ultralytics/ours_15000/labels_gt/00012.txt
new file mode 100755
index 0000000..a477346
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00012.txt
@@ -0,0 +1,6 @@
+58 752.65869140625 212.59754943847656 260.85809326171875 413.1434020996094
+39 1407.209228515625 374.8438415527344 66.4764404296875 163.61053466796875
+56 982.493896484375 106.86918640136719 215.759033203125 213.73837280273438
+45 1543.5057373046875 396.69140625 112.270751953125 100.58334350585938
+39 1463.953857421875 352.82794189453125 83.4794921875 198.1156005859375
+56 1535.348876953125 781.3843994140625 128.4163818359375 548.471923828125
diff --git a/ultralytics/ours_15000/labels_gt/00012.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00012.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00012.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00013.txt b/ultralytics/ours_15000/labels_gt/00013.txt
new file mode 100755
index 0000000..128d7c1
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00013.txt
@@ -0,0 +1,10 @@
+58 377.570556640625 397.2080078125 347.0758972167969 619.4268798828125
+45 925.1605224609375 849.4540405273438 126.9677734375 110.531005859375
+56 734.1798095703125 453.36505126953125 198.28216552734375 251.33609008789062
+41 1004.0988159179688 931.65771484375 87.9964599609375 113.01025390625
+65 913.0203247070312 942.0400390625 114.1138916015625 44.630126953125
+73 1138.6953125 979.1365966796875 134.7886962890625 102.72198486328125
+57 1307.749267578125 884.37744140625 583.10302734375 340.862548828125
+63 914.632568359375 234.07908630371094 246.48382568359375 121.11380004882812
+66 963.1992797851562 232.87469482421875 146.6798095703125 118.33256530761719
+39 766.1744384765625 799.7305297851562 71.87847900390625 152.2698974609375
diff --git a/ultralytics/ours_15000/labels_gt/00013.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00013.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00013.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00014.txt b/ultralytics/ours_15000/labels_gt/00014.txt
new file mode 100755
index 0000000..a86bd82
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00014.txt
@@ -0,0 +1,9 @@
+58 920.443359375 716.74853515625 323.16510009765625 581.6647338867188
+56 1178.88037109375 663.6239013671875 212.109375 293.645751953125
+73 644.615966796875 188.20248413085938 210.02154541015625 155.1676788330078
+74 802.4447021484375 39.09150695800781 71.4056396484375 77.6951904296875
+73 642.8343505859375 205.98521423339844 57.53448486328125 104.69100952148438
+73 653.5934448242188 205.98899841308594 47.9405517578125 103.75228881835938
+73 610.4966430664062 208.1611328125 53.455078125 101.92108154296875
+73 561.07080078125 185.65145874023438 41.456298828125 150.48670959472656
+56 1179.47607421875 705.7279663085938 207.1173095703125 210.3067626953125
diff --git a/ultralytics/ours_15000/labels_gt/00014.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00014.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00014.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00015.txt b/ultralytics/ours_15000/labels_gt/00015.txt
new file mode 100755
index 0000000..66a4c63
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00015.txt
@@ -0,0 +1,5 @@
+58 796.0745239257812 92.32919311523438 126.2506103515625 181.95263671875
+39 1476.2542724609375 149.9631805419922 105.643310546875 171.25289916992188
+39 1531.9708251953125 137.32818603515625 109.020263671875 204.600341796875
+46 646.356201171875 783.1761474609375 257.917236328125 114.078369140625
+57 1127.2698974609375 681.222412109375 941.3818359375 744.7509765625
diff --git a/ultralytics/ours_15000/labels_gt/00015.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00015.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00015.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00016.txt b/ultralytics/ours_15000/labels_gt/00016.txt
new file mode 100755
index 0000000..380f30f
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00016.txt
@@ -0,0 +1,6 @@
+58 193.76930236816406 275.69866943359375 374.4609069824219 551.3973388671875
+45 794.4253540039062 681.0787963867188 117.989501953125 102.4105224609375
+56 535.4075317382812 321.7506408691406 207.69277954101562 222.06439208984375
+41 887.2283325195312 757.3054809570312 99.5570068359375 111.0626220703125
+73 986.9110107421875 799.9398803710938 114.671630859375 108.85009765625
+57 1211.7099609375 820.171142578125 724.247314453125 467.465087890625
diff --git a/ultralytics/ours_15000/labels_gt/00016.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00016.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00016.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00017.txt b/ultralytics/ours_15000/labels_gt/00017.txt
new file mode 100755
index 0000000..7c9e822
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00017.txt
@@ -0,0 +1,4 @@
+58 1109.900146484375 326.40264892578125 330.6904296875 609.989501953125
+58 1154.198974609375 431.54681396484375 236.5185546875 398.8690490722656
+74 366.0892333984375 103.33074951171875 169.56161499023438 203.36288452148438
+62 1465.72509765625 81.6920166015625 266.043701171875 161.7139892578125
diff --git a/ultralytics/ours_15000/labels_gt/00017.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00017.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00017.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00018.txt b/ultralytics/ours_15000/labels_gt/00018.txt
new file mode 100755
index 0000000..6f54ca5
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00018.txt
@@ -0,0 +1,6 @@
+45 921.9700317382812 742.6441040039062 133.4637451171875 108.4925537109375
+56 1330.397705078125 391.8733215332031 374.3668212890625 433.80535888671875
+57 600.9934692382812 710.241943359375 1198.1370849609375 691.27734375
+56 1188.727783203125 685.6143798828125 274.3402099609375 285.40399169921875
+39 1067.415283203125 652.107177734375 60.1605224609375 131.99810791015625
+73 716.6908569335938 991.6322021484375 261.06591796875 145.35400390625
diff --git a/ultralytics/ours_15000/labels_gt/00018.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00018.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00018.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00019.txt b/ultralytics/ours_15000/labels_gt/00019.txt
new file mode 100755
index 0000000..288e563
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00019.txt
@@ -0,0 +1,6 @@
+56 618.7374267578125 421.2305603027344 350.095703125 459.38812255859375
+39 112.99761962890625 578.0225219726562 44.250709533691406 124.078369140625
+56 340.33489990234375 645.593505859375 335.83502197265625 327.58941650390625
+57 162.39686584472656 444.91339111328125 324.7937316894531 409.0928649902344
+60 91.9091796875 834.7383422851562 183.5391845703125 450.3692626953125
+77 619.004638671875 401.2059020996094 166.74346923828125 134.1510009765625
diff --git a/ultralytics/ours_15000/labels_gt/00019.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00019.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00019.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00020.txt b/ultralytics/ours_15000/labels_gt/00020.txt
new file mode 100755
index 0000000..983e474
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00020.txt
@@ -0,0 +1,9 @@
+45 137.48085021972656 197.90252685546875 139.18423461914062 96.75857543945312
+39 295.8536071777344 131.79486083984375 57.14495849609375 138.86187744140625
+57 151.49183654785156 589.911376953125 302.9836730957031 610.05615234375
+75 1463.7318115234375 978.569091796875 269.997314453125 172.77606201171875
+56 687.846923828125 117.63601684570312 344.04522705078125 235.27203369140625
+57 687.1129150390625 117.71862030029297 342.369384765625 235.43724060058594
+56 486.7486572265625 249.5191192626953 267.603759765625 278.9292907714844
+41 247.950927734375 210.30596923828125 71.19171142578125 79.54579162597656
+58 1379.4962158203125 796.713623046875 439.913818359375 523.8110961914062
diff --git a/ultralytics/ours_15000/labels_gt/00020.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00020.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00020.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00021.txt b/ultralytics/ours_15000/labels_gt/00021.txt
new file mode 100755
index 0000000..7c3394b
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00021.txt
@@ -0,0 +1,6 @@
+57 419.52130126953125 637.949462890625 839.0426025390625 821.173583984375
+56 1045.7705078125 216.68614196777344 351.1634521484375 400.0156555175781
+45 783.4061279296875 541.7034301757812 128.38641357421875 101.9273681640625
+56 950.2614135742188 484.5854187011719 278.1241455078125 240.97076416015625
+73 604.5101928710938 809.2720336914062 205.57162475585938 112.4932861328125
+77 1020.9207763671875 209.66278076171875 165.26702880859375 117.24412536621094
diff --git a/ultralytics/ours_15000/labels_gt/00021.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00021.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00021.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00022.txt b/ultralytics/ours_15000/labels_gt/00022.txt
new file mode 100755
index 0000000..0af4d21
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00022.txt
@@ -0,0 +1,5 @@
+45 190.1372833251953 295.3411865234375 129.42453002929688 91.03485107421875
+56 861.62841796875 206.93328857421875 357.83392333984375 412.61175537109375
+57 293.91253662109375 226.6883544921875 580.9632568359375 447.2225341796875
+57 860.5447998046875 210.16204833984375 357.82366943359375 415.4361572265625
+57 134.41018676757812 538.63330078125 268.82037353515625 467.7417297363281
diff --git a/ultralytics/ours_15000/labels_gt/00022.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00022.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00022.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00023.txt b/ultralytics/ours_15000/labels_gt/00023.txt
new file mode 100755
index 0000000..1a28fae
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00023.txt
@@ -0,0 +1,5 @@
+56 210.9785919189453 281.5224609375 380.2796936035156 431.0151062011719
+56 89.26393127441406 547.9739990234375 178.03631591796875 269.5361328125
+58 1253.573974609375 598.5875854492188 691.2705078125 925.2200927734375
+62 746.8617553710938 223.0107421875 238.430419921875 177.89207458496094
+62 942.3292236328125 185.26910400390625 324.581298828125 331.8287353515625
diff --git a/ultralytics/ours_15000/labels_gt/00023.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00023.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00023.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00024.txt b/ultralytics/ours_15000/labels_gt/00024.txt
new file mode 100755
index 0000000..2d107c4
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00024.txt
@@ -0,0 +1,3 @@
+58 1439.1416015625 615.0361938476562 320.6632080078125 588.8641357421875
+56 249.25820922851562 102.53416442871094 162.22467041015625 198.69351196289062
+75 1456.250732421875 749.2395629882812 273.9642333984375 318.0433349609375
diff --git a/ultralytics/ours_15000/labels_gt/00024.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00024.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00024.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00025.txt b/ultralytics/ours_15000/labels_gt/00025.txt
new file mode 100755
index 0000000..c238e0a
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00025.txt
@@ -0,0 +1,10 @@
+45 737.8883056640625 651.9853515625 118.46923828125 91.6976318359375
+56 1344.581787109375 480.348876953125 369.86572265625 463.014404296875
+39 887.5238647460938 604.9346313476562 41.1463623046875 120.0855712890625
+73 448.7968444824219 817.0993041992188 155.75030517578125 96.092529296875
+41 828.1822509765625 675.9771728515625 65.50506591796875 90.16259765625
+57 512.9719848632812 668.9072875976562 1025.9439697265625 770.6534423828125
+73 189.0782470703125 906.720703125 354.851318359375 135.70220947265625
+75 829.3604736328125 584.5616455078125 68.388671875 140.6534423828125
+56 1089.94140625 704.37158203125 295.43310546875 316.52532958984375
+57 408.912109375 576.0001831054688 811.656494140625 582.8067626953125
diff --git a/ultralytics/ours_15000/labels_gt/00025.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00025.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00025.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00026.txt b/ultralytics/ours_15000/labels_gt/00026.txt
new file mode 100755
index 0000000..b9a3606
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00026.txt
@@ -0,0 +1,4 @@
+57 260.2044982910156 900.294189453125 518.2760620117188 314.848388671875
+77 192.30029296875 1025.266357421875 314.1744384765625 80.5093994140625
+62 1391.3094482421875 797.18310546875 415.196533203125 519.89208984375
+41 1059.793212890625 296.38800048828125 51.0699462890625 47.1966552734375
diff --git a/ultralytics/ours_15000/labels_gt/00026.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00026.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00026.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00027.txt b/ultralytics/ours_15000/labels_gt/00027.txt
new file mode 100755
index 0000000..378cba7
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00027.txt
@@ -0,0 +1,7 @@
+56 440.6987609863281 214.5452117919922 565.7157592773438 423.8868103027344
+56 383.3110046386719 694.711181640625 561.8677368164062 633.0776977539062
+46 926.0938720703125 718.2742309570312 268.757568359375 198.3980712890625
+41 422.2413635253906 1004.5433349609375 155.50140380859375 120.84991455078125
+39 564.1445922851562 986.50390625 128.88394165039062 158.13531494140625
+57 441.39483642578125 213.16561889648438 564.92822265625 425.50592041015625
+57 115.45759582519531 674.1942138671875 229.69204711914062 758.5570678710938
diff --git a/ultralytics/ours_15000/labels_gt/00027.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00027.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00027.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00028.txt b/ultralytics/ours_15000/labels_gt/00028.txt
new file mode 100755
index 0000000..1024ddd
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00028.txt
@@ -0,0 +1,12 @@
+75 851.437744140625 952.292236328125 245.398681640625 221.68719482421875
+58 848.16943359375 525.3868408203125 913.162841796875 1046.681884765625
+58 881.0479736328125 755.2593994140625 701.7392578125 599.845947265625
+73 1411.04296875 593.4088745117188 100.4425048828125 95.6666259765625
+73 1431.55322265625 164.69873046875 82.0318603515625 165.7554931640625
+73 1468.179931640625 376.9989013671875 96.261962890625 187.3900146484375
+73 1529.62744140625 389.8619079589844 81.3861083984375 168.16448974609375
+73 1414.907470703125 376.0030517578125 80.0323486328125 185.21231079101562
+73 1392.819580078125 381.2193298339844 71.2398681640625 171.36590576171875
+73 1546.07080078125 648.7391357421875 107.8582763671875 164.05963134765625
+73 1370.2764892578125 380.1346130371094 80.236083984375 170.47967529296875
+73 1505.2764892578125 638.435791015625 123.08984375 176.36761474609375
diff --git a/ultralytics/ours_15000/labels_gt/00028.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00028.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00028.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00029.txt b/ultralytics/ours_15000/labels_gt/00029.txt
new file mode 100755
index 0000000..1fca95a
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00029.txt
@@ -0,0 +1 @@
+58 444.82373046875 618.3103637695312 835.16015625 867.1829833984375
diff --git a/ultralytics/ours_15000/labels_gt/00029.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00029.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00029.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00030.txt b/ultralytics/ours_15000/labels_gt/00030.txt
new file mode 100755
index 0000000..cc4175d
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00030.txt
@@ -0,0 +1,7 @@
+56 477.03289794921875 575.6170654296875 483.4775390625 551.1578369140625
+56 762.058349609375 209.6219482421875 544.12646484375 412.86029052734375
+39 187.09182739257812 911.374267578125 153.68450927734375 297.65472412109375
+57 762.6749267578125 209.1727294921875 543.1106567382812 411.6849365234375
+57 179.51358032226562 551.3162841796875 356.52593994140625 898.1056518554688
+46 995.784912109375 777.9097900390625 300.632080078125 172.49371337890625
+77 745.9265747070312 73.60415649414062 271.8133544921875 145.72100830078125
diff --git a/ultralytics/ours_15000/labels_gt/00030.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00030.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00030.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00031.txt b/ultralytics/ours_15000/labels_gt/00031.txt
new file mode 100755
index 0000000..e69de29
diff --git a/ultralytics/ours_15000/labels_gt/00031.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00031.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00031.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00032.txt b/ultralytics/ours_15000/labels_gt/00032.txt
new file mode 100755
index 0000000..b456608
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00032.txt
@@ -0,0 +1,12 @@
+58 840.1622314453125 787.4189453125 404.35504150390625 539.6084594726562
+0 132.65029907226562 542.0628051757812 34.48808288574219 115.12753295898438
+62 585.2234497070312 895.8689575195312 332.4749755859375 323.7908935546875
+74 722.1151123046875 45.707618713378906 92.393798828125 90.77020263671875
+0 162.60792541503906 533.469482421875 35.05072021484375 115.83984375
+56 1143.9462890625 852.100341796875 251.08648681640625 258.39825439453125
+73 497.700439453125 235.2008819580078 70.21255493164062 136.24862670898438
+0 94.44181823730469 558.1847534179688 42.96905517578125 100.4869384765625
+73 498.40631103515625 206.93890380859375 295.7799072265625 210.5323028564453
+73 454.6231689453125 235.16299438476562 81.5447998046875 134.1905975341797
+73 469.44854736328125 236.8616485595703 61.31640625 131.34359741210938
+73 514.7150268554688 236.46865844726562 54.178924560546875 136.9453582763672
diff --git a/ultralytics/ours_15000/labels_gt/00032.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00032.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00032.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00033.txt b/ultralytics/ours_15000/labels_gt/00033.txt
new file mode 100755
index 0000000..e69de29
diff --git a/ultralytics/ours_15000/labels_gt/00033.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00033.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00033.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00034.txt b/ultralytics/ours_15000/labels_gt/00034.txt
new file mode 100755
index 0000000..fa192e1
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00034.txt
@@ -0,0 +1,2 @@
+41 788.37158203125 736.6769409179688 74.91943359375 125.167236328125
+73 851.9610595703125 479.1068115234375 37.5201416015625 97.9681396484375
diff --git a/ultralytics/ours_15000/labels_gt/00034.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00034.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00034.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00035.txt b/ultralytics/ours_15000/labels_gt/00035.txt
new file mode 100755
index 0000000..beda8d2
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00035.txt
@@ -0,0 +1,9 @@
+41 22.40873146057129 704.3751220703125 44.81746292114258 134.56695556640625
+74 1494.8968505859375 519.512939453125 101.798583984375 99.41278076171875
+41 46.798038482666016 705.3744506835938 86.90966033935547 132.406005859375
+41 66.21147918701172 705.1309204101562 54.5704460144043 130.191650390625
+73 1257.3961181640625 706.510498046875 94.159423828125 113.32928466796875
+73 1358.18701171875 545.008056640625 43.3282470703125 102.50509643554688
+73 147.18508911132812 438.82110595703125 51.01466369628906 102.197265625
+73 1284.172607421875 707.4093627929688 64.8905029296875 110.6536865234375
+73 1268.26220703125 707.4222412109375 61.5157470703125 110.31353759765625
diff --git a/ultralytics/ours_15000/labels_gt/00035.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00035.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00035.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00036.txt b/ultralytics/ours_15000/labels_gt/00036.txt
new file mode 100755
index 0000000..4604a53
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00036.txt
@@ -0,0 +1,2 @@
+0 1184.75390625 523.3885498046875 430.55206298828125 255.07489013671875
+63 103.11344146728516 110.43177032470703 205.85621643066406 211.2140350341797
diff --git a/ultralytics/ours_15000/labels_gt/00036.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00036.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00036.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00037.txt b/ultralytics/ours_15000/labels_gt/00037.txt
new file mode 100755
index 0000000..91d1eac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00037.txt
@@ -0,0 +1,2 @@
+57 511.6624450683594 898.3057861328125 1023.3248901367188 312.64495849609375
+57 782.3731079101562 898.4188232421875 978.8968505859375 318.30364990234375
diff --git a/ultralytics/ours_15000/labels_gt/00037.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00037.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00037.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_gt/00038.txt b/ultralytics/ours_15000/labels_gt/00038.txt
new file mode 100755
index 0000000..5837910
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00038.txt
@@ -0,0 +1,2 @@
+57 1170.4056396484375 493.9075927734375 854.7822265625 890.171875
+73 1307.6837158203125 513.9310302734375 174.33447265625 104.84237670898438
diff --git a/ultralytics/ours_15000/labels_gt/00038.txt:Zone.Identifier b/ultralytics/ours_15000/labels_gt/00038.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_gt/00038.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00000.txt b/ultralytics/ours_15000/labels_renders/00000.txt
new file mode 100755
index 0000000..22d935c
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00000.txt
@@ -0,0 +1,8 @@
+56 511.27532958984375 333.6192321777344 403.0279846191406 456.58966064453125
+45 424.82421875 762.4955444335938 210.14663696289062 145.8319091796875
+62 1374.1278076171875 405.0294494628906 449.56787109375 771.8057250976562
+41 269.02703857421875 730.5111083984375 101.47340393066406 138.88043212890625
+39 537.1754150390625 648.297607421875 91.90643310546875 245.28363037109375
+77 500.4969482421875 321.5142822265625 202.62136840820312 129.8236083984375
+0 88.17066192626953 697.6800537109375 176.34132385253906 712.280517578125
+60 472.7126159667969 866.1412963867188 588.7864379882812 370.6759033203125
diff --git a/ultralytics/ours_15000/labels_renders/00000.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00000.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00000.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00001.txt b/ultralytics/ours_15000/labels_renders/00001.txt
new file mode 100755
index 0000000..4562052
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00001.txt
@@ -0,0 +1,7 @@
+45 378.4690856933594 507.79248046875 163.01531982421875 122.8310546875
+56 855.2413330078125 196.1427001953125 377.140625 379.147216796875
+39 552.17724609375 408.2512512207031 58.99078369140625 143.90106201171875
+56 688.9881591796875 451.5233459472656 273.97418212890625 319.00543212890625
+77 843.4219970703125 138.951416015625 187.482421875 124.63638305664062
+40 501.23016357421875 535.4861450195312 83.45263671875 121.25277709960938
+57 292.35882568359375 367.5412902832031 584.294921875 608.1130981445312
diff --git a/ultralytics/ours_15000/labels_renders/00001.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00001.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00001.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00002.txt b/ultralytics/ours_15000/labels_renders/00002.txt
new file mode 100755
index 0000000..6d79a13
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00002.txt
@@ -0,0 +1,6 @@
+56 1219.177490234375 243.93067932128906 397.0306396484375 484.8166198730469
+45 410.3744812011719 415.013671875 124.80499267578125 91.45773315429688
+57 440.3482971191406 324.7543640136719 856.0110473632812 553.6808471679688
+77 1210.2646484375 214.70904541015625 180.6978759765625 130.9807586669922
+73 122.86636352539062 581.70654296875 141.933837890625 92.2119140625
+57 304.386474609375 554.9171752929688 607.80322265625 985.8953857421875
diff --git a/ultralytics/ours_15000/labels_renders/00002.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00002.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00002.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00003.txt b/ultralytics/ours_15000/labels_renders/00003.txt
new file mode 100755
index 0000000..585ef84
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00003.txt
@@ -0,0 +1,4 @@
+56 538.2301025390625 104.50711059570312 247.06417846679688 207.92776489257812
+58 481.66143798828125 258.5326232910156 206.474609375 448.455322265625
+39 1445.793701171875 221.9141387939453 61.6387939453125 138.26223754882812
+45 1533.8880615234375 239.07981872558594 115.46875 74.74758911132812
diff --git a/ultralytics/ours_15000/labels_renders/00003.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00003.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00003.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00004.txt b/ultralytics/ours_15000/labels_renders/00004.txt
new file mode 100755
index 0000000..37ba462
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00004.txt
@@ -0,0 +1,13 @@
+58 227.7575225830078 816.274169921875 252.65194702148438 447.06085205078125
+41 1328.132080078125 956.1613159179688 75.4034423828125 106.1737060546875
+58 174.69415283203125 683.2254638671875 349.3883056640625 686.889404296875
+65 1265.725341796875 992.71435546875 50.8294677734375 63.5634765625
+39 1091.79736328125 916.1778564453125 64.4674072265625 144.86651611328125
+45 1190.6898193359375 923.5718994140625 112.998291015625 84.69146728515625
+57 1300.902587890625 860.777587890625 596.6400146484375 387.88519287109375
+62 70.25575256347656 715.5887451171875 140.4990234375 327.95947265625
+58 172.08926391601562 568.0811157226562 344.17852783203125 463.9585876464844
+58 165.273193359375 490.346923828125 328.27490234375 311.3486328125
+39 1106.632568359375 914.3624877929688 84.20166015625 142.9842529296875
+39 1140.542236328125 884.443603515625 58.6478271484375 180.10394287109375
+57 1412.955322265625 859.60986328125 373.43994140625 384.071044921875
diff --git a/ultralytics/ours_15000/labels_renders/00004.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00004.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00004.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00005.txt b/ultralytics/ours_15000/labels_renders/00005.txt
new file mode 100755
index 0000000..fd58ed5
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00005.txt
@@ -0,0 +1,8 @@
+58 429.7823181152344 274.03570556640625 360.08209228515625 274.6998596191406
+56 521.5217895507812 405.92578125 180.80795288085938 215.3614501953125
+45 1547.9649658203125 511.89898681640625 89.961181640625 71.36328125
+39 1484.564208984375 481.72698974609375 57.83447265625 168.1378173828125
+56 488.8328552246094 134.17041015625 37.8885498046875 83.5430908203125
+56 447.3287353515625 859.7791748046875 219.36083984375 395.31463623046875
+60 615.343994140625 347.437255859375 289.0085754394531 186.52557373046875
+56 575.9189453125 120.78230285644531 152.35519409179688 105.04810333251953
diff --git a/ultralytics/ours_15000/labels_renders/00005.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00005.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00005.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00006.txt b/ultralytics/ours_15000/labels_renders/00006.txt
new file mode 100755
index 0000000..2ec4a71
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00006.txt
@@ -0,0 +1,8 @@
+58 140.6965789794922 231.6907958984375 258.2830505371094 439.2928466796875
+57 1289.879150390625 674.7433471679688 618.1890258789062 750.0042724609375
+57 1373.5223388671875 263.7394104003906 451.129150390625 453.73992919921875
+56 256.62286376953125 106.1178970336914 254.99557495117188 210.88954162597656
+56 1222.289306640625 931.0147705078125 640.9071044921875 242.61126708984375
+24 1423.560791015625 456.91619873046875 352.059326171875 305.9371643066406
+39 1042.0849609375 241.2950439453125 96.351318359375 170.69549560546875
+65 1180.482666015625 330.2144775390625 47.77001953125 53.155364990234375
diff --git a/ultralytics/ours_15000/labels_renders/00006.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00006.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00006.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00007.txt b/ultralytics/ours_15000/labels_renders/00007.txt
new file mode 100755
index 0000000..7e1128e
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00007.txt
@@ -0,0 +1,9 @@
+58 373.5185546875 859.6270751953125 228.89248657226562 398.77020263671875
+39 1240.7626953125 863.22119140625 51.3602294921875 126.16717529296875
+41 1466.4227294921875 885.7821655273438 72.7880859375 105.7926025390625
+45 1326.9462890625 863.9602661132812 116.9698486328125 91.260986328125
+58 280.09539794921875 733.7379150390625 357.944580078125 636.9672241210938
+56 459.4545593261719 686.7129516601562 228.16741943359375 216.6463623046875
+60 571.0 575.7108154296875 423.7544860839844 277.5291442871094
+73 51.5791015625 234.36138916015625 46.763092041015625 80.872802734375
+58 127.36004638671875 583.4022216796875 116.62628173828125 117.19940185546875
diff --git a/ultralytics/ours_15000/labels_renders/00007.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00007.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00007.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00008.txt b/ultralytics/ours_15000/labels_renders/00008.txt
new file mode 100755
index 0000000..bb140b5
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00008.txt
@@ -0,0 +1,6 @@
+58 641.472900390625 608.1875 195.27130126953125 396.68609619140625
+73 960.5880737304688 133.74798583984375 119.5242919921875 28.129074096679688
+39 1503.994140625 663.4765625 54.359130859375 124.09271240234375
+62 62.997161865234375 373.708984375 125.84921264648438 240.51658630371094
+73 655.18603515625 108.60963439941406 109.66888427734375 43.577606201171875
+73 800.2905883789062 93.8016357421875 145.51025390625 105.65341186523438
diff --git a/ultralytics/ours_15000/labels_renders/00008.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00008.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00008.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00009.txt b/ultralytics/ours_15000/labels_renders/00009.txt
new file mode 100755
index 0000000..9d25752
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00009.txt
@@ -0,0 +1,10 @@
+58 176.81793212890625 379.80035400390625 342.418212890625 658.4984130859375
+65 1123.322021484375 752.0413818359375 69.0457763671875 54.08062744140625
+39 959.39501953125 662.4888305664062 62.90130615234375 143.9349365234375
+57 1314.839111328125 764.7166748046875 569.0206298828125 559.126708984375
+41 1195.53564453125 723.3792724609375 67.916748046875 92.58807373046875
+63 1146.1864013671875 595.6881103515625 189.277587890625 70.86138916015625
+62 74.24581146240234 386.68927001953125 148.3898162841797 284.1929626464844
+41 1198.839599609375 710.7601928710938 61.2545166015625 65.0875244140625
+45 1072.362060546875 679.50830078125 117.60064697265625 92.25189208984375
+56 1264.7755126953125 904.21923828125 592.9550170898438 303.914794921875
diff --git a/ultralytics/ours_15000/labels_renders/00009.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00009.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00009.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00010.txt b/ultralytics/ours_15000/labels_renders/00010.txt
new file mode 100755
index 0000000..6763fa7
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00010.txt
@@ -0,0 +1,8 @@
+56 417.2845458984375 106.95010375976562 252.84420776367188 209.09188842773438
+58 343.49273681640625 228.30181884765625 199.31472778320312 436.57537841796875
+56 1395.873046875 851.9609375 405.4024658203125 397.88494873046875
+39 1241.0560302734375 139.07589721679688 48.994140625 128.83079528808594
+57 1335.03955078125 345.6293640136719 528.0860595703125 679.9777221679688
+45 1323.44775390625 157.19906616210938 117.5989990234375 79.09385681152344
+57 1404.3131103515625 395.2987365722656 389.283935546875 603.4081420898438
+41 1459.8455810546875 169.51271057128906 81.522705078125 98.42434692382812
diff --git a/ultralytics/ours_15000/labels_renders/00010.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00010.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00010.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00011.txt b/ultralytics/ours_15000/labels_renders/00011.txt
new file mode 100755
index 0000000..0485e6f
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00011.txt
@@ -0,0 +1 @@
+58 1137.345458984375 178.4976806640625 282.9019775390625 356.43310546875
diff --git a/ultralytics/ours_15000/labels_renders/00011.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00011.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00011.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00012.txt b/ultralytics/ours_15000/labels_renders/00012.txt
new file mode 100755
index 0000000..f844959
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00012.txt
@@ -0,0 +1,6 @@
+58 754.337646484375 211.654052734375 262.5634765625 414.86572265625
+39 1407.398681640625 375.3517150878906 68.20068359375 164.9468994140625
+45 1544.2626953125 395.9776306152344 110.88427734375 101.79791259765625
+39 1467.3458251953125 352.467041015625 90.665283203125 203.9893798828125
+56 982.401611328125 106.35389709472656 215.35809326171875 212.70779418945312
+56 1534.1982421875 782.8305053710938 129.5670166015625 541.8155517578125
diff --git a/ultralytics/ours_15000/labels_renders/00012.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00012.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00012.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00013.txt b/ultralytics/ours_15000/labels_renders/00013.txt
new file mode 100755
index 0000000..d824701
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00013.txt
@@ -0,0 +1,8 @@
+58 379.16815185546875 396.8303527832031 342.6254577636719 618.4660034179688
+41 1007.0888671875 932.8775024414062 83.873291015625 110.09033203125
+45 924.0405883789062 849.4921875 125.7347412109375 111.87744140625
+56 733.4710693359375 445.7867126464844 197.480224609375 266.51361083984375
+39 765.5612182617188 802.443359375 71.1212158203125 155.145263671875
+73 1139.1156005859375 979.5640869140625 134.77294921875 103.064453125
+63 915.64111328125 234.35342407226562 247.09466552734375 121.23883056640625
+66 962.266357421875 233.1523895263672 151.02581787109375 118.93972778320312
diff --git a/ultralytics/ours_15000/labels_renders/00013.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00013.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00013.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00014.txt b/ultralytics/ours_15000/labels_renders/00014.txt
new file mode 100755
index 0000000..631b824
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00014.txt
@@ -0,0 +1,10 @@
+58 920.62939453125 716.1492309570312 324.061279296875 583.731201171875
+56 1179.2794189453125 664.7342529296875 210.626953125 289.899169921875
+74 803.5303344726562 39.04254150390625 70.3270263671875 76.77761840820312
+57 334.360595703125 735.660400390625 666.2696533203125 569.7698974609375
+73 644.0205078125 187.45193481445312 212.04443359375 158.996826171875
+73 642.453125 206.26870727539062 57.93292236328125 104.82989501953125
+73 654.1170043945312 206.2064971923828 47.9656982421875 103.82229614257812
+73 610.2972412109375 207.6573486328125 54.0494384765625 103.26405334472656
+58 732.035400390625 522.9218139648438 78.27667236328125 95.4957275390625
+73 561.897705078125 185.10699462890625 43.44671630859375 151.1848907470703
diff --git a/ultralytics/ours_15000/labels_renders/00014.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00014.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00014.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00015.txt b/ultralytics/ours_15000/labels_renders/00015.txt
new file mode 100755
index 0000000..39a5020
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00015.txt
@@ -0,0 +1,3 @@
+58 806.95947265625 92.69340515136719 148.09466552734375 184.146728515625
+46 646.536376953125 782.8948364257812 258.55987548828125 114.61669921875
+57 1488.702880859375 678.771484375 220.3267822265625 743.1753540039062
diff --git a/ultralytics/ours_15000/labels_renders/00015.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00015.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00015.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00016.txt b/ultralytics/ours_15000/labels_renders/00016.txt
new file mode 100755
index 0000000..0acae46
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00016.txt
@@ -0,0 +1,6 @@
+58 191.97979736328125 275.6932067871094 378.3447265625 551.3864135742188
+45 794.0655517578125 680.749755859375 117.8798828125 104.173095703125
+56 536.561279296875 321.07391357421875 209.0704345703125 223.0760498046875
+41 886.6061401367188 757.5408325195312 99.6573486328125 110.9654541015625
+73 987.4557495117188 801.7666625976562 113.9488525390625 102.8812255859375
+0 1396.578857421875 820.7415161132812 327.85498046875 468.6654052734375
diff --git a/ultralytics/ours_15000/labels_renders/00016.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00016.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00016.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00017.txt b/ultralytics/ours_15000/labels_renders/00017.txt
new file mode 100755
index 0000000..e2236cc
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00017.txt
@@ -0,0 +1,3 @@
+58 1083.774658203125 324.63372802734375 398.5074462890625 609.6202392578125
+74 366.456298828125 102.14704895019531 168.9361572265625 201.22738647460938
+58 1155.603271484375 430.4409484863281 237.8175048828125 398.53240966796875
diff --git a/ultralytics/ours_15000/labels_renders/00017.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00017.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00017.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00018.txt b/ultralytics/ours_15000/labels_renders/00018.txt
new file mode 100755
index 0000000..d456ac7
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00018.txt
@@ -0,0 +1,6 @@
+45 920.884521484375 742.7841186523438 134.27215576171875 108.0960693359375
+56 1330.474365234375 391.9822082519531 376.8582763671875 433.87225341796875
+57 582.381591796875 710.3677368164062 1160.426025390625 690.9139404296875
+39 1066.3006591796875 659.1279296875 65.321533203125 145.716552734375
+56 1190.5718994140625 685.008544921875 270.51513671875 284.293701171875
+73 716.9117431640625 992.414306640625 261.297607421875 144.20660400390625
diff --git a/ultralytics/ours_15000/labels_renders/00018.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00018.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00018.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00019.txt b/ultralytics/ours_15000/labels_renders/00019.txt
new file mode 100755
index 0000000..54b1aa6
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00019.txt
@@ -0,0 +1,8 @@
+56 619.47607421875 421.3522644042969 350.78338623046875 457.56256103515625
+39 113.2337875366211 578.557861328125 40.39595031738281 126.43804931640625
+77 619.8947143554688 400.91650390625 166.39892578125 133.27902221679688
+39 47.70610809326172 584.3847045898438 88.66294860839844 187.33596801757812
+60 91.61477661132812 840.5615234375 183.07748413085938 438.385986328125
+62 1129.26513671875 527.7431640625 278.60546875 231.73309326171875
+57 163.08702087402344 443.3983154296875 326.1740417480469 395.5819091796875
+56 340.355712890625 645.932373046875 336.9693603515625 327.7073669433594
diff --git a/ultralytics/ours_15000/labels_renders/00019.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00019.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00019.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00020.txt b/ultralytics/ours_15000/labels_renders/00020.txt
new file mode 100755
index 0000000..1a8397c
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00020.txt
@@ -0,0 +1,9 @@
+45 138.11544799804688 197.4602508544922 139.70321655273438 96.644287109375
+57 151.2299346923828 585.4921875 302.4598693847656 603.4192504882812
+39 295.2613830566406 131.66567993164062 56.6494140625 139.4818115234375
+56 687.8071899414062 117.40111541748047 345.89501953125 234.80223083496094
+75 1462.9962158203125 977.9951171875 271.681884765625 173.16168212890625
+57 687.1492309570312 117.33847045898438 343.84912109375 234.67694091796875
+56 485.8592224121094 250.5316162109375 268.34185791015625 278.88116455078125
+39 243.00341796875 146.17828369140625 87.08518981933594 201.22528076171875
+58 1378.44091796875 793.4603271484375 441.5919189453125 525.4758911132812
diff --git a/ultralytics/ours_15000/labels_renders/00020.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00020.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00020.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00021.txt b/ultralytics/ours_15000/labels_renders/00021.txt
new file mode 100755
index 0000000..f99670f
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00021.txt
@@ -0,0 +1,8 @@
+57 420.05853271484375 628.8131713867188 839.8023681640625 810.2423095703125
+45 784.662353515625 541.6824951171875 125.94757080078125 101.17660522460938
+56 1046.0089111328125 215.88436889648438 345.43768310546875 401.29364013671875
+56 951.1431884765625 485.1061706542969 280.684326171875 241.04437255859375
+77 1026.03125 209.02206420898438 174.7125244140625 113.348388671875
+39 901.5615844726562 458.6107177734375 53.7823486328125 139.804443359375
+73 604.9786376953125 809.6673583984375 205.10992431640625 110.483642578125
+41 868.5194702148438 514.1730346679688 66.2823486328125 76.12899780273438
diff --git a/ultralytics/ours_15000/labels_renders/00021.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00021.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00021.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00022.txt b/ultralytics/ours_15000/labels_renders/00022.txt
new file mode 100755
index 0000000..f1167b1
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00022.txt
@@ -0,0 +1,9 @@
+45 189.41217041015625 295.027099609375 129.7722625732422 91.29342651367188
+56 862.0198974609375 207.36375427246094 358.16845703125 413.2187194824219
+57 294.0594787597656 225.70220947265625 583.3689575195312 447.7442626953125
+57 860.953369140625 210.75595092773438 358.052734375 415.866943359375
+56 570.0391845703125 375.01806640625 321.9225769042969 305.1059875488281
+58 1285.319580078125 577.0265502929688 627.1270141601562 950.5179443359375
+39 344.6036376953125 267.07684326171875 37.961029052734375 117.1187744140625
+57 132.41000366210938 488.5374450683594 264.41705322265625 563.5621948242188
+56 1311.548828125 408.3521728515625 378.3145751953125 443.1123046875
diff --git a/ultralytics/ours_15000/labels_renders/00022.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00022.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00022.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00023.txt b/ultralytics/ours_15000/labels_renders/00023.txt
new file mode 100755
index 0000000..7b4eb2a
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00023.txt
@@ -0,0 +1,6 @@
+56 212.7323455810547 281.81646728515625 377.3536682128906 432.158203125
+58 1255.658203125 596.0750732421875 687.4698486328125 926.57177734375
+56 89.03585815429688 547.2721557617188 177.36737060546875 269.9615173339844
+62 939.6290283203125 185.601806640625 319.14398193359375 333.788330078125
+62 747.2265014648438 225.78775024414062 239.6202392578125 182.78050231933594
+62 1011.3243408203125 189.01409912109375 464.06658935546875 339.43548583984375
diff --git a/ultralytics/ours_15000/labels_renders/00023.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00023.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00023.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00024.txt b/ultralytics/ours_15000/labels_renders/00024.txt
new file mode 100755
index 0000000..8c58902
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00024.txt
@@ -0,0 +1,3 @@
+58 1443.752685546875 618.438232421875 311.2359619140625 583.31396484375
+56 248.2419891357422 102.13229370117188 164.01168823242188 199.33282470703125
+75 1456.86279296875 748.9273071289062 275.495361328125 319.5076904296875
diff --git a/ultralytics/ours_15000/labels_renders/00024.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00024.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00024.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00025.txt b/ultralytics/ours_15000/labels_renders/00025.txt
new file mode 100755
index 0000000..4ae14aa
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00025.txt
@@ -0,0 +1,8 @@
+45 729.4473266601562 651.9398193359375 135.8978271484375 90.582275390625
+56 1344.2713623046875 479.3868103027344 368.571533203125 462.35736083984375
+39 888.2757568359375 604.596923828125 40.68359375 119.0130615234375
+73 188.27822875976562 906.3260498046875 355.70770263671875 134.409912109375
+73 448.6788330078125 817.425048828125 157.74551391601562 96.51031494140625
+57 497.670654296875 669.4253540039062 995.34130859375 771.5814208984375
+57 412.0604248046875 575.8843383789062 818.18408203125 582.8974609375
+77 1341.2391357421875 454.3167724609375 179.293212890625 130.30435180664062
diff --git a/ultralytics/ours_15000/labels_renders/00025.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00025.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00025.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00026.txt b/ultralytics/ours_15000/labels_renders/00026.txt
new file mode 100755
index 0000000..a325e97
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00026.txt
@@ -0,0 +1,4 @@
+62 1391.138671875 796.2158813476562 415.7359619140625 520.2818603515625
+57 260.6248474121094 900.116455078125 518.4754028320312 315.09375
+57 179.01101684570312 897.888671875 357.85162353515625 305.24200439453125
+41 1060.2393798828125 296.429443359375 51.076416015625 47.1707763671875
diff --git a/ultralytics/ours_15000/labels_renders/00026.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00026.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00026.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00027.txt b/ultralytics/ours_15000/labels_renders/00027.txt
new file mode 100755
index 0000000..505549e
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00027.txt
@@ -0,0 +1,9 @@
+56 441.0543212890625 214.96490478515625 567.2847900390625 425.2479248046875
+46 926.1787719726562 719.4459228515625 266.3555908203125 198.72589111328125
+41 419.93524169921875 1004.6264038085938 152.26547241210938 119.1473388671875
+39 572.9867553710938 985.2110595703125 179.73876953125 158.494873046875
+77 420.00811767578125 85.064208984375 288.0048522949219 169.07135009765625
+57 441.3297119140625 213.45791625976562 565.9984130859375 426.11004638671875
+57 116.50056457519531 675.8485107421875 231.64959716796875 756.6657104492188
+56 381.9000244140625 692.3802490234375 556.37353515625 628.0830688476562
+62 1485.2960205078125 202.09759521484375 228.150634765625 396.0709228515625
diff --git a/ultralytics/ours_15000/labels_renders/00027.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00027.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00027.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00028.txt b/ultralytics/ours_15000/labels_renders/00028.txt
new file mode 100755
index 0000000..6608810
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00028.txt
@@ -0,0 +1,8 @@
+58 873.367431640625 772.7467041015625 708.087158203125 567.2782592773438
+75 850.0362548828125 947.204833984375 251.08062744140625 228.29345703125
+58 860.1156005859375 526.5592041015625 897.566162109375 1047.23876953125
+73 1457.9833984375 377.9727478027344 71.3916015625 182.53729248046875
+73 1431.201416015625 163.97747802734375 83.8177490234375 167.5854034423828
+73 1413.1649169921875 592.4962158203125 102.391357421875 95.79437255859375
+73 1413.261962890625 373.2794189453125 78.3111572265625 190.82888793945312
+73 1545.7867431640625 646.471435546875 108.426513671875 164.61566162109375
diff --git a/ultralytics/ours_15000/labels_renders/00028.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00028.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00028.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00029.txt b/ultralytics/ours_15000/labels_renders/00029.txt
new file mode 100755
index 0000000..c29f2b8
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00029.txt
@@ -0,0 +1 @@
+58 440.9657287597656 627.196533203125 843.8950805664062 851.4749145507812
diff --git a/ultralytics/ours_15000/labels_renders/00029.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00029.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00029.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00030.txt b/ultralytics/ours_15000/labels_renders/00030.txt
new file mode 100755
index 0000000..44e3f3a
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00030.txt
@@ -0,0 +1,8 @@
+39 186.54702758789062 913.578369140625 154.67724609375 301.025390625
+56 475.7993469238281 576.1796264648438 487.85491943359375 555.01806640625
+56 760.1907958984375 210.1367950439453 542.13623046875 414.2564392089844
+57 761.0648193359375 209.00259399414062 541.5371704101562 413.3399658203125
+57 175.67822265625 531.0115966796875 350.40960693359375 860.342529296875
+56 177.26461791992188 527.0614013671875 354.01373291015625 855.412109375
+77 746.818359375 73.18589782714844 267.03607177734375 143.32424926757812
+39 68.36373901367188 915.61279296875 136.47537231445312 290.72698974609375
diff --git a/ultralytics/ours_15000/labels_renders/00030.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00030.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00030.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00031.txt b/ultralytics/ours_15000/labels_renders/00031.txt
new file mode 100755
index 0000000..e69de29
diff --git a/ultralytics/ours_15000/labels_renders/00031.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00031.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00031.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00032.txt b/ultralytics/ours_15000/labels_renders/00032.txt
new file mode 100755
index 0000000..7b5f0b1
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00032.txt
@@ -0,0 +1,12 @@
+58 837.9232177734375 785.3630981445312 394.79608154296875 540.6253662109375
+0 131.90232849121094 541.9449462890625 34.76165771484375 116.64321899414062
+0 162.72621154785156 533.31494140625 34.966827392578125 117.13897705078125
+56 1144.0087890625 852.7993774414062 247.47259521484375 260.3765869140625
+0 93.91053771972656 558.9458618164062 40.769630432128906 101.76956176757812
+62 584.5952758789062 899.061767578125 333.0059814453125 317.066650390625
+74 723.309814453125 44.6624641418457 91.57073974609375 87.9958724975586
+73 496.6258239746094 234.48223876953125 68.72955322265625 136.63360595703125
+73 496.3199157714844 205.00262451171875 293.26666259765625 214.19287109375
+73 453.2244873046875 235.31353759765625 75.36572265625 133.1403350830078
+58 881.2276611328125 909.0621948242188 309.59771728515625 298.1658935546875
+73 472.01104736328125 236.59906005859375 55.838226318359375 130.983154296875
diff --git a/ultralytics/ours_15000/labels_renders/00032.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00032.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00032.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00033.txt b/ultralytics/ours_15000/labels_renders/00033.txt
new file mode 100755
index 0000000..77e409b
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00033.txt
@@ -0,0 +1 @@
+62 1020.3753051757812 159.41363525390625 874.7010498046875 311.7872314453125
diff --git a/ultralytics/ours_15000/labels_renders/00033.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00033.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00033.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00034.txt b/ultralytics/ours_15000/labels_renders/00034.txt
new file mode 100755
index 0000000..e69de29
diff --git a/ultralytics/ours_15000/labels_renders/00034.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00034.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00034.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00035.txt b/ultralytics/ours_15000/labels_renders/00035.txt
new file mode 100755
index 0000000..7c03df7
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00035.txt
@@ -0,0 +1,8 @@
+74 1495.23291015625 519.0570678710938 101.0662841796875 97.1534423828125
+41 23.60684585571289 699.6473999023438 47.21369171142578 144.2095947265625
+41 64.7264404296875 696.35986328125 57.50849533081055 148.0108642578125
+73 1255.891845703125 706.6134033203125 90.4678955078125 113.03448486328125
+73 148.96478271484375 438.38568115234375 48.20530700683594 101.89218139648438
+73 1287.771240234375 707.64111328125 64.5179443359375 111.29364013671875
+73 1266.317626953125 706.9452514648438 58.9556884765625 110.95458984375
+73 1358.15771484375 544.8963623046875 41.9207763671875 102.17266845703125
diff --git a/ultralytics/ours_15000/labels_renders/00035.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00035.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00035.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00036.txt b/ultralytics/ours_15000/labels_renders/00036.txt
new file mode 100755
index 0000000..e69de29
diff --git a/ultralytics/ours_15000/labels_renders/00036.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00036.txt:Zone.Identifier
new file mode 100755
index 0000000..a45e1ac
--- /dev/null
+++ b/ultralytics/ours_15000/labels_renders/00036.txt:Zone.Identifier
@@ -0,0 +1,2 @@
+[ZoneTransfer]
+ZoneId=3
diff --git a/ultralytics/ours_15000/labels_renders/00037.txt
b/ultralytics/ours_15000/labels_renders/00037.txt new file mode 100755 index 0000000..5b8f575 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders/00037.txt @@ -0,0 +1,2 @@ +57 665.382080078125 895.478515625 1319.70751953125 312.52197265625 +57 806.396484375 898.6373291015625 807.8945922851562 317.52197265625 diff --git a/ultralytics/ours_15000/labels_renders/00037.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00037.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders/00037.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders/00038.txt b/ultralytics/ours_15000/labels_renders/00038.txt new file mode 100755 index 0000000..d6aef1e --- /dev/null +++ b/ultralytics/ours_15000/labels_renders/00038.txt @@ -0,0 +1,3 @@ +57 1167.473388671875 495.8146667480469 861.8585205078125 894.1763305664062 +57 1230.2828369140625 267.1602478027344 737.69091796875 439.00909423828125 +73 1306.599365234375 514.4588623046875 176.2491455078125 104.13299560546875 diff --git a/ultralytics/ours_15000/labels_renders/00038.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders/00038.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders/00038.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00000.txt b/ultralytics/ours_15000/labels_renders2/00000.txt new file mode 100755 index 0000000..ed8ccb7 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00000.txt @@ -0,0 +1,8 @@ +56 511.2752990722656 333.6192321777344 403.02801513671875 456.58966064453125 +45 424.82421875 762.4955444335938 210.146728515625 145.8319091796875 +62 1374.1278076171875 405.0294494628906 449.56787109375 771.8057250976562 +41 269.02703857421875 730.5111083984375 101.47340393066406 138.88043212890625 +39 537.1754150390625 648.2975463867188 91.90643310546875 245.2835693359375 +77 500.49700927734375 321.5142517089844 202.62149047851562 129.8236083984375 +0 88.17063903808594 697.6800537109375 176.34127807617188 712.280517578125 +60 472.712646484375 866.1412963867188 588.786376953125 370.6759033203125 diff --git a/ultralytics/ours_15000/labels_renders2/00000.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00000.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00000.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00001.txt b/ultralytics/ours_15000/labels_renders2/00001.txt new file mode 100755 index 0000000..93cd132 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00001.txt @@ -0,0 +1,7 @@ +45 378.4691162109375 507.7925109863281 163.01522827148438 122.83099365234375 +56 855.2413330078125 196.14273071289062 377.140625 379.14727783203125 +39 552.17724609375 408.2512512207031 58.99078369140625 143.90106201171875 +56 688.9881591796875 451.52337646484375 273.97418212890625 319.0055236816406 +77 843.4219970703125 138.951416015625 187.482421875 124.63641357421875 +40 501.23016357421875 535.4861450195312 83.45263671875 121.25277709960938 +57 292.3587951660156 367.54132080078125 584.2949829101562 608.113037109375 diff --git a/ultralytics/ours_15000/labels_renders2/00001.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00001.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ 
b/ultralytics/ours_15000/labels_renders2/00001.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00002.txt b/ultralytics/ours_15000/labels_renders2/00002.txt new file mode 100755 index 0000000..4ad58c1 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00002.txt @@ -0,0 +1,6 @@ +56 1219.177490234375 243.93069458007812 397.0306396484375 484.81658935546875 +45 410.3744812011719 415.0136413574219 124.80499267578125 91.45782470703125 +57 440.3482666015625 324.7543640136719 856.0111083984375 553.6808471679688 +77 1210.2646484375 214.70904541015625 180.6978759765625 130.9807586669922 +73 122.86638641357422 581.70654296875 141.9337921142578 92.2119140625 +57 304.386474609375 554.9171752929688 607.80322265625 985.8953857421875 diff --git a/ultralytics/ours_15000/labels_renders2/00002.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00002.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00002.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00003.txt b/ultralytics/ours_15000/labels_renders2/00003.txt new file mode 100755 index 0000000..59aaec3 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00003.txt @@ -0,0 +1,4 @@ +56 538.2301025390625 104.50711822509766 247.06417846679688 207.9277801513672 +58 481.6614685058594 258.5325622558594 206.47467041015625 448.4552001953125 +39 1445.793701171875 221.9141387939453 61.6387939453125 138.26223754882812 +45 1533.8880615234375 239.07981872558594 115.46875 74.74758911132812 diff --git a/ultralytics/ours_15000/labels_renders2/00003.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00003.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00003.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00004.txt b/ultralytics/ours_15000/labels_renders2/00004.txt new file mode 100755 index 0000000..9743ded --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00004.txt @@ -0,0 +1,13 @@ +58 227.75753784179688 816.274169921875 252.65191650390625 447.06085205078125 +41 1328.132080078125 956.1613159179688 75.4034423828125 106.1737060546875 +58 174.69415283203125 683.2254638671875 349.3883056640625 686.889404296875 +65 1265.725341796875 992.71435546875 50.8294677734375 63.5634765625 +39 1091.79736328125 916.1778564453125 64.4674072265625 144.86651611328125 +45 1190.6898193359375 923.5718994140625 112.998291015625 84.69146728515625 +57 1300.9027099609375 860.777587890625 596.6399536132812 387.88519287109375 +62 70.2557601928711 715.5887451171875 140.49903869628906 327.95947265625 +58 172.08924865722656 568.0813598632812 344.1784973144531 463.9590759277344 +58 165.27316284179688 490.3468017578125 328.2747802734375 311.348388671875 +39 1106.632568359375 914.3624877929688 84.20166015625 142.9842529296875 +39 1140.542236328125 884.4434814453125 58.6478271484375 180.103759765625 +57 1412.955322265625 859.60986328125 373.43994140625 384.071044921875 diff --git a/ultralytics/ours_15000/labels_renders2/00004.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00004.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00004.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git 
a/ultralytics/ours_15000/labels_renders2/00005.txt b/ultralytics/ours_15000/labels_renders2/00005.txt new file mode 100755 index 0000000..604bbed --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00005.txt @@ -0,0 +1,8 @@ +58 429.7823181152344 274.0357360839844 360.08209228515625 274.70001220703125 +56 521.521728515625 405.92584228515625 180.80792236328125 215.36148071289062 +45 1547.965087890625 511.89898681640625 89.9609375 71.36328125 +39 1484.5643310546875 481.7269287109375 57.83447265625 168.13796997070312 +56 488.8328552246094 134.17041015625 37.8885498046875 83.5430908203125 +56 447.32879638671875 859.7791748046875 219.36105346679688 395.31463623046875 +60 615.3439331054688 347.437255859375 289.0084533691406 186.52565002441406 +56 575.9189453125 120.78230285644531 152.35519409179688 105.0481185913086 diff --git a/ultralytics/ours_15000/labels_renders2/00005.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00005.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00005.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00006.txt b/ultralytics/ours_15000/labels_renders2/00006.txt new file mode 100755 index 0000000..752666a --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00006.txt @@ -0,0 +1,8 @@ +58 140.69656372070312 231.6907501220703 258.28302001953125 439.2928161621094 +57 1289.879150390625 674.7432861328125 618.1890258789062 750.0040893554688 +57 1373.522216796875 263.7394104003906 451.1292724609375 453.73992919921875 +56 256.62286376953125 106.11790466308594 254.99563598632812 210.88955688476562 +56 1222.289306640625 931.014892578125 640.9071044921875 242.61138916015625 +24 1423.560791015625 456.91619873046875 352.059326171875 305.9371643066406 +39 1042.0849609375 241.29501342773438 96.35113525390625 170.695556640625 +65 1180.482666015625 330.2144775390625 47.77001953125 53.155364990234375 diff --git a/ultralytics/ours_15000/labels_renders2/00006.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00006.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00006.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00007.txt b/ultralytics/ours_15000/labels_renders2/00007.txt new file mode 100755 index 0000000..dcbb9a1 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00007.txt @@ -0,0 +1,9 @@ +58 373.5185546875 859.6270751953125 228.89248657226562 398.77020263671875 +39 1240.7626953125 863.22119140625 51.3602294921875 126.16717529296875 +41 1466.4227294921875 885.7821655273438 72.7880859375 105.7926025390625 +45 1326.9462890625 863.9602661132812 116.9698486328125 91.260986328125 +58 280.0953674316406 733.7378540039062 357.94451904296875 636.9671630859375 +56 459.45458984375 686.7129516601562 228.1673583984375 216.6463623046875 +60 571.0 575.7108154296875 423.7544860839844 277.5291442871094 +73 51.5791130065918 234.36138916015625 46.763099670410156 80.872802734375 +58 127.36003112792969 583.4022216796875 116.6262435913086 117.19940185546875 diff --git a/ultralytics/ours_15000/labels_renders2/00007.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00007.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00007.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git 
a/ultralytics/ours_15000/labels_renders2/00008.txt b/ultralytics/ours_15000/labels_renders2/00008.txt new file mode 100755 index 0000000..51b9a8d --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00008.txt @@ -0,0 +1,6 @@ +58 641.472900390625 608.1875 195.27130126953125 396.68609619140625 +73 960.5880737304688 133.74798583984375 119.5242919921875 28.129104614257812 +39 1503.994140625 663.4765625 54.359130859375 124.09271240234375 +62 62.997169494628906 373.7090148925781 125.84922790527344 240.5166473388672 +73 655.1861572265625 108.60963439941406 109.66888427734375 43.577613830566406 +73 800.2905883789062 93.8016357421875 145.51025390625 105.65341186523438 diff --git a/ultralytics/ours_15000/labels_renders2/00008.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00008.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00008.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00009.txt b/ultralytics/ours_15000/labels_renders2/00009.txt new file mode 100755 index 0000000..3f3d51e --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00009.txt @@ -0,0 +1,10 @@ +58 176.81793212890625 379.80035400390625 342.418212890625 658.4984130859375 +65 1123.322021484375 752.0413818359375 69.0457763671875 54.08062744140625 +39 959.39501953125 662.4888305664062 62.90130615234375 143.9349365234375 +57 1314.839111328125 764.7166748046875 569.0206298828125 559.126708984375 +41 1195.53564453125 723.3792724609375 67.916748046875 92.58807373046875 +63 1146.1864013671875 595.6881103515625 189.277587890625 70.86151123046875 +62 74.24581146240234 386.68927001953125 148.3898162841797 284.1929626464844 +41 1198.839599609375 710.7601928710938 61.2545166015625 65.0875244140625 +45 1072.362060546875 679.50830078125 117.60064697265625 92.25189208984375 +56 1264.775634765625 904.21923828125 592.9549560546875 303.914794921875 diff --git a/ultralytics/ours_15000/labels_renders2/00009.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00009.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00009.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00010.txt b/ultralytics/ours_15000/labels_renders2/00010.txt new file mode 100755 index 0000000..6fd6b6a --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00010.txt @@ -0,0 +1,8 @@ +56 417.2845458984375 106.95010375976562 252.84423828125 209.09188842773438 +58 343.49273681640625 228.30184936523438 199.3148193359375 436.57537841796875 +56 1395.873046875 851.9609375 405.4024658203125 397.88482666015625 +39 1241.0560302734375 139.0758819580078 48.994140625 128.830810546875 +57 1335.03955078125 345.6294250488281 528.0863037109375 679.9777221679688 +45 1323.44775390625 157.19906616210938 117.598876953125 79.09385681152344 +57 1404.3131103515625 395.2983093261719 389.283935546875 603.4087524414062 +41 1459.8455810546875 169.51271057128906 81.522705078125 98.42434692382812 diff --git a/ultralytics/ours_15000/labels_renders2/00010.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00010.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00010.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00011.txt 
b/ultralytics/ours_15000/labels_renders2/00011.txt new file mode 100755 index 0000000..0485e6f --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00011.txt @@ -0,0 +1 @@ +58 1137.345458984375 178.4976806640625 282.9019775390625 356.43310546875 diff --git a/ultralytics/ours_15000/labels_renders2/00011.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00011.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00011.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00012.txt b/ultralytics/ours_15000/labels_renders2/00012.txt new file mode 100755 index 0000000..96db974 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00012.txt @@ -0,0 +1,6 @@ +58 754.337646484375 211.654052734375 262.5634765625 414.86572265625 +39 1407.3988037109375 375.3517150878906 68.20068359375 164.9468994140625 +45 1544.2626953125 395.9776306152344 110.88427734375 101.79791259765625 +39 1467.3458251953125 352.467041015625 90.665283203125 203.9893798828125 +56 982.401611328125 106.3539047241211 215.35809326171875 212.7078094482422 +56 1534.1982421875 782.8305053710938 129.5670166015625 541.8155517578125 diff --git a/ultralytics/ours_15000/labels_renders2/00012.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00012.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00012.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00013.txt b/ultralytics/ours_15000/labels_renders2/00013.txt new file mode 100755 index 0000000..19d8df4 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00013.txt @@ -0,0 +1,8 @@ +58 379.168212890625 396.8303527832031 342.6253662109375 618.4658813476562 +41 1007.0888671875 932.8775024414062 83.873291015625 110.09033203125 +45 924.0405883789062 849.4921875 125.7347412109375 111.87744140625 +56 733.4710693359375 445.7865905761719 197.480224609375 266.51385498046875 +39 765.5612182617188 802.443359375 71.1212158203125 155.145263671875 +73 1139.1156005859375 979.5640869140625 134.77294921875 103.064453125 +63 915.64111328125 234.35342407226562 247.09466552734375 121.23883056640625 +66 962.266357421875 233.1523895263672 151.02581787109375 118.93972778320312 diff --git a/ultralytics/ours_15000/labels_renders2/00013.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00013.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00013.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00014.txt b/ultralytics/ours_15000/labels_renders2/00014.txt new file mode 100755 index 0000000..e814322 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00014.txt @@ -0,0 +1,10 @@ +58 920.6295166015625 716.149169921875 324.06109619140625 583.7313842773438 +56 1179.2794189453125 664.734375 210.626953125 289.89910888671875 +74 803.5303344726562 39.042537689208984 70.3270263671875 76.7776107788086 +57 334.36053466796875 735.660400390625 666.269775390625 569.7698974609375 +73 644.0205078125 187.45193481445312 212.04443359375 158.996826171875 +73 642.453125 206.26870727539062 57.93292236328125 104.82989501953125 +73 654.1170043945312 206.20648193359375 47.9656982421875 103.82232666015625 +73 610.2972412109375 207.6573486328125 54.04931640625 103.26405334472656 +58 
732.035400390625 522.9218139648438 78.27667236328125 95.4957275390625 +73 561.897705078125 185.10699462890625 43.44671630859375 151.1848907470703 diff --git a/ultralytics/ours_15000/labels_renders2/00014.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00014.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00014.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00015.txt b/ultralytics/ours_15000/labels_renders2/00015.txt new file mode 100755 index 0000000..39a5020 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00015.txt @@ -0,0 +1,3 @@ +58 806.95947265625 92.69340515136719 148.09466552734375 184.146728515625 +46 646.536376953125 782.8948364257812 258.55987548828125 114.61669921875 +57 1488.702880859375 678.771484375 220.3267822265625 743.1753540039062 diff --git a/ultralytics/ours_15000/labels_renders2/00015.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00015.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00015.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00016.txt b/ultralytics/ours_15000/labels_renders2/00016.txt new file mode 100755 index 0000000..22b6657 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00016.txt @@ -0,0 +1,6 @@ +58 191.97979736328125 275.6932067871094 378.3447265625 551.3864135742188 +45 794.0655517578125 680.749755859375 117.8798828125 104.173095703125 +56 536.561279296875 321.07391357421875 209.0704345703125 223.0760498046875 +41 886.6061401367188 757.5408325195312 99.6573486328125 110.9654541015625 +73 987.4557495117188 801.7667236328125 113.9488525390625 102.88116455078125 +0 1396.578857421875 820.7415771484375 327.85498046875 468.665283203125 diff --git a/ultralytics/ours_15000/labels_renders2/00016.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00016.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00016.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00017.txt b/ultralytics/ours_15000/labels_renders2/00017.txt new file mode 100755 index 0000000..d888617 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00017.txt @@ -0,0 +1,3 @@ +58 1083.774658203125 324.63372802734375 398.5074462890625 609.6202392578125 +74 366.456298828125 102.14704895019531 168.9361572265625 201.22738647460938 +58 1155.603271484375 430.44097900390625 237.8175048828125 398.5323791503906 diff --git a/ultralytics/ours_15000/labels_renders2/00017.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00017.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00017.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00018.txt b/ultralytics/ours_15000/labels_renders2/00018.txt new file mode 100755 index 0000000..b9f3bcf --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00018.txt @@ -0,0 +1,6 @@ +45 920.884521484375 742.7841186523438 134.27215576171875 108.0960693359375 +56 1330.474609375 391.982177734375 376.8583984375 433.8722229003906 +57 582.3817749023438 710.3677368164062 1160.4263916015625 690.9139404296875 +39 1066.3006591796875 659.1279296875 65.321533203125 
145.716552734375 +56 1190.5718994140625 685.008544921875 270.51513671875 284.293701171875 +73 716.9117431640625 992.414306640625 261.297607421875 144.20660400390625 diff --git a/ultralytics/ours_15000/labels_renders2/00018.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00018.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00018.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00019.txt b/ultralytics/ours_15000/labels_renders2/00019.txt new file mode 100755 index 0000000..821f554 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00019.txt @@ -0,0 +1,8 @@ +56 619.47607421875 421.3521728515625 350.78338623046875 457.5624694824219 +39 113.2337875366211 578.557861328125 40.39595031738281 126.43804931640625 +77 619.8947143554688 400.91650390625 166.39892578125 133.27902221679688 +39 47.70610809326172 584.3847045898438 88.66294860839844 187.33596801757812 +60 91.61476135253906 840.5615234375 183.07748413085938 438.385986328125 +62 1129.26513671875 527.7431640625 278.60546875 231.73300170898438 +57 163.0870361328125 443.3982849121094 326.174072265625 395.58184814453125 +56 340.3556823730469 645.9323120117188 336.96929931640625 327.7074890136719 diff --git a/ultralytics/ours_15000/labels_renders2/00019.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00019.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00019.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00020.txt b/ultralytics/ours_15000/labels_renders2/00020.txt new file mode 100755 index 0000000..e37ee14 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00020.txt @@ -0,0 +1,9 @@ +45 138.11546325683594 197.4602508544922 139.70321655273438 96.644287109375 +57 151.2299346923828 585.4921875 302.4598693847656 603.4193725585938 +39 295.2613830566406 131.66566467285156 56.6494140625 139.4818115234375 +56 687.8072509765625 117.401123046875 345.89508056640625 234.80224609375 +75 1462.9962158203125 977.9949951171875 271.681884765625 173.16162109375 +57 687.1491088867188 117.33847045898438 343.8489990234375 234.67694091796875 +56 485.8592224121094 250.5316162109375 268.34185791015625 278.8812255859375 +39 243.00341796875 146.1782684326172 87.08526611328125 201.22525024414062 +58 1378.44091796875 793.4603271484375 441.5919189453125 525.47607421875 diff --git a/ultralytics/ours_15000/labels_renders2/00020.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00020.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00020.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00021.txt b/ultralytics/ours_15000/labels_renders2/00021.txt new file mode 100755 index 0000000..552ce47 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00021.txt @@ -0,0 +1,8 @@ +57 420.05859375 628.8131713867188 839.802490234375 810.2423095703125 +45 784.662353515625 541.6824951171875 125.94757080078125 101.17660522460938 +56 1046.0089111328125 215.88433837890625 345.43768310546875 401.2935791015625 +56 951.1431884765625 485.1061706542969 280.684326171875 241.04437255859375 +77 1026.03125 209.02206420898438 174.7125244140625 113.348388671875 +39 901.5615844726562 458.6107177734375 53.7823486328125 139.804443359375 +73 
604.9786376953125 809.6673583984375 205.10992431640625 110.483642578125 +41 868.5194702148438 514.1729736328125 66.2823486328125 76.129150390625 diff --git a/ultralytics/ours_15000/labels_renders2/00021.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00021.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00021.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00022.txt b/ultralytics/ours_15000/labels_renders2/00022.txt new file mode 100755 index 0000000..64fdb53 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00022.txt @@ -0,0 +1,9 @@ +45 189.41220092773438 295.027099609375 129.77224731445312 91.29342651367188 +56 862.0198974609375 207.36373901367188 358.16845703125 413.21868896484375 +57 294.0594787597656 225.70225524902344 583.3689575195312 447.7442932128906 +57 860.953369140625 210.75595092773438 358.052490234375 415.866943359375 +56 570.0391845703125 375.01806640625 321.9225769042969 305.10595703125 +58 1285.3193359375 577.0265502929688 627.1270751953125 950.5179443359375 +39 344.6036376953125 267.0768127441406 37.961029052734375 117.11871337890625 +57 132.41000366210938 488.5373840332031 264.4169921875 563.5621948242188 +56 1311.548828125 408.3521728515625 378.3145751953125 443.1123046875 diff --git a/ultralytics/ours_15000/labels_renders2/00022.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00022.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00022.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00023.txt b/ultralytics/ours_15000/labels_renders2/00023.txt new file mode 100755 index 0000000..6b67911 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00023.txt @@ -0,0 +1,6 @@ +56 212.73231506347656 281.8164978027344 377.3536682128906 432.15826416015625 +58 1255.658203125 596.0751342773438 687.4698486328125 926.5716552734375 +56 89.0358657836914 547.2721557617188 177.3673553466797 269.9615173339844 +62 939.6290283203125 185.601806640625 319.143798828125 333.788330078125 +62 747.2264404296875 225.78773498535156 239.62017822265625 182.78054809570312 +62 1011.3243408203125 189.01406860351562 464.06646728515625 339.43536376953125 diff --git a/ultralytics/ours_15000/labels_renders2/00023.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00023.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00023.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00024.txt b/ultralytics/ours_15000/labels_renders2/00024.txt new file mode 100755 index 0000000..b96d47f --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00024.txt @@ -0,0 +1,3 @@ +58 1443.752685546875 618.4381713867188 311.2359619140625 583.3140869140625 +56 248.24200439453125 102.13229370117188 164.01171875 199.33282470703125 +75 1456.86279296875 748.9273071289062 275.495361328125 319.5076904296875 diff --git a/ultralytics/ours_15000/labels_renders2/00024.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00024.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00024.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00025.txt 
b/ultralytics/ours_15000/labels_renders2/00025.txt new file mode 100755 index 0000000..9c41b48 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00025.txt @@ -0,0 +1,8 @@ +45 729.4473266601562 651.9398193359375 135.8978271484375 90.582275390625 +56 1344.2713623046875 479.3868103027344 368.571533203125 462.35736083984375 +39 888.2757568359375 604.5968627929688 40.68359375 119.0130615234375 +73 188.27821350097656 906.3260498046875 355.7076721191406 134.409912109375 +73 448.6788330078125 817.425048828125 157.74551391601562 96.51031494140625 +57 497.67059326171875 669.4253540039062 995.3411865234375 771.5814208984375 +57 412.06048583984375 575.8843994140625 818.1842041015625 582.8974609375 +77 1341.2391357421875 454.3168029785156 179.293212890625 130.30426025390625 diff --git a/ultralytics/ours_15000/labels_renders2/00025.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00025.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00025.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00026.txt b/ultralytics/ours_15000/labels_renders2/00026.txt new file mode 100755 index 0000000..d1b4ed5 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00026.txt @@ -0,0 +1,4 @@ +62 1391.138671875 796.2158203125 415.7359619140625 520.28173828125 +57 260.62481689453125 900.116455078125 518.4754638671875 315.09375 +57 179.010986328125 897.888671875 357.8515625 305.24200439453125 +41 1060.2393798828125 296.429443359375 51.076171875 47.1707763671875 diff --git a/ultralytics/ours_15000/labels_renders2/00026.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00026.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00026.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00027.txt b/ultralytics/ours_15000/labels_renders2/00027.txt new file mode 100755 index 0000000..e74c21e --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00027.txt @@ -0,0 +1,9 @@ +56 441.0543212890625 214.96493530273438 567.2847900390625 425.24798583984375 +46 926.1787719726562 719.4459228515625 266.3555908203125 198.72601318359375 +41 419.935302734375 1004.6264038085938 152.2655029296875 119.1473388671875 +39 572.98681640625 985.2110595703125 179.73876953125 158.494873046875 +77 420.00811767578125 85.064208984375 288.0048522949219 169.07135009765625 +57 441.3297119140625 213.4579315185547 565.9984130859375 426.1099548339844 +57 116.50056457519531 675.8485107421875 231.64962768554688 756.6658325195312 +56 381.9000549316406 692.3803100585938 556.3734741210938 628.0830078125 +62 1485.2960205078125 202.09759521484375 228.150634765625 396.0709228515625 diff --git a/ultralytics/ours_15000/labels_renders2/00027.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00027.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00027.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00028.txt b/ultralytics/ours_15000/labels_renders2/00028.txt new file mode 100755 index 0000000..85ccc6c --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00028.txt @@ -0,0 +1,8 @@ +58 873.3673706054688 772.746826171875 708.0870361328125 567.2783203125 +75 850.0362548828125 947.204833984375 251.08062744140625 228.29345703125 
+58 860.1156616210938 526.5592041015625 897.5662841796875 1047.238525390625 +73 1457.9833984375 377.9727783203125 71.3916015625 182.5372314453125 +73 1431.201416015625 163.97747802734375 83.8177490234375 167.5854034423828 +73 1413.1649169921875 592.4962158203125 102.391357421875 95.79437255859375 +73 1413.261962890625 373.2794189453125 78.3111572265625 190.82888793945312 +73 1545.78662109375 646.471435546875 108.4267578125 164.61572265625 diff --git a/ultralytics/ours_15000/labels_renders2/00028.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00028.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00028.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00029.txt b/ultralytics/ours_15000/labels_renders2/00029.txt new file mode 100755 index 0000000..c29f2b8 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00029.txt @@ -0,0 +1 @@ +58 440.9657287597656 627.196533203125 843.8950805664062 851.4749145507812 diff --git a/ultralytics/ours_15000/labels_renders2/00029.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00029.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00029.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00030.txt b/ultralytics/ours_15000/labels_renders2/00030.txt new file mode 100755 index 0000000..8bcffa4 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00030.txt @@ -0,0 +1,8 @@ +39 186.54701232910156 913.5782470703125 154.67721557617188 301.025390625 +56 475.79937744140625 576.1796264648438 487.85498046875 555.01806640625 +56 760.1907348632812 210.1367950439453 542.1361083984375 414.2564392089844 +57 761.0648193359375 209.0026092529297 541.5371704101562 413.3399353027344 +57 175.67819213867188 531.0115966796875 350.40966796875 860.342529296875 +56 177.26461791992188 527.0614624023438 354.01373291015625 855.412109375 +77 746.818359375 73.18589782714844 267.0361328125 143.32424926757812 +39 68.36371612548828 915.61279296875 136.4753875732422 290.72698974609375 diff --git a/ultralytics/ours_15000/labels_renders2/00030.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00030.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00030.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00031.txt b/ultralytics/ours_15000/labels_renders2/00031.txt new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/labels_renders2/00031.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00031.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00031.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00032.txt b/ultralytics/ours_15000/labels_renders2/00032.txt new file mode 100755 index 0000000..32cf2c5 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00032.txt @@ -0,0 +1,12 @@ +58 837.92333984375 785.3630981445312 394.796142578125 540.6253662109375 +0 131.90232849121094 541.9449462890625 34.76165771484375 116.64321899414062 +0 162.72621154785156 533.3148803710938 34.966827392578125 117.1390380859375 +56 1144.0087890625 852.7993774414062 247.4725341796875 
260.3765869140625 +0 93.91055297851562 558.94580078125 40.769630432128906 101.76950073242188 +62 584.5952758789062 899.061767578125 333.0059814453125 317.066650390625 +74 723.309814453125 44.6624755859375 91.57073974609375 87.99589538574219 +73 496.6258239746094 234.48223876953125 68.72955322265625 136.63360595703125 +73 496.3199157714844 205.0026397705078 293.26666259765625 214.19284057617188 +73 453.2244873046875 235.31353759765625 75.36572265625 133.1403350830078 +58 881.2276611328125 909.0621337890625 309.59771728515625 298.16583251953125 +73 472.01104736328125 236.59909057617188 55.838226318359375 130.98321533203125 diff --git a/ultralytics/ours_15000/labels_renders2/00032.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00032.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00032.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00033.txt b/ultralytics/ours_15000/labels_renders2/00033.txt new file mode 100755 index 0000000..0ccd425 --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00033.txt @@ -0,0 +1 @@ +62 1020.3753051757812 159.4136505126953 874.7010498046875 311.7873229980469 diff --git a/ultralytics/ours_15000/labels_renders2/00033.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00033.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00033.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00034.txt b/ultralytics/ours_15000/labels_renders2/00034.txt new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/labels_renders2/00034.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00034.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00034.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00035.txt b/ultralytics/ours_15000/labels_renders2/00035.txt new file mode 100755 index 0000000..681914f --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00035.txt @@ -0,0 +1,8 @@ +74 1495.23291015625 519.0570678710938 101.0662841796875 97.1534423828125 +41 23.606843948364258 699.6473999023438 47.213687896728516 144.2095947265625 +41 64.72644805908203 696.35986328125 57.508487701416016 148.0108642578125 +73 1255.891845703125 706.6134643554688 90.4678955078125 113.0343017578125 +73 148.96478271484375 438.38568115234375 48.20530700683594 101.89218139648438 +73 1287.771240234375 707.64111328125 64.5181884765625 111.29364013671875 +73 1266.317626953125 706.9453125 58.9556884765625 110.95458984375 +73 1358.15771484375 544.8963623046875 41.9207763671875 102.17266845703125 diff --git a/ultralytics/ours_15000/labels_renders2/00035.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00035.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00035.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00036.txt b/ultralytics/ours_15000/labels_renders2/00036.txt new file mode 100755 index 0000000..e69de29 diff --git a/ultralytics/ours_15000/labels_renders2/00036.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00036.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac 
--- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00036.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00037.txt b/ultralytics/ours_15000/labels_renders2/00037.txt new file mode 100755 index 0000000..4d761fa --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00037.txt @@ -0,0 +1,2 @@ +57 665.3819580078125 895.478515625 1319.70751953125 312.52197265625 +57 806.396484375 898.6373291015625 807.8949584960938 317.52197265625 diff --git a/ultralytics/ours_15000/labels_renders2/00037.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00037.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00037.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/labels_renders2/00038.txt b/ultralytics/ours_15000/labels_renders2/00038.txt new file mode 100755 index 0000000..a4995eb --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00038.txt @@ -0,0 +1,3 @@ +57 1167.473388671875 495.8146667480469 861.8585205078125 894.1763305664062 +57 1230.28271484375 267.16021728515625 737.6907958984375 439.0090637207031 +73 1306.59912109375 514.4588623046875 176.2490234375 104.13299560546875 diff --git a/ultralytics/ours_15000/labels_renders2/00038.txt:Zone.Identifier b/ultralytics/ours_15000/labels_renders2/00038.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/labels_renders2/00038.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/per_view_count.json b/ultralytics/ours_15000/per_view_count.json new file mode 100755 index 0000000..fbce699 --- /dev/null +++ b/ultralytics/ours_15000/per_view_count.json @@ -0,0 +1,41 @@ +{ + "00000.png": 342376, + "00001.png": 341512, + "00002.png": 348840, + "00003.png": 284398, + "00004.png": 314759, + "00005.png": 371457, + "00006.png": 316872, + "00007.png": 355033, + "00008.png": 356228, + "00009.png": 352529, + "00010.png": 327956, + "00011.png": 225105, + "00012.png": 295097, + "00013.png": 408259, + "00014.png": 325607, + "00015.png": 253787, + "00016.png": 349775, + "00017.png": 281414, + "00018.png": 291551, + "00019.png": 411080, + "00020.png": 358083, + "00021.png": 377586, + "00022.png": 446446, + "00023.png": 333113, + "00024.png": 228886, + "00025.png": 328250, + "00026.png": 178630, + "00027.png": 152232, + "00028.png": 81900, + "00029.png": 74003, + "00030.png": 154785, + "00031.png": 135077, + "00032.png": 274712, + "00033.png": 117212, + "00034.png": 61323, + "00035.png": 134044, + "00036.png": 87258, + "00037.png": 127631, + "00038.png": 115440 +} \ No newline at end of file diff --git a/ultralytics/ours_15000/per_view_count.json:Zone.Identifier b/ultralytics/ours_15000/per_view_count.json:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/per_view_count.json:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders/00000.png b/ultralytics/ours_15000/renders/00000.png new file mode 100755 index 0000000..5293458 Binary files /dev/null and b/ultralytics/ours_15000/renders/00000.png differ diff --git a/ultralytics/ours_15000/renders/00000.png:Zone.Identifier b/ultralytics/ours_15000/renders/00000.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders/00000.png:Zone.Identifier @@ -0,0 +1,2 @@ 
+[ZoneTransfer] +ZoneId=3
[Binary image entries for ultralytics/ours_15000/renders/00001.png through renders/00035.png omitted: each is recorded only as "Binary files /dev/null and b/ultralytics/ours_15000/renders/NNNNN.png differ" and is paired with an identical two-line Zone.Identifier sidecar ([ZoneTransfer], ZoneId=3).]
diff --git a/ultralytics/ours_15000/renders/00035.png:Zone.Identifier b/ultralytics/ours_15000/renders/00035.png:Zone.Identifier new file
mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders/00035.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders/00036.png b/ultralytics/ours_15000/renders/00036.png new file mode 100755 index 0000000..b754095 Binary files /dev/null and b/ultralytics/ours_15000/renders/00036.png differ diff --git a/ultralytics/ours_15000/renders/00036.png:Zone.Identifier b/ultralytics/ours_15000/renders/00036.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders/00036.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders/00037.png b/ultralytics/ours_15000/renders/00037.png new file mode 100755 index 0000000..391f1d4 Binary files /dev/null and b/ultralytics/ours_15000/renders/00037.png differ diff --git a/ultralytics/ours_15000/renders/00037.png:Zone.Identifier b/ultralytics/ours_15000/renders/00037.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders/00037.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders/00038.png b/ultralytics/ours_15000/renders/00038.png new file mode 100755 index 0000000..8c3dff7 Binary files /dev/null and b/ultralytics/ours_15000/renders/00038.png differ diff --git a/ultralytics/ours_15000/renders/00038.png:Zone.Identifier b/ultralytics/ours_15000/renders/00038.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders/00038.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00000.png b/ultralytics/ours_15000/renders_box/00000.png new file mode 100755 index 0000000..60353cc Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00000.png differ diff --git a/ultralytics/ours_15000/renders_box/00000.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00000.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00000.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00001.png b/ultralytics/ours_15000/renders_box/00001.png new file mode 100755 index 0000000..a36d5e4 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00001.png differ diff --git a/ultralytics/ours_15000/renders_box/00001.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00001.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00001.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00002.png b/ultralytics/ours_15000/renders_box/00002.png new file mode 100755 index 0000000..1ef88ae Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00002.png differ diff --git a/ultralytics/ours_15000/renders_box/00002.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00002.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00002.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00003.png b/ultralytics/ours_15000/renders_box/00003.png new file mode 100755 index 0000000..0e79d5c Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00003.png differ diff --git 
a/ultralytics/ours_15000/renders_box/00003.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00003.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00003.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00004.png b/ultralytics/ours_15000/renders_box/00004.png new file mode 100755 index 0000000..42de7f5 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00004.png differ diff --git a/ultralytics/ours_15000/renders_box/00004.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00004.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00004.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00005.png b/ultralytics/ours_15000/renders_box/00005.png new file mode 100755 index 0000000..9f7760b Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00005.png differ diff --git a/ultralytics/ours_15000/renders_box/00005.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00005.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00005.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00006.png b/ultralytics/ours_15000/renders_box/00006.png new file mode 100755 index 0000000..914cb55 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00006.png differ diff --git a/ultralytics/ours_15000/renders_box/00006.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00006.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00006.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00007.png b/ultralytics/ours_15000/renders_box/00007.png new file mode 100755 index 0000000..4e4e7c2 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00007.png differ diff --git a/ultralytics/ours_15000/renders_box/00007.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00007.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00007.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00008.png b/ultralytics/ours_15000/renders_box/00008.png new file mode 100755 index 0000000..4491d95 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00008.png differ diff --git a/ultralytics/ours_15000/renders_box/00008.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00008.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00008.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00009.png b/ultralytics/ours_15000/renders_box/00009.png new file mode 100755 index 0000000..5fa3c3b Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00009.png differ diff --git a/ultralytics/ours_15000/renders_box/00009.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00009.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00009.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00010.png 
b/ultralytics/ours_15000/renders_box/00010.png new file mode 100755 index 0000000..4eb0c9b Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00010.png differ diff --git a/ultralytics/ours_15000/renders_box/00010.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00010.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00010.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00011.png b/ultralytics/ours_15000/renders_box/00011.png new file mode 100755 index 0000000..2785b9f Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00011.png differ diff --git a/ultralytics/ours_15000/renders_box/00011.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00011.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00011.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00012.png b/ultralytics/ours_15000/renders_box/00012.png new file mode 100755 index 0000000..e94ee52 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00012.png differ diff --git a/ultralytics/ours_15000/renders_box/00012.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00012.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00012.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00013.png b/ultralytics/ours_15000/renders_box/00013.png new file mode 100755 index 0000000..def4597 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00013.png differ diff --git a/ultralytics/ours_15000/renders_box/00013.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00013.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00013.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00014.png b/ultralytics/ours_15000/renders_box/00014.png new file mode 100755 index 0000000..b63df74 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00014.png differ diff --git a/ultralytics/ours_15000/renders_box/00014.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00014.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00014.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00015.png b/ultralytics/ours_15000/renders_box/00015.png new file mode 100755 index 0000000..c41e258 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00015.png differ diff --git a/ultralytics/ours_15000/renders_box/00015.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00015.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00015.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00016.png b/ultralytics/ours_15000/renders_box/00016.png new file mode 100755 index 0000000..e39fffc Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00016.png differ diff --git a/ultralytics/ours_15000/renders_box/00016.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00016.png:Zone.Identifier new file mode 100755 index 
0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00016.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00017.png b/ultralytics/ours_15000/renders_box/00017.png new file mode 100755 index 0000000..ed12834 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00017.png differ diff --git a/ultralytics/ours_15000/renders_box/00017.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00017.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00017.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00018.png b/ultralytics/ours_15000/renders_box/00018.png new file mode 100755 index 0000000..d3c9b58 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00018.png differ diff --git a/ultralytics/ours_15000/renders_box/00018.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00018.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00018.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00019.png b/ultralytics/ours_15000/renders_box/00019.png new file mode 100755 index 0000000..270cf49 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00019.png differ diff --git a/ultralytics/ours_15000/renders_box/00019.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00019.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00019.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00020.png b/ultralytics/ours_15000/renders_box/00020.png new file mode 100755 index 0000000..8e3801f Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00020.png differ diff --git a/ultralytics/ours_15000/renders_box/00020.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00020.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00020.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00021.png b/ultralytics/ours_15000/renders_box/00021.png new file mode 100755 index 0000000..9acaddf Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00021.png differ diff --git a/ultralytics/ours_15000/renders_box/00021.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00021.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00021.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00022.png b/ultralytics/ours_15000/renders_box/00022.png new file mode 100755 index 0000000..e543af3 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00022.png differ diff --git a/ultralytics/ours_15000/renders_box/00022.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00022.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00022.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00023.png b/ultralytics/ours_15000/renders_box/00023.png new file mode 100755 index 0000000..e6b29ad Binary files /dev/null and 
b/ultralytics/ours_15000/renders_box/00023.png differ diff --git a/ultralytics/ours_15000/renders_box/00023.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00023.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00023.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00024.png b/ultralytics/ours_15000/renders_box/00024.png new file mode 100755 index 0000000..c4fd6a1 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00024.png differ diff --git a/ultralytics/ours_15000/renders_box/00024.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00024.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00024.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00025.png b/ultralytics/ours_15000/renders_box/00025.png new file mode 100755 index 0000000..c2e4715 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00025.png differ diff --git a/ultralytics/ours_15000/renders_box/00025.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00025.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00025.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00026.png b/ultralytics/ours_15000/renders_box/00026.png new file mode 100755 index 0000000..8e4cdb7 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00026.png differ diff --git a/ultralytics/ours_15000/renders_box/00026.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00026.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00026.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00027.png b/ultralytics/ours_15000/renders_box/00027.png new file mode 100755 index 0000000..dbb293d Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00027.png differ diff --git a/ultralytics/ours_15000/renders_box/00027.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00027.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00027.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00028.png b/ultralytics/ours_15000/renders_box/00028.png new file mode 100755 index 0000000..6326b41 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00028.png differ diff --git a/ultralytics/ours_15000/renders_box/00028.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00028.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00028.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00029.png b/ultralytics/ours_15000/renders_box/00029.png new file mode 100755 index 0000000..59597dd Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00029.png differ diff --git a/ultralytics/ours_15000/renders_box/00029.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00029.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00029.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] 
+ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00030.png b/ultralytics/ours_15000/renders_box/00030.png new file mode 100755 index 0000000..6e87eeb Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00030.png differ diff --git a/ultralytics/ours_15000/renders_box/00030.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00030.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00030.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00031.png b/ultralytics/ours_15000/renders_box/00031.png new file mode 100755 index 0000000..939fe7f Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00031.png differ diff --git a/ultralytics/ours_15000/renders_box/00031.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00031.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00031.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00032.png b/ultralytics/ours_15000/renders_box/00032.png new file mode 100755 index 0000000..35d00cf Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00032.png differ diff --git a/ultralytics/ours_15000/renders_box/00032.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00032.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00032.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00033.png b/ultralytics/ours_15000/renders_box/00033.png new file mode 100755 index 0000000..4fd7e61 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00033.png differ diff --git a/ultralytics/ours_15000/renders_box/00033.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00033.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00033.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00034.png b/ultralytics/ours_15000/renders_box/00034.png new file mode 100755 index 0000000..6ef7bbc Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00034.png differ diff --git a/ultralytics/ours_15000/renders_box/00034.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00034.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00034.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00035.png b/ultralytics/ours_15000/renders_box/00035.png new file mode 100755 index 0000000..f45a2a4 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00035.png differ diff --git a/ultralytics/ours_15000/renders_box/00035.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00035.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00035.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00036.png b/ultralytics/ours_15000/renders_box/00036.png new file mode 100755 index 0000000..759b26f Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00036.png differ diff --git a/ultralytics/ours_15000/renders_box/00036.png:Zone.Identifier 
b/ultralytics/ours_15000/renders_box/00036.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00036.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00037.png b/ultralytics/ours_15000/renders_box/00037.png new file mode 100755 index 0000000..314e1c3 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00037.png differ diff --git a/ultralytics/ours_15000/renders_box/00037.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00037.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00037.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/ours_15000/renders_box/00038.png b/ultralytics/ours_15000/renders_box/00038.png new file mode 100755 index 0000000..b0019b6 Binary files /dev/null and b/ultralytics/ours_15000/renders_box/00038.png differ diff --git a/ultralytics/ours_15000/renders_box/00038.png:Zone.Identifier b/ultralytics/ours_15000/renders_box/00038.png:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/ours_15000/renders_box/00038.png:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
diff --git a/ultralytics/predict.py b/ultralytics/predict.py
new file mode 100755
index 0000000..f97ea38
--- /dev/null
+++ b/ultralytics/predict.py
@@ -0,0 +1,45 @@
+import os
+from ultralytics import YOLO
+
+def predict_and_save(model, input_folder, output_folder):
+    # Create the output folder (if it does not exist)
+    os.makedirs(output_folder, exist_ok=True)
+
+    # Collect all image files in the input folder
+    image_files = sorted([f for f in os.listdir(input_folder) if f.endswith(('.jpg', '.jpeg', '.png'))])
+
+    # Predict each image and save the results
+    for image_file in image_files:
+        # Use the image file's number as the label file name
+        file_name = os.path.splitext(image_file)[0]
+        output_file = os.path.join(output_folder, f'{file_name}.txt')
+
+        # Load the image and run prediction
+        image_path = os.path.join(input_folder, image_file)
+        results = model(image_path)
+
+        # Open the label file in write mode and save the predictions
+        with open(output_file, 'w') as f:
+            for result in results:
+                for bbox in result.boxes:
+                    print(f"bbox.xywh: {bbox.xywh}")
+                    if bbox.xywh.shape[1] == 4:  # confirm bbox.xywh has the expected shape
+                        cls = int(bbox.cls[0])
+                        x_center = float(bbox.xywh[0, 0])
+                        y_center = float(bbox.xywh[0, 1])
+                        width = float(bbox.xywh[0, 2])
+                        height = float(bbox.xywh[0, 3])
+                        f.write(f'{cls} {x_center} {y_center} {width} {height}\n')
+
+# Make sure the model path is correct and the file exists
+model_path = os.path.expanduser('~/catkin_ws/src/ultralytics/yolov8n.pt')
+if not os.path.exists(model_path):
+    raise FileNotFoundError(f"Model file not found at {model_path}")
+
+# Load the model
+model = YOLO(model_path)  # replace with your YOLOv8 model path
+input_folder = '/root/catkin_ws/src/ultralytics/ours_15000/renders'  # make sure this is the image folder
+output_folder = '/root/catkin_ws/src/ultralytics/ours_15000/labels_renders2'
+
+predict_and_save(model, input_folder, output_folder)
+
diff --git a/ultralytics/predict.py:Zone.Identifier b/ultralytics/predict.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/predict.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
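Note that `bbox.xywh` above is in pixel units, while YOLO-format label files conventionally store `class x_center y_center width height` with the four coordinates normalized to [0, 1]. If normalized labels are what `labels_renders2` is meant to contain, a minimal sketch of the write step using the `xywhn` attribute of `result.boxes` (the normalized counterpart of `xywh` in the same ultralytics API) would be:

```
# Sketch only, not part of the patch: emit normalized YOLO labels.
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
results = model('/root/catkin_ws/src/ultralytics/ours_15000/renders/00008.png')  # any render image
for result in results:
    for bbox in result.boxes:
        cls = int(bbox.cls[0])
        # xywhn is x_center, y_center, width, height, each divided by the image size
        x, y, w, h = (float(v) for v in bbox.xywhn[0])
        print(f'{cls} {x:.6f} {y:.6f} {w:.6f} {h:.6f}')
```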
diff --git a/ultralytics/requirements.txt b/ultralytics/requirements.txt
new file mode 100755
index 0000000..7d5829e
--- /dev/null
+++ b/ultralytics/requirements.txt
@@ -0,0 +1,47 @@
+# Ultralytics requirements
+# Example: pip install -r requirements.txt
+
+# Base ----------------------------------------
+matplotlib>=3.3.0
+numpy>=1.22.2 # pinned by Snyk to avoid a vulnerability
+opencv-python>=4.6.0
+pillow>=7.1.2
+pyyaml>=5.3.1
+requests>=2.23.0
+scipy>=1.4.1
+torch>=1.8.0
+torchvision>=0.9.0
+tqdm>=4.64.0
+
+# Logging -------------------------------------
+# tensorboard>=2.13.0
+# dvclive>=2.12.0
+# clearml
+# comet
+
+# Plotting ------------------------------------
+pandas>=1.1.4
+seaborn>=0.11.0
+
+# Export --------------------------------------
+# coremltools>=7.0 # CoreML export
+# onnx>=1.12.0 # ONNX export
+# onnxsim>=0.4.1 # ONNX simplifier
+# nvidia-pyindex # TensorRT export
+# nvidia-tensorrt # TensorRT export
+# scikit-learn==0.19.2 # CoreML quantization
+# tensorflow>=2.4.1,<=2.13.1 # TF exports (-cpu, -aarch64, -macos)
+# tflite-support
+# jax<=0.4.21 # tensorflowjs bug https://github.com/google/jax/issues/18978
+# jaxlib<=0.4.21 # tensorflowjs bug https://github.com/google/jax/issues/18978
+# tensorflowjs>=3.9.0 # TF.js export
+# openvino-dev>=2023.0 # OpenVINO export
+
+# Extras --------------------------------------
+psutil # system utilization
+py-cpuinfo # display CPU info
+thop>=0.1.1 # FLOPs computation
+# ipython # interactive notebook
+# albumentations>=1.0.3 # training augmentations
+# pycocotools>=2.0.6 # COCO mAP
+# roboflow
diff --git a/ultralytics/requirements.txt:Zone.Identifier b/ultralytics/requirements.txt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/requirements.txt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
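The commented-out Export section above lists optional dependencies that are only needed when converting weights to other formats. For example, once `onnx>=1.12.0` is installed, an ONNX export is a single call in the Python API (a short sketch using the standard ultralytics `export` method; the `yolov8n-seg.onnx` and `yolov8s-seg.onnx` files referenced elsewhere in this patch are the typical product of such a call):

```
# Sketch: ONNX export after installing the Export extras above.
from ultralytics import YOLO

model = YOLO('yolov8n.pt')
model.export(format='onnx')  # writes yolov8n.onnx next to the .pt weights
```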
diff --git a/ultralytics/setup.cfg b/ultralytics/setup.cfg
new file mode 100755
index 0000000..ff36444
--- /dev/null
+++ b/ultralytics/setup.cfg
@@ -0,0 +1,71 @@
+# Project-wide configuration file, can be used for package metadata and other tool configurations
+# Example usage: global configuration for PEP8 (via flake8) setting or default pytest arguments
+# Local usage: pip install pre-commit, pre-commit run --all-files
+
+[metadata]
+license_files = LICENSE
+description_file = README.md
+
+[tool:pytest]
+norecursedirs =
+    .git
+    dist
+    build
+addopts =
+    --doctest-modules
+    --durations=30
+    --color=yes
+
+[coverage:run]
+source = ultralytics/
+data_file = tests/.coverage
+omit =
+    ultralytics/utils/callbacks/*
+
+[flake8]
+max-line-length = 120
+exclude = .tox,*.egg,build,temp
+select = E,W,F
+doctests = True
+verbose = 2
+# https://pep8.readthedocs.io/en/latest/intro.html#error-codes
+format = pylint
+# see: https://www.flake8rules.com/
+ignore = E731,F405,E402,W504,E501
+    # E731: Do not assign a lambda expression, use a def
+    # F405: name may be undefined, or defined from star imports: module
+    # E402: module level import not at top of file
+    # W504: line break after binary operator
+    # E501: line too long
+    # removed:
+    # F401: module imported but unused
+    # E231: missing whitespace after ',', ';', or ':'
+    # E127: continuation line over-indented for visual indent
+    # F403: 'from module import *' used; unable to detect undefined names
+
+
+[isort]
+# https://pycqa.github.io/isort/docs/configuration/options.html
+line_length = 120
+# see: https://pycqa.github.io/isort/docs/configuration/multi_line_output_modes.html
+multi_line_output = 0
+
+[yapf]
+based_on_style = pep8
+spaces_before_comment = 2
+COLUMN_LIMIT = 120
+COALESCE_BRACKETS = True
+SPACES_AROUND_POWER_OPERATOR = True
+SPACE_BETWEEN_ENDING_COMMA_AND_CLOSING_BRACKET = True
+SPLIT_BEFORE_CLOSING_BRACKET = False
+SPLIT_BEFORE_FIRST_ARGUMENT = False
+# EACH_DICT_ENTRY_ON_SEPARATE_LINE = False
+
+[docformatter]
+wrap-summaries = 120
+wrap-descriptions = 120
+in-place = true
+make-summary-multi-line = false
+pre-summary-newline = true
+force-wrap = false
+close-quotes-on-newline = true
diff --git a/ultralytics/setup.cfg:Zone.Identifier b/ultralytics/setup.cfg:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/setup.cfg:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
diff --git a/ultralytics/setup.py b/ultralytics/setup.py
new file mode 100755
index 0000000..8463e70
--- /dev/null
+++ b/ultralytics/setup.py
@@ -0,0 +1,107 @@
+# Ultralytics YOLO 🚀, AGPL-3.0 license
+
+import re
+from pathlib import Path
+
+from setuptools import setup
+
+# Settings
+FILE = Path(__file__).resolve()
+PARENT = FILE.parent  # root directory
+README = (PARENT / 'README.md').read_text(encoding='utf-8')
+
+
+def get_version():
+    """
+    Retrieve the version number from the 'ultralytics/__init__.py' file.
+
+    Returns:
+        (str): The version number extracted from the '__version__' attribute in the 'ultralytics/__init__.py' file.
+    """
+    file = PARENT / 'ultralytics/__init__.py'
+    return re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', file.read_text(encoding='utf-8'), re.M)[1]
+
+
+def parse_requirements(file_path: Path):
+    """
+    Parse a requirements.txt file, ignoring lines that start with '#' and any text after '#'.
+
+    Args:
+        file_path (str | Path): Path to the requirements.txt file.
+
+    Returns:
+        (List[str]): List of parsed requirements.
+    """
+
+    requirements = []
+    for line in Path(file_path).read_text().splitlines():
+        line = line.strip()
+        if line and not line.startswith('#'):
+            requirements.append(line.split('#')[0].strip())  # ignore inline comments
+
+    return requirements
+
+
+setup(
+    name='ultralytics',  # name of pypi package
+    version=get_version(),  # version of pypi package
+    python_requires='>=3.8',
+    license='AGPL-3.0',
+    description=('Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, '
+                 'pose estimation and image classification.'),
+    long_description=README,
+    long_description_content_type='text/markdown',
+    url='https://github.com/ultralytics/ultralytics',
+    project_urls={
+        'Bug Reports': 'https://github.com/ultralytics/ultralytics/issues',
+        'Funding': 'https://ultralytics.com',
+        'Source': 'https://github.com/ultralytics/ultralytics'},
+    author='Ultralytics',
+    author_email='hello@ultralytics.com',
+    packages=['ultralytics'] + [str(x) for x in Path('ultralytics').rglob('*/') if x.is_dir() and '__' not in str(x)],
+    package_data={
+        '': ['*.yaml'],
+        'ultralytics.assets': ['*.jpg']},
+    include_package_data=True,
+    install_requires=parse_requirements(PARENT / 'requirements.txt'),
+    extras_require={
+        'dev': [
+            'ipython',
+            'check-manifest',
+            'pre-commit',
+            'pytest',
+            'pytest-cov',
+            'coverage',
+            'mkdocs-material',
+            'mkdocstrings[python]',
+            'mkdocs-redirects',  # for 301 redirects
+            'mkdocs-ultralytics-plugin>=0.0.34',  # for meta descriptions and images, dates and authors
+        ],
+        'export': [
+            'coremltools>=7.0',
+            'openvino-dev>=2023.0',
+            'tensorflow<=2.13.1',  # TF bug https://github.com/ultralytics/ultralytics/issues/5161
+            'jax<=0.4.21',  # tensorflowjs bug https://github.com/google/jax/issues/18978
+            'jaxlib<=0.4.21',  # tensorflowjs bug https://github.com/google/jax/issues/18978
+            'tensorflowjs',  # automatically installs tensorflow
+        ], },
+    classifiers=[
+        'Development Status :: 4 - Beta',
+        'Intended Audience :: Developers',
+        'Intended Audience :: Education',
+        'Intended Audience :: Science/Research',
+        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.8',
+        'Programming Language :: Python :: 3.9',
+        'Programming Language :: Python :: 3.10',
+        'Programming Language :: Python :: 3.11',
+        'Topic :: Software Development',
+        'Topic :: Scientific/Engineering',
+        'Topic :: Scientific/Engineering :: Artificial Intelligence',
+        'Topic :: Scientific/Engineering :: Image Recognition',
+        'Operating System :: POSIX :: Linux',
+        'Operating System :: MacOS',
+        'Operating System :: Microsoft :: Windows', ],
+    keywords='machine-learning, deep-learning, vision, ML, DL, AI, YOLO, YOLOv3, YOLOv5, YOLOv8, HUB, Ultralytics',
+    entry_points={'console_scripts': ['yolo = ultralytics.cfg:entrypoint', 'ultralytics = ultralytics.cfg:entrypoint']})
diff --git a/ultralytics/setup.py:Zone.Identifier b/ultralytics/setup.py:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/setup.py:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3
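As a quick illustration of `parse_requirements` above: it drops blank lines and whole-line comments and strips inline comments, keeping only the bare requirement specifiers that `install_requires` expects. A self-contained sketch of the same filtering logic (the sample file content is hypothetical):

```
# Sketch: parse_requirements-style filtering applied to a small sample file.
import tempfile
from pathlib import Path

sample = "matplotlib>=3.3.0\n\n# tensorboard>=2.13.0\nthop>=0.1.1  # FLOPs computation\n"
with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as tmp:
    tmp.write(sample)

requirements = []
for line in Path(tmp.name).read_text().splitlines():
    line = line.strip()
    if line and not line.startswith('#'):
        requirements.append(line.split('#')[0].strip())  # ignore inline comments

print(requirements)  # ['matplotlib>=3.3.0', 'thop>=0.1.1']
```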
diff --git a/ultralytics/yolov8m-seg.pt:Zone.Identifier b/ultralytics/yolov8m-seg.pt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8m-seg.pt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/yolov8n-seg.onnx:Zone.Identifier b/ultralytics/yolov8n-seg.onnx:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8n-seg.onnx:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/yolov8n-seg.pt:Zone.Identifier b/ultralytics/yolov8n-seg.pt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8n-seg.pt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/yolov8n.pt:Zone.Identifier b/ultralytics/yolov8n.pt:Zone.Identifier new file mode 100755 index 0000000..a7c17d6 --- /dev/null +++ b/ultralytics/yolov8n.pt:Zone.Identifier @@ -0,0 +1,3 @@ +[ZoneTransfer] +ZoneId=3 +HostUrl=https://objects.githubusercontent.com/github-production-release-asset-2e65be/521807533/6439a642-24f3-4fec-a962-1a97b22656d0?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=releaseassetproduction%2F20240619%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20240619T115350Z&X-Amz-Expires=300&X-Amz-Signature=884514733585a0148397a7fa196893eef526f89e179ec6e9527323880e19fe00&X-Amz-SignedHeaders=host&actor_id=0&key_id=0&repo_id=521807533&response-content-disposition=attachment%3B%20filename%3Dyolov8n.pt&response-content-type=application%2Foctet-stream diff --git a/ultralytics/yolov8n.pt:Zone.Identifier:Zone.Identifier b/ultralytics/yolov8n.pt:Zone.Identifier:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8n.pt:Zone.Identifier:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/yolov8s-seg.onnx:Zone.Identifier b/ultralytics/yolov8s-seg.onnx:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8s-seg.onnx:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/yolov8s-seg.pt:Zone.Identifier b/ultralytics/yolov8s-seg.pt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/yolov8s-seg.pt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3 diff --git a/ultralytics/~/catkin_ws/src/ultralytics/yolov8n.pt:Zone.Identifier b/ultralytics/~/catkin_ws/src/ultralytics/yolov8n.pt:Zone.Identifier new file mode 100755 index 0000000..a45e1ac --- /dev/null +++ b/ultralytics/~/catkin_ws/src/ultralytics/yolov8n.pt:Zone.Identifier @@ -0,0 +1,2 @@ +[ZoneTransfer] +ZoneId=3