diff --git a/tests/packagedcode/data/package_summary/python_whl-expected.json b/tests/packagedcode/data/package_summary/python_whl-expected.json index 34d98f9868..a8bd0af900 100644 --- a/tests/packagedcode/data/package_summary/python_whl-expected.json +++ b/tests/packagedcode/data/package_summary/python_whl-expected.json @@ -14,18 +14,18 @@ "primary_language": null, "other_license_expressions": [ { - "value": "mit", - "count": 7 + "value": null, + "count": 2 }, { - "value": null, - "count": 3 + "value": "mit", + "count": 2 } ], "other_holders": [ { "value": null, - "count": 9 + "count": 3 }, { "value": "Isaac Muse", @@ -47,7 +47,7 @@ "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Screen-scraping library\nBeautiful Soup is a library that makes it easy to scrape information\nfrom web pages. It sits atop an HTML or XML parser, providing Pythonic\nidioms for iterating, searching, and modifying the parse tree.\n\n# Quick start\n\n```\n>>> from bs4 import BeautifulSoup\n>>> soup = BeautifulSoup(\"
SomebadHTML\")\n>>> print(soup.prettify())\n\n \n
\n Some\n \n bad\n \n HTML\n \n \n
\n \n\n>>> soup.find(text=\"bad\")\n'bad'\n>>> soup.i\nHTML\n#\n>>> soup = BeautifulSoup(\"SomebadXML\", \"xml\")\n#\n>>> print(soup.prettify())\n\n\n Some\n \n bad\n \n XML\n \n\n```\n\nTo go beyond the basics, [comprehensive documentation is available](https://www.crummy.com/software/BeautifulSoup/bs4/doc/).\n\n# Links\n\n* [Homepage](https://www.crummy.com/software/BeautifulSoup/bs4/)\n* [Documentation](https://www.crummy.com/software/BeautifulSoup/bs4/doc/)\n* [Discussion group](https://groups.google.com/group/beautifulsoup/)\n* [Development](https://code.launchpad.net/beautifulsoup/)\n* [Bug tracker](https://bugs.launchpad.net/beautifulsoup/)\n* [Complete changelog](https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG)\n\n# Note on Python 2 sunsetting\n\nBeautiful Soup's support for Python 2 was discontinued on December 31,\n2020: one year after the sunset date for Python 2 itself. From this\npoint onward, new Beautiful Soup development will exclusively target\nPython 3. The final release of Beautiful Soup 4 to support Python 2\nwas 4.9.3.\n\n# Supporting the project\n\nIf you use Beautiful Soup as part of your professional work, please consider a\n[Tidelift subscription](https://tidelift.com/subscription/pkg/pypi-beautifulsoup4?utm_source=pypi-beautifulsoup4&utm_medium=referral&utm_campaign=readme).\nThis will support many of the free software projects your organization\ndepends on, not just Beautiful Soup.\n\nIf you use Beautiful Soup for personal projects, the best way to say\nthank you is to read\n[Tool Safety](https://www.crummy.com/software/BeautifulSoup/zine/), a zine I\nwrote about what Beautiful Soup has taught me about software\ndevelopment.\n\n# Building the documentation\n\nThe bs4/doc/ directory contains full documentation in Sphinx\nformat. Run `make html` in that directory to create HTML\ndocumentation.\n\n# Running the unit tests\n\nBeautiful Soup supports unit test discovery using Pytest:\n\n```\n$ pytest\n```", + "description": "Screen-scraping library\nBeautiful Soup is a library that makes it easy to scrape information\nfrom web pages. 
It sits atop an HTML or XML parser, providing Pythonic\nidioms for iterating, searching, and modifying the parse tree.", "release_date": null, "parties": [ { @@ -132,8 +132,8 @@ "identifier": "mit-24a5293c-14d7-5403-efac-1a8b7532c0e8" } ], - "other_license_expression": "mit", - "other_license_expression_spdx": "MIT", + "other_license_expression": null, + "other_license_expression_spdx": null, "other_license_detections": [], "extracted_license_statement": "license: MIT License\nclassifiers:\n - 'License :: OSI Approved :: MIT License'\n", "notice_text": null, @@ -258,64 +258,6 @@ } ], "license_detections": [ - { - "identifier": "mit-5efc8b4c-330e-42ff-3423-92f89eb9526a", - "license_expression": "mit", - "license_expression_spdx": "MIT", - "detection_count": 4, - "reference_matches": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "from_file": "python_whl/sample/builder/__init__.py", - "start_line": 1, - "end_line": 1, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 11, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_413.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_413.RULE" - }, - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "from_file": "python_whl/sample/builder/__init__.py", - "start_line": 2, - "end_line": 2, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 2, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_30.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_30.RULE" - } - ] - }, - { - "identifier": "mit-10691223-5437-037e-cdc7-2fea369a7666", - "license_expression": "mit", - "license_expression_spdx": "MIT", - "detection_count": 1, - "reference_matches": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "from_file": "python_whl/sample/builder/_htmlparser.py", - "start_line": 4, - "end_line": 5, - "matcher": "3-seq", - "score": 63.16, - "matched_length": 12, - "match_coverage": 63.16, - "rule_relevance": 100, - "rule_identifier": "mit_264.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_264.RULE" - } - ] - }, { "identifier": "mit-1f8486fd-7f89-7615-577f-4f2da8721e17", "license_expression": "mit", @@ -326,8 +268,8 @@ "license_expression": "mit", "license_expression_spdx": "MIT", "from_file": "python_whl/sample/__init__.py", - "start_line": 20, - "end_line": 21, + "start_line": 11, + "end_line": 12, "matcher": "3-seq", "score": 63.16, "matched_length": 12, @@ -550,7 +492,7 @@ "qualifiers": {}, "subpath": null, "primary_language": "Python", - "description": "Screen-scraping library\nBeautiful Soup is a library that makes it easy to scrape information\nfrom web pages. It sits atop an HTML or XML parser, providing Pythonic\nidioms for iterating, searching, and modifying the parse tree.\n\n# Quick start\n\n```\n>>> from bs4 import BeautifulSoup\n>>> soup = BeautifulSoup(\"
SomebadHTML\")\n>>> print(soup.prettify())\n\n \n
\n Some\n \n bad\n \n HTML\n \n \n
\n \n\n>>> soup.find(text=\"bad\")\n'bad'\n>>> soup.i\nHTML\n#\n>>> soup = BeautifulSoup(\"SomebadXML\", \"xml\")\n#\n>>> print(soup.prettify())\n\n\n Some\n \n bad\n \n XML\n \n\n```\n\nTo go beyond the basics, [comprehensive documentation is available](https://www.crummy.com/software/BeautifulSoup/bs4/doc/).\n\n# Links\n\n* [Homepage](https://www.crummy.com/software/BeautifulSoup/bs4/)\n* [Documentation](https://www.crummy.com/software/BeautifulSoup/bs4/doc/)\n* [Discussion group](https://groups.google.com/group/beautifulsoup/)\n* [Development](https://code.launchpad.net/beautifulsoup/)\n* [Bug tracker](https://bugs.launchpad.net/beautifulsoup/)\n* [Complete changelog](https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG)\n\n# Note on Python 2 sunsetting\n\nBeautiful Soup's support for Python 2 was discontinued on December 31,\n2020: one year after the sunset date for Python 2 itself. From this\npoint onward, new Beautiful Soup development will exclusively target\nPython 3. The final release of Beautiful Soup 4 to support Python 2\nwas 4.9.3.\n\n# Supporting the project\n\nIf you use Beautiful Soup as part of your professional work, please consider a\n[Tidelift subscription](https://tidelift.com/subscription/pkg/pypi-beautifulsoup4?utm_source=pypi-beautifulsoup4&utm_medium=referral&utm_campaign=readme).\nThis will support many of the free software projects your organization\ndepends on, not just Beautiful Soup.\n\nIf you use Beautiful Soup for personal projects, the best way to say\nthank you is to read\n[Tool Safety](https://www.crummy.com/software/BeautifulSoup/zine/), a zine I\nwrote about what Beautiful Soup has taught me about software\ndevelopment.\n\n# Building the documentation\n\nThe bs4/doc/ directory contains full documentation in Sphinx\nformat. Run `make html` in that directory to create HTML\ndocumentation.\n\n# Running the unit tests\n\nBeautiful Soup supports unit test discovery using Pytest:\n\n```\n$ pytest\n```", + "description": "Screen-scraping library\nBeautiful Soup is a library that makes it easy to scrape information\nfrom web pages. 
It sits atop an HTML or XML parser, providing Pythonic\nidioms for iterating, searching, and modifying the parse tree.", "release_date": null, "parties": [ { @@ -641,98 +583,7 @@ "extracted_license_statement": "license: MIT License\nclassifiers:\n - 'License :: OSI Approved :: MIT License'\n", "notice_text": null, "source_packages": [], - "file_references": [ - { - "path": "sample/__init__.py", - "size": 33822, - "sha1": null, - "md5": null, - "sha256": "92adf6702b5088d8e353d5f08c3d1bd637573795840bcee726a4925b73e15643", - "sha512": null, - "extra_data": {} - }, - { - "path": "sample/builder/_html5lib.py", - "size": 19114, - "sha1": null, - "md5": null, - "sha256": "d30fa198f339c16476883b91091e8cbd8e993d76e0fe181d7729be616aa3d37b", - "sha512": null, - "extra_data": {} - }, - { - "path": "sample/builder/_htmlparser.py", - "size": 14923, - "sha1": null, - "md5": null, - "sha256": "fd50f9674f23e80f5860c478cbb6537dd333c22081b1251000fb8789807e5992", - "sha512": null, - "extra_data": {} - }, - { - "path": "sample/builder/_lxml.py", - "size": 14948, - "sha1": null, - "md5": null, - "sha256": "c8a74cc7591d5fb1f60a8a704961189b84a0adf91df960e3f076ec91c68b6ae5", - "sha512": null, - "extra_data": {} - }, - { - "path": "sample/tests/__init__.py", - "size": 48392, - "sha1": null, - "md5": null, - "sha256": "3727537a076cfebecca0e12e40b4ba4c599303d4f02b72b1261c0492a8c21934", - "sha512": null, - "extra_data": {} - }, - { - "path": "samples-0.1.0.dist-info/METADATA", - "size": 3790, - "sha1": null, - "md5": null, - "sha256": "524392d64a088e56a4232f50d6edb208dc03105394652acb72c6d5fa64c89f3e", - "sha512": null, - "extra_data": {} - }, - { - "path": "samples-0.1.0.dist-info/WHEEL", - "size": 87, - "sha1": null, - "md5": null, - "sha256": "99161210bdc887a8396bf095308730885fffd007b8fe02d8874d5814dc22ab59", - "sha512": null, - "extra_data": {} - }, - { - "path": "samples-0.1.0.dist-info/licenses/AUTHORS", - "size": 2176, - "sha1": null, - "md5": null, - "sha256": "b9221d6eb05bd6ca1b75797b56b954beebe29b674df64177307e04767d162861", - "sha512": null, - "extra_data": {} - }, - { - "path": "samples-0.1.0.dist-info/licenses/LICENSE", - "size": 1441, - "sha1": null, - "md5": null, - "sha256": "55b4d8d4b1e5bc86d10efac91b74c87bcb77526b0f5b9edaf8b9cd2adc7397b2", - "sha512": null, - "extra_data": {} - }, - { - "path": "samples-0.1.0.dist-info/RECORD", - "size": null, - "sha1": null, - "md5": null, - "sha256": null, - "sha512": null, - "extra_data": {} - } - ], + "file_references": [], "is_private": false, "is_virtual": false, "extra_data": { @@ -875,27 +726,7 @@ } ], "license_clues": [], - "percentage_of_license_text": 2.35, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample-0.1.0.dist-info/RECORD", - "type": "file", - "package_data": [], - "for_packages": [], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": true, - "is_key_file": false, - "detected_license_expression": null, - "detected_license_expression_spdx": null, - "license_detections": [], - "license_clues": [], - "percentage_of_license_text": 0, + "percentage_of_license_text": 6.09, "copyrights": [], "holders": [], "authors": [], @@ -1081,9 +912,7 @@ "path": "sample/__init__.py", "type": "file", "package_data": [], - "for_packages": [ - "pkg:pypi/beautifulsoup4@4.12.3?uuid=fixed-uid-done-for-testing-5642512d1758" - ], + "for_packages": [], "is_legal": false, "is_manifest": false, "is_readme": false, @@ -1100,8 +929,8 @@ "license_expression": "mit", "spdx_license_expression": "MIT", 
"from_file": "python_whl/sample/__init__.py", - "start_line": 20, - "end_line": 21, + "start_line": 11, + "end_line": 12, "matcher": "3-seq", "score": 63.16, "matched_length": 12, @@ -1115,342 +944,28 @@ } ], "license_clues": [], - "percentage_of_license_text": 9.68, + "percentage_of_license_text": 17.14, "copyrights": [ { "copyright": "Copyright (c) 2004-2024 Leonard Richardson", - "start_line": 19, - "end_line": 19 + "start_line": 10, + "end_line": 10 } ], "holders": [ { "holder": "Leonard Richardson", - "start_line": 19, - "end_line": 19 + "start_line": 10, + "end_line": 10 } ], "authors": [ { "author": "Leonard Richardson (leonardr@segfault.org)", - "start_line": 17, - "end_line": 17 - } - ], - "scan_errors": [] - }, - { - "path": "sample/builder", - "type": "directory", - "package_data": [], - "for_packages": [], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": null, - "detected_license_expression_spdx": null, - "license_detections": [], - "license_clues": [], - "percentage_of_license_text": 0, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/builder/__init__.py", - "type": "file", - "package_data": [], - "for_packages": [], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": "mit", - "detected_license_expression_spdx": "MIT", - "license_detections": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "matches": [ - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/__init__.py", - "start_line": 1, - "end_line": 1, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 11, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_413.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_413.RULE" - }, - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/__init__.py", - "start_line": 2, - "end_line": 2, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 2, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_30.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_30.RULE" - } - ], - "identifier": "mit-5efc8b4c-330e-42ff-3423-92f89eb9526a" - } - ], - "license_clues": [], - "percentage_of_license_text": 100.0, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/builder/_html5lib.py", - "type": "file", - "package_data": [], - "for_packages": [ - "pkg:pypi/beautifulsoup4@4.12.3?uuid=fixed-uid-done-for-testing-5642512d1758" - ], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": "mit", - "detected_license_expression_spdx": "MIT", - "license_detections": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "matches": [ - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/_html5lib.py", - "start_line": 1, - "end_line": 1, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 11, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_413.RULE", - "rule_url": 
"https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_413.RULE" - }, - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/_html5lib.py", - "start_line": 2, - "end_line": 2, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 2, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_30.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_30.RULE" - } - ], - "identifier": "mit-5efc8b4c-330e-42ff-3423-92f89eb9526a" - } - ], - "license_clues": [], - "percentage_of_license_text": 86.67, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/builder/_htmlparser.py", - "type": "file", - "package_data": [], - "for_packages": [ - "pkg:pypi/beautifulsoup4@4.12.3?uuid=fixed-uid-done-for-testing-5642512d1758" - ], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": "mit", - "detected_license_expression_spdx": "MIT", - "license_detections": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "matches": [ - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/_htmlparser.py", - "start_line": 4, - "end_line": 5, - "matcher": "3-seq", - "score": 63.16, - "matched_length": 12, - "match_coverage": 63.16, - "rule_relevance": 100, - "rule_identifier": "mit_264.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_264.RULE" - } - ], - "identifier": "mit-10691223-5437-037e-cdc7-2fea369a7666" - } - ], - "license_clues": [], - "percentage_of_license_text": 38.71, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/builder/_lxml.py", - "type": "file", - "package_data": [], - "for_packages": [ - "pkg:pypi/beautifulsoup4@4.12.3?uuid=fixed-uid-done-for-testing-5642512d1758" - ], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": "mit", - "detected_license_expression_spdx": "MIT", - "license_detections": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "matches": [ - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/_lxml.py", - "start_line": 1, - "end_line": 1, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 11, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_413.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_413.RULE" - }, - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/builder/_lxml.py", - "start_line": 2, - "end_line": 2, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 2, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_30.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_30.RULE" - } - ], - "identifier": "mit-5efc8b4c-330e-42ff-3423-92f89eb9526a" - } - ], - "license_clues": [], - "percentage_of_license_text": 81.25, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/tests", - "type": "directory", - "package_data": [], - 
"for_packages": [], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": null, - "detected_license_expression_spdx": null, - "license_detections": [], - "license_clues": [], - "percentage_of_license_text": 0, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, - { - "path": "sample/tests/__init__.py", - "type": "file", - "package_data": [], - "for_packages": [ - "pkg:pypi/beautifulsoup4@4.12.3?uuid=fixed-uid-done-for-testing-5642512d1758" - ], - "is_legal": false, - "is_manifest": false, - "is_readme": false, - "is_top_level": false, - "is_key_file": false, - "detected_license_expression": "mit", - "detected_license_expression_spdx": "MIT", - "license_detections": [ - { - "license_expression": "mit", - "license_expression_spdx": "MIT", - "matches": [ - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/tests/__init__.py", - "start_line": 4, - "end_line": 4, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 11, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_413.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_413.RULE" - }, - { - "license_expression": "mit", - "spdx_license_expression": "MIT", - "from_file": "python_whl/sample/tests/__init__.py", - "start_line": 5, - "end_line": 5, - "matcher": "2-aho", - "score": 100.0, - "matched_length": 2, - "match_coverage": 100.0, - "rule_relevance": 100, - "rule_identifier": "mit_30.RULE", - "rule_url": "https://github.com/nexB/scancode-toolkit/tree/develop/src/licensedcode/data/rules/mit_30.RULE" - } - ], - "identifier": "mit-5efc8b4c-330e-42ff-3423-92f89eb9526a" + "start_line": 8, + "end_line": 8 } ], - "license_clues": [], - "percentage_of_license_text": 65.0, - "copyrights": [], - "holders": [], - "authors": [], "scan_errors": [] } ] diff --git a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/METADATA b/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/METADATA index a2681d725d..0f870ce574 100644 --- a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/METADATA +++ b/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/METADATA @@ -34,89 +34,4 @@ Description-Content-Type: text/markdown Beautiful Soup is a library that makes it easy to scrape information from web pages. It sits atop an HTML or XML parser, providing Pythonic -idioms for iterating, searching, and modifying the parse tree. - -# Quick start - -``` ->>> from bs4 import BeautifulSoup ->>> soup = BeautifulSoup("
SomebadHTML") ->>> print(soup.prettify()) - - -
- Some - - bad - - HTML - - -
- - ->>> soup.find(text="bad") -'bad' ->>> soup.i -HTML -# ->>> soup = BeautifulSoup("SomebadXML", "xml") -# ->>> print(soup.prettify()) - - - Some - - bad - - XML - - -``` - -To go beyond the basics, [comprehensive documentation is available](https://www.crummy.com/software/BeautifulSoup/bs4/doc/). - -# Links - -* [Homepage](https://www.crummy.com/software/BeautifulSoup/bs4/) -* [Documentation](https://www.crummy.com/software/BeautifulSoup/bs4/doc/) -* [Discussion group](https://groups.google.com/group/beautifulsoup/) -* [Development](https://code.launchpad.net/beautifulsoup/) -* [Bug tracker](https://bugs.launchpad.net/beautifulsoup/) -* [Complete changelog](https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG) - -# Note on Python 2 sunsetting - -Beautiful Soup's support for Python 2 was discontinued on December 31, -2020: one year after the sunset date for Python 2 itself. From this -point onward, new Beautiful Soup development will exclusively target -Python 3. The final release of Beautiful Soup 4 to support Python 2 -was 4.9.3. - -# Supporting the project - -If you use Beautiful Soup as part of your professional work, please consider a -[Tidelift subscription](https://tidelift.com/subscription/pkg/pypi-beautifulsoup4?utm_source=pypi-beautifulsoup4&utm_medium=referral&utm_campaign=readme). -This will support many of the free software projects your organization -depends on, not just Beautiful Soup. - -If you use Beautiful Soup for personal projects, the best way to say -thank you is to read -[Tool Safety](https://www.crummy.com/software/BeautifulSoup/zine/), a zine I -wrote about what Beautiful Soup has taught me about software -development. - -# Building the documentation - -The bs4/doc/ directory contains full documentation in Sphinx -format. Run `make html` in that directory to create HTML -documentation. - -# Running the unit tests - -Beautiful Soup supports unit test discovery using Pytest: - -``` -$ pytest -``` - +idioms for iterating, searching, and modifying the parse tree. 
\ No newline at end of file diff --git a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/RECORD b/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/RECORD deleted file mode 100644 index 0b9b81a24d..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -sample/__init__.py,sha256=kq32cCtQiNjjU9XwjD0b1jdXN5WEC87nJqSSW3PhVkM,33822 -sample/builder/_html5lib.py,sha256=0w-hmPM5wWR2iDuRCR6MvY6ZPXbg_hgddym-YWqj03s,19114 -sample/builder/_htmlparser.py,sha256=_VD5Z08j6A9YYMR4y7ZTfdMzwiCBsSUQAPuHiYB-WZI,14923 -sample/builder/_lxml.py,sha256=yKdMx1kdX7H2CopwSWEYm4Sgrfkd-WDj8HbskcaLauU,14948 -sample/tests/__init__.py,sha256=NydTegds_r7MoOEuQLS6TFmTA9TwK3KxJhwEkqjCGTQ,48392 -samples-0.1.0.dist-info/METADATA,sha256=UkOS1koIjlakIy9Q1u2yCNwDEFOUZSrLcsbV-mTInz4,3790 -samples-0.1.0.dist-info/WHEEL,sha256=mRYSEL3Ih6g5a_CVMIcwiF__0Ae4_gLYh01YFNwiq1k,87 -samples-0.1.0.dist-info/licenses/AUTHORS,sha256=uSIdbrBb1sobdXl7VrlUvuvim2dN9kF3MH4Edn0WKGE,2176 -samples-0.1.0.dist-info/licenses/LICENSE,sha256=VbTY1LHlvIbRDvrJG3TIe8t3UmsPW57a-LnNKtxzl7I,1441 -samples-0.1.0.dist-info/RECORD, diff --git a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/licenses/AUTHORS b/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/licenses/AUTHORS index 1f14fe07de..2ede63fea3 100644 --- a/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/licenses/AUTHORS +++ b/tests/packagedcode/data/package_summary/python_whl/sample-0.1.0.dist-info/licenses/AUTHORS @@ -11,39 +11,3 @@ of UnicodeDammit. Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful Soup 4 working under Python 3. - -Simon Willison wrote soupselect, which was used to make Beautiful Soup -support CSS selectors. Isaac Muse wrote SoupSieve, which made it -possible to _remove_ the CSS selector code from Beautiful Soup. - -Sam Ruby helped with a lot of edge cases. - -Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his -work in solving the nestable tags conundrum. 
- -An incomplete list of people have contributed patches to Beautiful -Soup: - - Istvan Albert, Andrew Lin, Anthony Baxter, Oliver Beattie, Andrew -Boyko, Tony Chang, Francisco Canas, "Delong", Zephyr Fang, Fuzzy, -Roman Gaufman, Yoni Gilad, Richie Hindle, Toshihiro Kamiya, Peteris -Krumins, Kent Johnson, Marek Kapolka, Andreas Kostyrka, Roel Kramer, -Ben Last, Robert Leftwich, Stefaan Lippens, "liquider", Staffan -Malmgren, Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", -Ed Oskiewicz, Martijn Peters, Greg Phillips, Giles Radford, Stefano -Revera, Arthur Rudolph, Marko Samastur, James Salter, Jouni Seppänen, -Alexander Schmolck, Tim Shirley, Geoffrey Sneddon, Ville Skyttä, -"Vikas", Jens Svalgaard, Andy Theyers, Eric Weiser, Glyn Webster, John -Wiseman, Paul Wright, Danny Yoo - -An incomplete list of people who made suggestions or found bugs or -found ways to break Beautiful Soup: - - Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel, - Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes, - Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams, - warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, - Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed - Summers, Dennis Sutch, Chris Smith, Aaron Swartz, Stuart - Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de - Sousa Rocha, Yichun Wei, Per Vognsen diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/__init__.py b/tests/packagedcode/data/package_summary/python_whl/sample/__init__.py index 108c4e15d8..062c68c718 100644 --- a/tests/packagedcode/data/package_summary/python_whl/sample/__init__.py +++ b/tests/packagedcode/data/package_summary/python_whl/sample/__init__.py @@ -1,15 +1,6 @@ """Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend". http://www.crummy.com/software/BeautifulSoup/ - -Beautiful Soup uses a pluggable XML or HTML parser to parse a -(possibly invalid) document into a tree representation. Beautiful Soup -provides methods and Pythonic idioms that make it easy to navigate, -search, and modify the parse tree. - -Beautiful Soup works with Python 3.6 and up. It works better if lxml -and/or html5lib is installed. - For more than you ever wanted to know about Beautiful Soup, see the documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/builder/__init__.py b/tests/packagedcode/data/package_summary/python_whl/sample/builder/__init__.py deleted file mode 100644 index 9f48251972..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample/builder/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Use of this source code is governed by the MIT license. -__license__ = "MIT" \ No newline at end of file diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_html5lib.py b/tests/packagedcode/data/package_summary/python_whl/sample/builder/_html5lib.py deleted file mode 100644 index 358c59d1a5..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_html5lib.py +++ /dev/null @@ -1,6 +0,0 @@ -# Use of this source code is governed by the MIT license. 
-__license__ = "MIT" - -__all__ = [ - 'HTML5TreeBuilder', - ] \ No newline at end of file diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_htmlparser.py b/tests/packagedcode/data/package_summary/python_whl/sample/builder/_htmlparser.py deleted file mode 100644 index b43625db10..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_htmlparser.py +++ /dev/null @@ -1,9 +0,0 @@ -# encoding: utf-8 -"""Use the HTMLParser library to parse HTML files that aren't too bad.""" - -# Use of this source code is governed by the MIT license. -__license__ = "MIT" - -__all__ = [ - 'HTMLParserTreeBuilder', - ] \ No newline at end of file diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_lxml.py b/tests/packagedcode/data/package_summary/python_whl/sample/builder/_lxml.py deleted file mode 100644 index 310439f057..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample/builder/_lxml.py +++ /dev/null @@ -1,7 +0,0 @@ -# Use of this source code is governed by the MIT license. -__license__ = "MIT" - -__all__ = [ - 'LXMLTreeBuilderForXML', - 'LXMLTreeBuilder', - ] \ No newline at end of file diff --git a/tests/packagedcode/data/package_summary/python_whl/sample/tests/__init__.py b/tests/packagedcode/data/package_summary/python_whl/sample/tests/__init__.py deleted file mode 100644 index e6e7e56225..0000000000 --- a/tests/packagedcode/data/package_summary/python_whl/sample/tests/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# encoding: utf-8 -"""Helper classes for tests.""" - -# Use of this source code is governed by the MIT license. -__license__ = "MIT" diff --git a/tests/packagedcode/data/package_summary/rubygems-expected.json b/tests/packagedcode/data/package_summary/rubygems-expected.json index af3bbd5ca3..e207e24ac2 100644 --- a/tests/packagedcode/data/package_summary/rubygems-expected.json +++ b/tests/packagedcode/data/package_summary/rubygems-expected.json @@ -15,7 +15,7 @@ "other_license_expressions": [ { "value": null, - "count": 5 + "count": 4 }, { "value": "apache-2.0", @@ -25,7 +25,7 @@ "other_holders": [ { "value": null, - "count": 7 + "count": 6 }, { "value": "Dominik Richter", @@ -459,26 +459,6 @@ "authors": [], "scan_errors": [] }, - { - "path": "README.md", - "type": "file", - "package_data": [], - "for_packages": [], - "is_legal": false, - "is_manifest": false, - "is_readme": true, - "is_top_level": true, - "is_key_file": true, - "detected_license_expression": null, - "detected_license_expression_spdx": null, - "license_detections": [], - "license_clues": [], - "percentage_of_license_text": 0, - "copyrights": [], - "holders": [], - "authors": [], - "scan_errors": [] - }, { "path": "bin", "type": "directory", diff --git a/tests/packagedcode/data/package_summary/rubygems/README.md b/tests/packagedcode/data/package_summary/rubygems/README.md deleted file mode 100644 index bb64eb981a..0000000000 --- a/tests/packagedcode/data/package_summary/rubygems/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# inspec-bin - -This gem exists so that we can publish the `inspec` executable in a separate gem from the main `inspec` library gem. This permits the community to use Chef InSpec as a library. \ No newline at end of file