# typed: true
# frozen_string_literal: true

require "toml-rb"
require "sorbet-runtime"

require "dependabot/file_fetchers"
require "dependabot/file_fetchers/base"
require "dependabot/uv/language_version_manager"
require "dependabot/uv/requirements_file_matcher"
require "dependabot/uv/requirement_parser"
require "dependabot/uv/file_parser/pyproject_files_parser"
require "dependabot/uv/file_parser/python_requirement_parser"
require "dependabot/errors"

module Dependabot
  module Uv
    # Fetches the dependency files for a Python project managed with uv:
    # requirements(.txt/.in) files, uv.lock files, pyproject.toml, path
    # dependencies, and the optional .python-version file.
    class FileFetcher < Dependabot::FileFetchers::Base
      extend T::Sig
      extend T::Helpers

      # Matches "-r other.txt" / "-r other.in" include lines inside a requirements file.
      CHILD_REQUIREMENT_REGEX = /^-r\s?(?<path>.*\.(?:txt|in))/
      # Matches "-c constraints.txt" constraint lines inside a requirements file.
      CONSTRAINT_REGEX = /^-c\s?(?<path>.*\.(?:txt|in))/
      DEPENDENCY_TYPES = %w(packages dev-packages).freeze
      REQUIREMENT_FILE_PATTERNS = {
        extensions: [".txt", ".in"],
        filenames: ["uv.lock"]
      }.freeze
      # Requirement-style files larger than this are skipped (almost certainly not manifests).
      MAX_FILE_SIZE = 500_000

      def self.required_files_in?(filenames)
        return true if filenames.any? { |name| name.end_with?(*REQUIREMENT_FILE_PATTERNS[:extensions]) }

        # If there is a directory of requirements return true
        return true if filenames.include?("requirements")

        # If this repo is using pyproject.toml return true (uv.lock files require a pyproject.toml)
        filenames.include?("pyproject.toml")
      end

      def self.required_files_message
        "Repo must contain a requirements.txt, uv.lock, requirements.in, or pyproject.toml"
      end

      def ecosystem_versions
        # Hmm... it's weird that this calls file parser methods, but here we are in the file fetcher... for all
        # ecosystems our goal is to extract the user specified versions, so we'll need to do file parsing... so should
        # we move this `ecosystem_versions` metrics method to run in the file parser for all ecosystems? Downside is if
        # file parsing blows up, this metric isn't emitted, but reality is we have to parse anyway... as we want to know
        # the user-specified range of versions, not the version Dependabot chose to run.
        python_requirement_parser = FileParser::PythonRequirementParser.new(dependency_files: files)
        language_version_manager = LanguageVersionManager.new(python_requirement_parser: python_requirement_parser)
        Dependabot.logger.info("Dependabot is using Python version '#{language_version_manager.python_version}'.")
        {
          languages: {
            python: {
              # TODO: alternatively this could use `python_requirement_parser.user_specified_requirements` which
              # returns an array... which we could flip to return a hash of manifest name => version
              # string and then check for min/max versions... today it simply defaults to
              # array.first which seems rather arbitrary.
              "raw" => language_version_manager.user_specified_python_version || "unknown",
              "max" => language_version_manager.python_major_minor || "unknown"
            }
          }
        }
      end

      sig { override.returns(T::Array[DependencyFile]) }
      def fetch_files
        fetched_files = []
        fetched_files += pyproject_files
        fetched_files += requirements_in_files
        fetched_files += requirement_files if requirements_txt_files.any?
        fetched_files += uv_lock_files
        fetched_files += project_files
        fetched_files << python_version_file if python_version_file

        uniq_files(fetched_files)
      end

      private

      # De-duplicates fetched files, preferring the non-support-file copy when
      # the same name was fetched both ways.
      def uniq_files(fetched_files)
        uniq_files = fetched_files.reject(&:support_file?).uniq
        uniq_files += fetched_files.reject { |f| uniq_files.map(&:name).include?(f.name) }
      end

      def pyproject_files
        [pyproject].compact
      end

      def requirement_files
        [*requirements_txt_files, *child_requirement_txt_files, *constraints_files]
      end

      # Fetches .python-version from the configured directory, falling back to
      # the repo root when the directory-level file is absent. Memoized.
      def python_version_file
        return @python_version_file if defined?(@python_version_file)

        @python_version_file = fetch_support_file(".python-version")
        return @python_version_file if @python_version_file
        return if [".", "/"].include?(directory)

        # Check the top-level for a .python-version file, too
        reverse_path = Pathname.new(directory[0]).relative_path_from(directory)
        @python_version_file =
          fetch_support_file(File.join(reverse_path, ".python-version"))
          &.tap { |f| f.name = ".python-version" }
      end

      def pyproject
        return @pyproject if defined?(@pyproject)

        @pyproject = fetch_file_if_present("pyproject.toml")
      end

      def requirements_txt_files
        req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
      end

      def requirements_in_files
        req_txt_and_in_files.select { |f| f.name.end_with?(".in") } + child_requirement_in_files
      end

      def uv_lock_files
        req_txt_and_in_files.select { |f| f.name.end_with?("uv.lock") } + child_uv_lock_files
      end

      # Parses pyproject.toml, raising DependencyFileNotParseable on invalid TOML.
      def parsed_pyproject
        raise "No pyproject.toml" unless pyproject

        @parsed_pyproject ||= TomlRB.parse(pyproject.content)
      rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
        raise Dependabot::DependencyFileNotParseable, pyproject.path
      end

      # All requirement-style files from the configured directory and its
      # immediate subdirectories. Memoized.
      def req_txt_and_in_files
        return @req_txt_and_in_files if @req_txt_and_in_files

        @req_txt_and_in_files = []
        @req_txt_and_in_files += fetch_requirement_files_from_path
        @req_txt_and_in_files += fetch_requirement_files_from_dirs
        @req_txt_and_in_files
      end

      def req_files_for_dir(requirements_dir)
        dir = directory.gsub(%r{(^/|/$)}, "")
        relative_reqs_dir = requirements_dir.path.gsub(%r{^/?#{Regexp.escape(dir)}/?}, "")
        fetch_requirement_files_from_path(relative_reqs_dir)
      end

      def child_requirement_txt_files
        child_requirement_files.select { |f| f.name.end_with?(".txt") }
      end

      def child_requirement_in_files
        child_requirement_files.select { |f| f.name.end_with?(".in") }
      end

      def child_uv_lock_files
        child_requirement_files.select { |f| f.name.end_with?("uv.lock") }
      end

      # Files pulled in via "-r" includes, fetched transitively. Memoized.
      def child_requirement_files
        @child_requirement_files ||=
          begin
            fetched_files = req_txt_and_in_files.dup
            req_txt_and_in_files.flat_map do |requirement_file|
              child_files = fetch_child_requirement_files(
                file: requirement_file,
                previously_fetched_files: fetched_files
              )

              fetched_files += child_files
              child_files
            end
          end
      end

      # Recursively follows "-r" include lines, skipping files already fetched
      # (and self-references) to avoid infinite loops.
      def fetch_child_requirement_files(file:, previously_fetched_files:)
        paths = file.content.scan(CHILD_REQUIREMENT_REGEX).flatten
        current_dir = File.dirname(file.name)

        paths.flat_map do |path|
          path = File.join(current_dir, path) unless current_dir == "."
          path = cleanpath(path)

          next if previously_fetched_files.map(&:name).include?(path)
          next if file.name == path

          fetched_file = fetch_file_from_host(path)
          grandchild_requirement_files = fetch_child_requirement_files(
            file: fetched_file,
            previously_fetched_files: previously_fetched_files + [file]
          )
          [fetched_file, *grandchild_requirement_files]
        end.compact
      end

      # Files referenced via "-c" constraint lines in any requirements file.
      def constraints_files
        all_requirement_files = requirements_txt_files + child_requirement_txt_files

        constraints_paths = all_requirement_files.map do |req_file|
          current_dir = File.dirname(req_file.name)
          paths = req_file.content.scan(CONSTRAINT_REGEX).flatten

          paths.map do |path|
            path = File.join(current_dir, path) unless current_dir == "."
            cleanpath(path)
          end
        end.flatten.uniq

        constraints_paths.map { |path| fetch_file_from_host(path) }
      end

      # pyproject.toml files for local path dependencies; raises
      # PathDependenciesNotReachable listing any that could not be fetched.
      def project_files
        project_files = T.let([], T::Array[Dependabot::DependencyFile])
        unfetchable_deps = []

        path_dependencies.each do |dep|
          path = dep[:path]
          project_files += fetch_project_file(path)
        rescue Dependabot::DependencyFileNotFound
          unfetchable_deps << "\"#{dep[:name]}\" at #{cleanpath(File.join(directory, dep[:file]))}"
        end

        raise Dependabot::PathDependenciesNotReachable, unfetchable_deps if unfetchable_deps.any?

        project_files
      end

      def fetch_project_file(path)
        project_files = []

        path = cleanpath(File.join(path, "pyproject.toml")) unless sdist_or_wheel?(path)

        return [] if path == "pyproject.toml" && pyproject

        project_files << fetch_file_from_host(path, fetch_submodules: true)
                         .tap { |f| f.support_file = true }
        project_files
      end

      def sdist_or_wheel?(path)
        path.end_with?(".tar.gz", ".whl", ".zip")
      end

      # Heuristic: does this file look like a pip requirements file? True when
      # the name contains "requirements", or every non-blank line is a comment,
      # an option line, or a valid requirement.
      def requirements_file?(file)
        return false unless file.content.valid_encoding?
        return true if file.name.match?(/requirements/x)

        file.content.lines.all? do |line|
          next true if line.strip.empty?
          next true if line.strip.start_with?("#", "-r ", "-c ", "-e ", "--")

          line.match?(RequirementParser::VALID_REQ_TXT_REQUIREMENT)
        end
      end

      def path_dependencies
        [*requirement_txt_path_dependencies, *requirement_in_path_dependencies]
      end

      def requirement_txt_path_dependencies
        (requirements_txt_files + child_requirement_txt_files)
          .map { |req_file| parse_requirement_path_dependencies(req_file) }
          .flatten.uniq { |dep| dep[:path] }
      end

      def requirement_in_path_dependencies
        requirements_in_files
          .map { |req_file| parse_requirement_path_dependencies(req_file) }
          .flatten.uniq { |dep| dep[:path] }
      end

      # Extracts local path dependencies (plain "./pkg" entries and editable
      # "-e ./pkg" entries) from a requirements file, skipping URLs.
      def parse_requirement_path_dependencies(req_file)
        # If this is a pip-compile lockfile, rely on whatever path dependencies we found in the main manifest
        return [] if requirements_in_file_matcher.compiled_file?(req_file)

        uneditable_reqs =
          req_file.content
                  .scan(/(?<name>^['"]?(?:file:)?(?<path>\..*?)(?=\[|#|'|"|$))/)
                  .filter_map do |n, p|
                    { name: n.strip, path: p.strip, file: req_file.name } unless p.include?("://")
                  end

        editable_reqs =
          req_file.content
                  .scan(/(?<name>^(?:-e)\s+['"]?(?:file:)?(?<path>.*?)(?=\[|#|'|"|$))/)
                  .filter_map do |n, p|
                    unless p.include?("://") || p.include?("git@")
                      { name: n.strip, path: p.strip, file: req_file.name }
                    end
                  end

        uneditable_reqs + editable_reqs
      end

      def cleanpath(path)
        Pathname.new(path).cleanpath.to_path
      end

      def requirements_in_file_matcher
        # Fixed typo: was `RequiremenstFileMatcher`, which raised NameError —
        # the required file is dependabot/uv/requirements_file_matcher.
        @requirements_in_file_matcher ||= RequirementsFileMatcher.new(requirements_in_files)
      end

      def fetch_requirement_files_from_path(path = nil)
        contents = path ? repo_contents(dir: path) : repo_contents
        filter_requirement_files(contents, base_path: path)
      end

      def fetch_requirement_files_from_dirs
        repo_contents
          .select { |f| f.type == "dir" }
          .flat_map { |dir| req_files_for_dir(dir) }
      end

      # Keeps only plausible requirement files: right name pattern, small
      # enough, and (unless an exact known filename) passing the content check.
      def filter_requirement_files(contents, base_path: nil)
        contents
          .select { |f| f.type == "file" }
          .select { |f| file_matches_requirement_pattern?(f.name) }
          .reject { |f| f.size > MAX_FILE_SIZE }
          .map { |f| fetch_file_with_path(f.name, base_path) }
          .select { |f| REQUIREMENT_FILE_PATTERNS[:filenames].include?(f.name) || requirements_file?(f) }
      end

      def file_matches_requirement_pattern?(filename)
        REQUIREMENT_FILE_PATTERNS[:extensions].any? { |ext| filename.end_with?(ext) } ||
          REQUIREMENT_FILE_PATTERNS[:filenames].any?(filename)
      end

      def fetch_file_with_path(filename, base_path)
        path = base_path ? File.join(base_path, filename) : filename
        fetch_file_from_host(path)
      end
    end
  end
end

Dependabot::FileFetchers.register("uv", Dependabot::Uv::FileFetcher)