diff --git a/testbed/cocotb__cocotb/.gitpod.Dockerfile b/testbed/cocotb__cocotb/.gitpod.Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..c43bc6054288b0d183d9d66f1ceb921c2852caf2 --- /dev/null +++ b/testbed/cocotb__cocotb/.gitpod.Dockerfile @@ -0,0 +1,56 @@ +FROM gitpod/workspace-full-vnc + +USER gitpod + +## Install Python with --enable-shared +ARG PYTHON_VERSION=3.9.2 +RUN rm -rf ${HOME}/.pyenv/versions/${PYTHON_VERSION} +RUN PYTHON_CONFIGURE_OPTS="--enable-shared" pyenv install ${PYTHON_VERSION} +RUN pyenv global ${PYTHON_VERSION} + +RUN pip3 install --upgrade pip + +# Install extra packages +RUN pip3 install -U pytest flake8 pylint pytype mypy gcovr cherrypy dowser + +# Re-synchronize the OS package index +RUN sudo apt-get update + +# Install all needed packages for all simulators +RUN sudo apt-get install -y perl make flex gnat gtkwave swig autoconf g++ bison libfl2 libfl-dev ccache libgoogle-perftools-dev numactl perl-doc +RUN sudo rm -rf /var/lib/apt/lists/* + +## Install Icarus Verilog +RUN brew install icarus-verilog + +## Install Verilator +#ENV VERILATOR_BRANCH=stable +ENV VERILATOR_BRANCH=v4.106 + +RUN git clone https://github.com/verilator/verilator.git --branch ${VERILATOR_BRANCH} verilator \ + && unset VERILATOR_ROOT \ + && cd verilator \ + && autoconf \ + && ./configure \ + && make --silent \ + && sudo make --silent install \ + && cd .. \ + && rm -rf verilator + +## Install GHDL +ENV GHDL_BRANCH=v1.0.0 +RUN git clone https://github.com/ghdl/ghdl.git --depth=1 --branch ${GHDL_BRANCH} ghdl \ + && cd ghdl \ + && ./configure \ + && make --silent \ + && sudo make --silent install \ + && cd .. \ + && rm -rf ghdl + +# Install cvc +RUN git clone https://github.com/cambridgehackers/open-src-cvc.git --depth=1 cvc \ + && cd cvc/src \ + && make -f makefile.cvc64 --silent \ + && sudo cp cvc64 /usr/local/bin \ + && cd ../.. 
\ + && rm -rf cvc diff --git a/testbed/cocotb__cocotb/CONTRIBUTING.md b/testbed/cocotb__cocotb/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..32e4bc68ea6037ad9505113c76b0ede5eb02ad48 --- /dev/null +++ b/testbed/cocotb__cocotb/CONTRIBUTING.md @@ -0,0 +1,537 @@ +# Cocotb Contribution Guidelines + +Welcome to the cocotb development! +We are an inclusive community with the common goal of improving the cocotb, a coroutine based cosimulation library for writing VHDL and Verilog testbenches in Python. +This guide explains how to contribute to cocotb, and documents the processes we agreed on to manage the project. +All processes in this document are designed to streamline the development effort, to avoid bottlenecks, and to ultimately give a pleasant experience to all involved. + + +## Getting Help + +Cocotb is a diverse and challenging project to contribute to. +If you ever feel lost, out of your depth, or simply want to know more, the [cocotb Gitter channel](https://gitter.im/cocotb/Lobby) is actively watched by many cocotb users, contributors, and maintainers. +It is a good idea if you are unsure whether your problem or question is worthy of a Github Issue to first post it to the Gitter channel. +You may also ask questions in [Github issues](https://github.com/cocotb/cocotb/issues). +If you don't receive any response on the Gitter channel or a Github issue, or you want help privately, you may directly contact a [maintainer](#maintainers). + + +## What to Work On + +There is *a lot* of work to do on this project, no matter your area of expertise or skill level. + +If you are a beginner there are several [Github issues](https://github.com/cocotb/cocotb/issues) marked [`good first issue`](https://github.com/cocotb/cocotb/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) you can look at. 
+ +There are also a number of things you can work on that aren't in Github issues and don't require in-depth knowledge of the cocotb internals. +They include the following: + +* Documentation improvements +* Managing Github issues and the Gitter channel +* Testing and coverage improvements + +Cocotb is still not perfect. There are plenty of [bug fixes](https://github.com/cocotb/cocotb/issues?q=is%3Aopen+is%3Aissue+label%3Atype%3Abug) and [features](https://github.com/cocotb/cocotb/issues?q=is%3Aopen+is%3Aissue+label%3Atype%3Afeature) that can be worked on. +Most of these are recorded as Github issues. + + +### Documentation + +Cocotb's documentation is always open to improvements. +Improving documentation will help users better understand and use cocotb; +and may decrease the number of questions on the Gitter channel and Github issue page. +Updating documentation requires knowledge of: + +* [reStructuredText](https://docutils.sourceforge.io/rst.html) +* [Sphinx documentation generator](https://www.sphinx-doc.org/en/master/) +* [Markdown](https://www.markdownguide.org/) +* [How to architect documentation](https://documentation.divio.com/) + +Some documentation should be located in the official documentation on [Read the Docs/RTD](https://docs.cocotb.org/en/latest/), +while the rest belongs on the [Wiki](https://github.com/cocotb/cocotb/wiki). +There are several ways to improve the documentation: + +* Better documenting core functionality (RTD) +* Documenting common "gotchas" (RTD) +* Documenting difficult and niche use cases (Wiki) +* Documenting common design patterns (Wiki) +* Documenting internal components (Wiki) + +See the documentation on [building the documentation](#building-documentation-locally) and the [guidelines on submitting pull requests](#patch-requirements). +Documentation improvements typically require no maintainer pre-approval; you can simply work on the documentation and open a pull request. 
+Documentation on the Wiki does not require a pull request; any user with a Github account can contribute to it. +Please be responsible with that power. + + +### Project Management + +The cocotb project is [fairly popular](https://larsasplund.github.io/github-facts/verification-practices.html#frameworks) and the +[Gitter channel](https://gitter.im/cocotb/Lobby) and +[Github issues](https://github.com/cocotb/cocotb) page receive a fair amount of traffic; +which is only expected to increase. +People are needed to categorize issues and pull requests, and respond to questions. +Working this task is the quickest way to learn how cocotb works. +Tending to this task requires the following: + +* people skills +* an understanding of the scope of cocotb +* general understanding about how cocotb works + +Someone working this task should set notifications on the Gitter channel to be notified of any new comments. +They should also "watch" the Github repo by selecting the "Watching" notification level button in the upper right corner of the main Github page. +Finally, they should notify the maintainers that they are able and willing to accept questions. + +To be able to add labels and close issues and PRs you will need special permissions. +Contact a [maintainer](#maintainer) if you are interested in receiving these permissions. +They will be granted according to the project's need and the requestor's familiarity with cocotb. +Once you have those permissions, see the guidelines on [managing issues and pull requests](#Managing-of-Issues-and-Pull-Requests). + +This task can also be done without special repo permissions, by just commenting on the issue or PR. +This is especially helpful for Github issues about bugs. +If you can duplicate the bug or confirm the bug report is invalid, that helps maintainers *a lot*. 
+ + +### Tests and Coverage + +Cocotb has a suite of unit tests (located in the `tests` directory) and examples (located in the `examples` directory) which are functional acceptance tests. +If a pull request cannot pass *all* of these tests, it will likely be rejected. +To ensure cocotb only includes the highest quality code, these test should be exhaustive. +We use code coverage as a quantifiable metric of the "exhaustiveness" of these tests, and wish to improve this metric. + +Working on this task requires a familiarity with: + +* Cocotb's core functionality +* How to write Verilog and VHDL +* How to write cocotb tests in Python +* (Optionally) [codecov](https://docs.codecov.io/docs); coverage aggregator and Github bot +* (Optionally) the [coverage](https://coverage.readthedocs.io/en/latest/) module, for Python code coverage +* (Optionally) [gcov](https://gcc.gnu.org/onlinedocs/gcc/Gcov.html), for C++ code coverage +* (Optionally) [Github Actions](https://docs.github.com/en/free-pro-team@latest/actions), for automatic acceptance testing + +Cocotb's regression tests can be improved by: + +* Testing more of cocotb's core functionality +* Testing corner cases left out of the current set of tests (identified by looking at code coverage) +* Increasing the matrix of simulators, operating system, and Python installations tested in CI + +Testing improvements don't require maintainer pre-approval, but require a pull request. +Please see the [guidelines on submitting pull requests](#patch-requirements). + + +### Features + +Cocotb is still in development and new features are still welcome and appreciated; +as long as they stay [in scope](#Architecture-and-Scope-of-Cocotb). +Cocotb is comprised of several major codebases, each requiring different sets of skills and development process. +Instead of including that breakdown here, it is done in the [internal documentation](https://github.com/cocotb/cocotb/wiki/cocotb-Internals). 
+ +Small improvements to existing features generally do not require maintainer pre-approval. +Large changes, approximately >150 LOC changed, and new features generally require maintainer pre-approval. +If a change is deemed too large for the main repo, or out of scope, +please feel free to make it an [extension](https://docs.cocotb.org/en/latest/extensions.html). + +***New features must not break existing features.*** + +Feature changes require full coverage of the added feature. +This likely requires adding new unit tests to the `tests` directory. +Issue-specific test directories will not be accepted, unless a special HDL entity is required. +Instead, place the test in an existing test suite (`test_cocotb`, `test_discovery`, etc.). + +Features should generally follow the following design principles: + +* Something the user cannot do without assistance of cocotb-specific code +* Orthogonal to existing features +* Easily composed with existing features +* Limited in scope and impervious to scope creep + + +### Bugfixes + +**!WARNING!** Bugfixing cocotb is not for the faint of heart! + +Bugs happen. +cocotb supports many simulators that have inconsistent support for the procedural interfaces cocotb depends on, +and it has a number of features that aren't wholly tested yet. +There are likely many bugs lurking, waiting to be found; +which is why increasing testing and code coverage is important. +Working on bugfixing can be very challenging, depending on the cause of the bug. 
+In general, bugfixing requires knowledge of: + +* How cocotb works +* [cocotb's debugging utilities](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs#cocotb-debugging-functionality) +* (Optional) Simulator interfaces (VPI, VHPI, and FLI) +* (Optional) Python debugging tools ([pdb](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs#using-a-remote-python-debugger), [dowser](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs#debugging-python-memory-usage)) +* (Optional) C/C++ debugging tools ([gdb](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs#using-a-remote-cc-debugger), [valgrind](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs#debugging-cc-memory-usage)) +* (Optional) Specific simulators (sometimes the bug exists in the simulator and not cocotb) + +Fixing a bug follows the procedure: + +1. Locate buggy behavior, make a Github issue + * Maintainers may be able to offer more information, confirm it as a bug, or confirm it as expected behavior +2. Make a Minimum Reproducible Failing Example (MRFE, pronounced like Murphy, like the law :) + * Confirms the bug + * Add to [regressions](#running-tests-locally) +3. Open a new draft pull request with the MRFE test + * It should cause CI to fail +4. Determine scope of the bug, and add that detail to the pull request + * Which simulators/interfaces are affected? + * Which Python versions? + * Which operating systems? +5. Determine the cause of the bug, and add that detail to the pull request + * May require Python or C debugging, or the builtin cocotb debugging utilities +6. Make a fix, and push it up on the PR branch + * It should cause the CI to pass + * The fix should not break other existing functionality + +Details on how to debug cocotb can be found on the [Wiki](https://github.com/cocotb/cocotb/wiki/Debugging-HOW-TOs). 
+ + +### Deprecations and Removals + +Cocotb's treatment of deprecations and removal follows guidelines laid out [here](https://symfony.com/doc/current/setup/upgrade_major.html#make-your-code-deprecation-free). +Deprecations serve the following purposes: + +* Remove legacy code that has been deemed out of scope +* Remove support for a simulator, OS, or Python version that is past end-of-life +* Remove potentially dangerous, broken, and misunderstood interfaces (usually accompanied with a superior alternative) + +Deprecations can be incorporated at any time. +They are implemented in Python by [issuing a `DeprecationWarning`](https://docs.python.org/3/library/warnings.html#warnings.warn) +or using the [`@deprecated`](cocotb/_deprecation.py) decorator. +In C++ code, deprecations are implemented by [issuing a LOG_WARN](https://docs.cocotb.org/en/stable/generated/file/gpi__logging_8h.html?highlight=LOG_WARN#c.LOG_WARN) with `DEPRECATED` in the message. + +Removals only occur on major version bumps. +One can create removal pull requests at any time, on the condition they will not be accepted until the next release is known to be a major version release. + + +### Breaking Changes + +Breaking changes are changes to the interface or behavior of a user-facing entity. +They are necessary when a user-facing interfaces are broken in a way that cannot be changed without changing the behavior of user's code. +In these situations it is ideal to be able to implement a switch between new better behavior and the old broken behavior. +On major version bumps, this switch will be deprecated and the new behavior will become the default. + +In cases where behavioral switches are not easy to implement, breaking changes will attempt to be broadcasted to user by [issuing a `DeprecationWarning`](https://docs.python.org/3/library/warnings.html#warnings.warn) when the to-be-changed behavior is invoked. +Before major releases, pending breaking changes will be incorporated. 
+ +One can create pull requests with breaking changes at any time, on the condition they will not be accepted until the next release is known to be a major version release. + + +## Setting Up a Development Environment + +Assuming you have used cocotb prior to reading this guide, you will already have the cocotb [installation prerequisites](https://docs.cocotb.org/en/latest/install.html) and standard development tools (editor, shell, git, etc.) installed. + +First, you should [fork and clone](https://guides.github.com/activities/forking/) the cocotb repo to your machine. +This will allow you to make changes to the cocotb source code, create pull requests, and run regressions and build documentation locally. + +Additionally, you will need [doxygen](https://www.doxygen.nl/index.html), for building documentation; +[nox](https://pypi.org/project/nox/), for building documentation and running regression tests; +and [pre-commit](https://pre-commit.com/), to check your changes before committing them. + +We recommend if you are using a Linux distribution to use your system package manager to install doxygen. +Likewise, doxygen can be installed using the homebrew package manager on Mac OS. +Windows contributors should download a binary distribution installer from the main website. + +`nox` and `pre-commit` are Python projects and can be installed with `pip`: + +```command +pip install nox pre-commit +``` + +To enable pre-commit run the following command at the root of the cloned project to install the git hooks. +The first run of pre-commit will build an environment for you, so it may take a while. +Following runs should be much quicker. + +```command +pre-commit install +``` + +When committing, pre-commit's hook will run, checking your changes for formatting, code smells, etc. +You will see the lists of checks printed and whether they passed, were skipped, or failed. 
+If any of the checks fail, it is recommended to fix them before opening a pull request, +otherwise the pull request checks will fail as well. + +Now you are ready to contribute! + + +## Running Tests Locally + +First, [set up your development environment](#setting-up-a-development-environment). + +Our tests are managed by `nox`, which runs both `pytest` tests and our system of makefiles. +The regression does not end on the first failure, but continues until all tests in the `/tests` and `/examples` directories have been run. + +To run the tests locally with `nox`, issue the following command. + +```command +nox -s dev_test +``` + +At the end of the regression, if there were any test failures, the tests that failed will be printed. +If the tests succeed you will see the message `Session tests was successful` printed in green. + +By default the `dev_test` nox session runs all simulator-agnostic tests, as well as all tests which require a simulator and can be run against Icarus Verilog. +Icarus Verilog must be installed. + +The simulator and the toplevel language can be changed by setting the environment variables [`SIM`](https://docs.cocotb.org/en/latest/building.html#var-SIM) and [`TOPLEVEL_LANG`](https://docs.cocotb.org/en/latest/building.html#var-TOPLEVEL_LANG). +Alternatively, the simulator-specific nox sessions can be used, as described below. + +### Selecting a Language and Simulator for Regression + +cocotb can be used with multiple simulators, and can run tests against all of them. +Nox provides a session for each valid simulator/language/GPI interface combination, from which one or multiple sessions can be selected. + +The following examples are good starting points; +refer to the [nox command-line usage documentation](https://nox.thea.codes/en/stable/usage.html) for more information. + +```command +# List all available sessions. +nox -l + +# Run all simulator-agnostic tests. 
+nox -s dev_test_nosim + +# Run the simulator-specific tests against Xcelium, using a VHDL toplevel and +# the VHPI interface. +nox -s "dev_test_sim(sim='xcelium', toplevel_lang='vhdl', gpi_interface='vhpi')" + +# Run all simulator-specific tests against Icarus Verilog and GHDL. +# Both simulators must be installed locally. +nox -k "dev_test_sim and (icarus or ghdl)" +``` + +### Running Individual Tests Locally + +Each test under `/tests/test_cases/*/` and `/examples/*/tests/` can be run individually. +This is particularly useful if you want to run a particular test that fails the regression. + +First you must install cocotb from source by navigating to the project root directory and issuing the following command: + +```command +python -m pip install . +``` + +On Windows, you must instead install cocotb from source like so: + +```command +python -m pip install --global-option build_ext --global-option --compiler=mingw32 . +``` + +Once that has been done, you can navigate to the directory containing the test you wish to run. +Then you may issue an [appropriate](https://docs.cocotb.org/en/latest/building.html#makefile-based-test-scripts) `make` command. +For example, if you want to test with Icarus using Verilog sources: + +```command +make SIM=icarus TOPLEVEL_LANG=verilog +``` + + +## Building Documentation Locally + +First, [set up your development environment](#setting-up-a-development-environment). + +Documentation is built locally using `nox`. +The last message in the output will contain a URL to the documentation you just built. +Simply copy and paste the link into your browser to view it. +The documentation will be built in the same location on your hard drive on every run, so you only have to refresh the page to see new changes. + +To build the documentation locally on Linux or Mac, issue the following command: + +```command +nox -e docs +``` + +Building the documentation is not currently supported on Windows. 
+ + +## Architecture and Scope of Cocotb + +Cocotb has seen adoption in a wide variety of scenarios with sometimes conflicting requirements. +To foster experimentation and to decentralize the development process the architecture of cocotb is highly modular. +A solid core forms the foundation upon which extensions can provide higher-level functionality. + +The core of cocotb are +- the infrastructure to write testbenches with coroutines, threads, etc., +- the abstraction and interaction with simulators through interfaces like VPI, GPI, etc., +- tooling to run tests, and +- core primitives to interact with the simulation: triggers, data access classes, etc. + +As a general rule, functionality beyond this core set should go into extensions. +However, none of these rules are set in stone. +They can and should be challenged at times to ensure the project stays relevant to the majority of its users. + + +## Maintainer Pre-approval + +After making changes to cocotb, changes must be approved by at least one maintainer before being included. +Out-of-scope and breaking changes ***will not be accepted***. +Also a maintainer could object to a change due to implementation approach or code quality reasons. +To potentially save you frustration and time, it is a good idea to get maintainer pre-approval on the task before starting it. + +The best way to get maintainer pre-approval is to make a [Github issue](https://github.com/cocotb/cocotb/issues). +These issues can be a place for maintainers, as well as other users, to voice opinions on a proposed change before the task is worked. +You may also propose changes on the [Gitter channel](https://gitter.im/cocotb/Lobby) or by directly contacting a [maintainer](#maintainer). + + +## How to Get Changes Merged + +Have you fixed a bug in cocotb, or want to add new functionality to it? +Cocotb follows the typical [GitHub flow](https://guides.github.com/introduction/flow/) and makes use of pull requests and reviews. 
+Follow the steps below to get your changes merged, i.e. integrated into the main cocotb codebase. + +1. Create an issue ticket on [cocotb's GitHub issue tracker](https://github.com/cocotb/cocotb/issues) describing the problem. + Issues are also a good place to discuss design options with others before writing code. +2. [Fork](https://help.github.com/articles/fork-a-repo/) the [cocotb GitHub repository](https://github.com/cocotb/cocotb) into your personal namespace. +3. Create a new branch off the `master` branch for your set of changes. + Use one branch per "topic," i.e. per set of changes which belong together. +4. Create one or multiple commits to address the issue. + Make sure to read and follow the [Patch Requirements](#patch-requirements) when preparing your commits. +5. Create new [pull request (PR)](https://github.com/cocotb/cocotb/pulls). +6. When you submit (or update) the pull request, a suite of regression tests will run. + If any of them turns "red," i.e. reports a failure, you most likely need to fix your code before it can be merged. +7. The pull request needs to be reviewed by at least one maintainer. + We aim to give feedback to all pull requests within a week, but as so often, life can get in the way. + If you receive no feedback from a maintainer within that time, please contact them directly (e.g. on [Gitter](https://gitter.im/cocotb) or email). + You can find a [list of all maintainers](#maintainers) below. + If a maintainer asks you to explain or modify code, try to do so. +8. Once your code has at least one positive review from a maintainer and no maintainer strongly objects it your code is ready to be merged into the `master` branch. + + +## Patch Requirements + +All changes which should go into the main codebase of cocotb must follow this set of requirements. + +- The code must be within the [scope of cocotb](#architecture-and-scope-of-cocotb). 
+- All code must be licensed under the [Revised BSD License](https://github.com/cocotb/cocotb/blob/master/LICENSE). + By contributing to this project you signal your agreement with these license terms. +- All code must follow the established coding standards: + - For Python code, follow the [PEP 8](https://www.python.org/dev/peps/pep-0008/) style guide. + - For C++ code, follow the [Google C++ Style Guide](https://google.github.io/styleguide/cppguide.html) but with 4 space indentation. + You can run the following command to automatically format the modified file to match the standard: + ```command + clang-format -i + ``` +- All code must pass existing tests. + New functionality must be accompanied by tests, and bug fixes should add tests to increase the test coverage and prevent regressions. +- If code changes or enhances documented behavior the documentation should be updated. +- If a change is user-visible, a newsfragment should be added to `docs/source/newsfragments`. +- All pull requests must be accepted by at least one maintainer, with no maintainer strongly objecting. + Reviews must be performed by a person other than the primary author of the code. +- All commits should follow established best practices when creating a commit message: + - The first line of the commit message is the short summary of what the code change does. + Keep this line below 50 characters. + - Then have one blank line. + - Now comes the long description of the commit. + Use this text to discuss things which are not obvious from the code, especially *why* changes were made. + Include the GitHub issue number (if one exists) in the form "Fixes #nnn" ([read more about that](https://help.github.com/articles/closing-issues-using-keywords/)). + Keep each description line below 72 characters. +- Use the following header for new non-example files: + ```python + # Copyright cocotb contributors + # Licensed under the Revised BSD License, see LICENSE for details. 
+ # SPDX-License-Identifier: BSD-3-Clause + ``` +- Use the following header for new example files: + ```python + # This file is public domain, it can be freely copied without restrictions. + # SPDX-License-Identifier: CC0-1.0 + ``` + +## Managing of Issues and Pull Requests + +The cocotb project makes use of GitHub labels attached to issues and pull requests to structure the development process. +Each issue and pull request can have multiple labels assigned. + +The `type` labels define the type of issue or PR: +- `type:bug`: a bug in existing functionality +- `type:feature`: new functionality +- `type:question`: a support question +- `type:cleanup`: cleanup or refactoring on code, documentation, or other areas +- `type:deprecation`: API that should warn and eventually be removed + +The `status` labels give a quick impression of the current status of the issue or PR: +- `status:worksforme`: the issue is not reproducible, or intended behavior (i.e. not a bug) +- `status:blocked`: further progress is blocked by a dependency, e.g. other code which must be committed first. +- `status:needs-info`: feedback from someone is required. The issue/PR text gives more details. +- `status:duplicate`: the same issue is already being handled in another issue/PR. 
+- `status:close?`: issues which can probably be closed, but need a second pair of eyes +- `status:needs-proprietary-testing`: Help needed testing on a proprietary tool +- `status:out-of-scope`: An issue or PR that was closed because the feature or bug was deemed to be out of scope + +For the use in pull requests the following additional status labels are defined: +- `status:needs-review`: this PR needs at least one review +- `status:changes-requested`: changes are requested to the code +- `status:ready-for-merge`: this PR is ready (according to the [Patch Requirements](#patch-requirements)) to be merged +- `status:needs-rebase`: needs a git rebase +- `status:needs-newsfragment`: Needs a towncrier newsfragment for the changelog + +The `category` labels help maintainers to filter issues which are relevant to their area of expertise: +- `category:OS:MacOS`: Mac OS/OS X specific issues +- `category:OS:Linux`: Linux specific issues +- `category:OS:Windows`: Microsoft Windows-specific issues +- `category:simulators`: simulator support, including VPI/GPI/etc. 
+- `category:simulators:activehdl`: Aldec Active-HDL +- `category:simulators:cvc`: Tachyon CVC +- `category:simulators:ghdl`: GHDL +- `category:simulators:icarus`: Icarus Verilog (iverilog) +- `category:simulators:ius`: Cadence Incisive (IUS) +- `category:simulators:modelsim`: Mentor Modelsim +- `category:simulators:questa`: Mentor Questa +- `category:simulators:riviera`: Aldec Riviera-PRO +- `category:simulators:vcs`: Synopsys VCS +- `category:simulators:verilator`: Verilator +- `category:simulators:xcelium`: Cadence Xcelium +- `category:codebase:gpi`: relating to the GPI or one of the implementations +- `category:codebase:pygpi`: relating to the Python wrapper around the GPI (embed library and simulator module) +- `category:codebase:scheduler`: relating to the coroutine scheduler, triggers, or coroutine objects +- `category:codebase:test-runner`: relating to code for automating test runs (regression manager) +- `category:codebase:handle`: relating to handles or handle types (BinaryValue) +- `category:codebase:project-automation`: relating to included project automation (makefiles) +- `category:codebase:testbenching`: relating to testbenching components (Drivers, Monitors, etc.) +- `category:building`: relating to building C/C++ libraries and extension modules +- `category:packaging`: issues related to (PyPi) packaging, etc. +- `category:docs`: documentation issues and fixes +- `category:extensions`: cocotb extensions +- `category:performance`: performance topics +- `category:tests-ci`: continuous integration and unit tests + +To help new contributors find a good issue to work on one more label is used (following [GitHub standard practices](https://help.github.com/articles/helping-new-contributors-find-your-project-with-labels/)): +- `good first issue`: this issue is a good starting point for new contributors. + The issue should give an actionable description of what to do to complete this task, along with contact information of a mentor for this task. 
+ +cocotb explicitly uses no priority labels, as experience indicates that they provide little value. + +Issues and pull requests which are invalid, or where feedback is lacking for four weeks, should be closed. + +## Cocotb Releases + +cocotb aims to keep the `master` branch always in a releasable state. +At least four times a year an official release should be created. +It is the job of the maintainers to find a suitable time for a release, to communicate it to the community, and to coordinate it. + +## Maintainers + +Cocotb uses a shared maintainer model. +Most maintainers are experts in part of the cocotb codebase, and are primarily responsible for reviews in this area. + +- Kaleb Barrett (@ktbarrett) +- Tomasz Hemperek (@themperek) +- Marlon James (@marlonjames) +- Colin Marquardt (@cmarqu) +- Philipp Wagner (@imphil) +- Eric Wieser (@eric-wieser) + +Founders + +- Chris Higgs (@chiggs) +- Stuart Hodgson (@stuarthodgson) + +### Getting in Contact with a Maintainer + +All of the maintainers are active on the [Gitter channel](https://gitter.im/cocotb/Lobby). +They prefer inquiries go through direct messages on Gitter, +or by mentioning them in the main [cocotb Gitter channel](https://gitter.im/cocotb/Lobby) using `@{maintainer name}`. +Maintainers are unpaid volunteers, so it might take a while for a maintainer to get back to you. + + +## Code of Conduct + +The cocotb development community aims to be welcoming to everyone. +The [FOSSi Foundation Code of Conduct](https://www.fossi-foundation.org/code-of-conduct) applies. +Please contact any of the maintainers if you feel uncomfortable in the cocotb development community. 
diff --git a/testbed/cocotb__cocotb/tests/benchmark.py b/testbed/cocotb__cocotb/tests/benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..a8454cf8ee7828083a6b8355f461b5f50ef69866 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/benchmark.py @@ -0,0 +1,62 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +import sys +from pathlib import Path + +from cocotb.runner import get_runner + + +def build_and_run_matrix_multiplier(benchmark, sim): + hdl_toplevel_lang = "verilog" + build_args = [] + test_args = [] + + if sim == "nvc": + build_args = ["--std=08"] + hdl_toplevel_lang = "vhdl" + + verilog_sources = [] + vhdl_sources = [] + + proj_path = ( + Path(__file__).resolve().parent.parent / "examples" / "matrix_multiplier" + ) + + sys.path.append(str(proj_path / "tests")) + + if hdl_toplevel_lang == "verilog": + verilog_sources = [proj_path / "hdl" / "matrix_multiplier.sv"] + else: + vhdl_sources = [ + proj_path / "hdl" / "matrix_multiplier_pkg.vhd", + proj_path / "hdl" / "matrix_multiplier.vhd", + ] + + runner = get_runner(sim) + + runner.build( + hdl_toplevel="matrix_multiplier", + verilog_sources=verilog_sources, + vhdl_sources=vhdl_sources, + build_args=build_args, + ) + + @benchmark + def run_test(): + runner.test( + hdl_toplevel="matrix_multiplier", + hdl_toplevel_lang=hdl_toplevel_lang, + test_module="test_matrix_multiplier", + test_args=test_args, + seed=123456789, + ) + + +def test_matrix_multiplier_icarus(benchmark): + build_and_run_matrix_multiplier(benchmark, "icarus") + + +def test_matrix_multiplier_nvc(benchmark): + build_and_run_matrix_multiplier(benchmark, "nvc") diff --git a/testbed/cocotb__cocotb/tests/sxs.ps1 b/testbed/cocotb__cocotb/tests/sxs.ps1 new file mode 100644 index 0000000000000000000000000000000000000000..f17ff011e2d634b709622107da10ff780c680946 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/sxs.ps1 @@ -0,0 +1,15 @@ 
+# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +# Run this script with: +# powershell -executionpolicy bypass -File tests\sxs.ps1 + +$j = Start-Job -ScriptBlock { SxsTrace Trace -logfile:SxsTrace.etl } +Start-Sleep -s 5 +python -c "import cocotb.simulator" +Start-Sleep -s 5 +$j | Stop-Job +SxsTrace Stoptrace +SxsTrace Parse -logfile:SxsTrace.etl -outfile:SxsTrace.txt +Get-Content SxsTrace.txt diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_253/issue_253.py b/testbed/cocotb__cocotb/tests/test_cases/issue_253/issue_253.py new file mode 100644 index 0000000000000000000000000000000000000000..5609515c7498fa513ac11e34006bfeaa888ae7de --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_253/issue_253.py @@ -0,0 +1,28 @@ +# A set of regression tests for open issues + +import cocotb +from cocotb.triggers import Timer + + +async def toggle_clock(dut): + dut.clk.value = 0 + await Timer(10, "ns") + assert dut.clk.value.integer == 0, "Clock not set to 0 as expected" + dut.clk.value = 1 + await Timer(10, "ns") + assert dut.clk.value.integer == 1, "Clock not set to 1 as expected" + + +@cocotb.test() +async def issue_253_empty(dut): + await toggle_clock(dut) + + +@cocotb.test() +async def issue_253_none(dut): + await toggle_clock(dut) + + +@cocotb.test() +async def issue_253_notset(dut): + await toggle_clock(dut) diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_330/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_330/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d7d4c67b830cc32d472ec92dfbefb84ace4b7a27 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_330/Makefile @@ -0,0 +1,33 @@ +############################################################################### +# Copyright (c) 2013 Potential Ventures Ltd +# Copyright (c) 2013 SolarFlare Communications Inc +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# SolarFlare Communications Inc nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+############################################################################### + + +include ../../designs/sample_module/Makefile + +MODULE = issue_330 diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_330/issue_330.py b/testbed/cocotb__cocotb/tests/test_cases/issue_330/issue_330.py new file mode 100644 index 0000000000000000000000000000000000000000..cacaaff0a3d8dd9ece558e11c2b6c3558cb96253 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_330/issue_330.py @@ -0,0 +1,72 @@ +# A set of regression tests for open issues + +import logging + +import cocotb +from cocotb._sim_versions import RivieraVersion + +SIM_NAME = cocotb.SIM_NAME.lower() + + +# GHDL unable to access record signals (gh-2591) +# Icarus doesn't support structs (gh-2592) +# Verilator doesn't support structs (gh-1275) +# Riviera-PRO 2022.10 and newer does not discover inout_if correctly over VPI (gh-3587) +@cocotb.test( + expect_error=AttributeError + if SIM_NAME.startswith(("icarus", "ghdl", "verilator")) + or ( + SIM_NAME.startswith("riviera") + and RivieraVersion(cocotb.SIM_VERSION) >= RivieraVersion("2022.10") + and cocotb.LANGUAGE == "verilog" + ) + else () +) +async def issue_330_direct(dut): + """ + Access a structure + """ + + tlog = logging.getLogger("cocotb.test") + + structure = dut.inout_if + + tlog.info( + f"Value of inout_if => a_in = {structure.a_in.value} ; b_out = {structure.b_out.value}" + ) + + +# GHDL unable to access record signals (gh-2591) +# Icarus doesn't support structs (gh-2592) +# Verilator doesn't support structs (gh-1275) +@cocotb.test( + expect_error=AttributeError + if SIM_NAME.startswith(("icarus", "ghdl")) + else AssertionError + if SIM_NAME.startswith("verilator") + else () +) +async def issue_330_iteration(dut): + """ + Access a structure via issue_330_iteration + """ + + tlog = logging.getLogger("cocotb.test") + + structure = dut.inout_if + + count = 0 + for member in structure: + tlog.info("Found %s" % member._path) + count += 1 + + # 
Riviera-PRO 2022.10 and newer does not discover inout_if correctly over VPI (gh-3587) + rv_2022_10_plus = RivieraVersion(cocotb.SIM_VERSION) >= RivieraVersion("2022.10") + if ( + SIM_NAME.startswith("riviera") + and rv_2022_10_plus + and cocotb.LANGUAGE == "verilog" + ): + assert count == 0 + else: + assert count == 2, "There should have been two members of the structure" diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_588/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_588/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..fa8015e28799a73afb5c4183eb28e24fc0fba810 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_588/Makefile @@ -0,0 +1,31 @@ +############################################################################### +# Copyright (c) 2018 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+###############################################################################
+
+
+include ../../designs/sample_module/Makefile
+
+MODULE = issue_588
diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_588/issue_588.py b/testbed/cocotb__cocotb/tests/test_cases/issue_588/issue_588.py
new file mode 100644
index 0000000000000000000000000000000000000000..a92cc88a5e75375c3c4df96152d1e35618a2370f
--- /dev/null
+++ b/testbed/cocotb__cocotb/tests/test_cases/issue_588/issue_588.py
@@ -0,0 +1,29 @@
+# Test case for issue 588 - yielding both coroutines and triggers in a list.
+# This is a very simple test; it just makes sure we can yield a list of both.
+
+import cocotb
+from cocotb import triggers, utils
+
+
+async def sample_coroutine(dut):
+    """Very simple coroutine that waits 5 ns."""
+    await triggers.Timer(5, "ns")
+    dut._log.info("Sample coroutine yielded.")
+
+
+@cocotb.test()
+async def issue_588_coroutine_list(dut):
+    """Yield a list of triggers and coroutines."""
+
+    # Record simulation time.
+    current_time = utils.get_sim_time("ns")
+
+    # Wait on the first of a coroutine and a Timer trigger.
+    coro = cocotb.start_soon(sample_coroutine(dut))
+    await triggers.First(coro, triggers.Timer(100, "ns"))
+    coro.kill()
+
+    # Make sure that only 5 ns passed, because the sample coroutine
+    # terminated first.
+    new_time = utils.get_sim_time("ns")
+    assert int(new_time - current_time) == 5, "Did not yield coroutine in list."
diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_768_a/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_768_a/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..09c51217383b3c1a27152601fd466d064215cbf1 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_768_a/Makefile @@ -0,0 +1,3 @@ +include ../../designs/sample_module/Makefile + +MODULE = issue_768 diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..09c51217383b3c1a27152601fd466d064215cbf1 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/Makefile @@ -0,0 +1,3 @@ +include ../../designs/sample_module/Makefile + +MODULE = issue_768 diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/issue_768.py b/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/issue_768.py new file mode 100644 index 0000000000000000000000000000000000000000..41fad7bc13101c6b6793a1f51e2de6a80aa35b15 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_768_b/issue_768.py @@ -0,0 +1,21 @@ +""" +Failing case. + +Note that the bug only occurred if the test in question runs first - so +no more tests can be added to this file. 
+""" + +import cocotb +from cocotb.triggers import ReadOnly, Timer +from cocotb.types import LogicArray, Range + +# this line is different between the two files +value = LogicArray(0, Range(7, "downto", 0)) + + +@cocotb.test() +async def do_test(dut): + dut.stream_in_data.setimmediatevalue(value) + await Timer(1, "step") + assert dut.stream_in_data.value == 0 + await ReadOnly() diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_857/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_857/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..4142731ef64b20b73f45eb09b08abe0fb5401dda --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_857/Makefile @@ -0,0 +1,31 @@ +############################################################################### +# Copyright (c) 2015 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +############################################################################### + + +include ../../designs/sample_module/Makefile + +MODULE = issue_857 diff --git a/testbed/cocotb__cocotb/tests/test_cases/issue_892/Makefile b/testbed/cocotb__cocotb/tests/test_cases/issue_892/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..0cbf05c3da1ae83509ccfc26166f236db0c27b3e --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/issue_892/Makefile @@ -0,0 +1,33 @@ +############################################################################### +# Copyright (c) 2013 Potential Ventures Ltd +# Copyright (c) 2013 SolarFlare Communications Inc +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# SolarFlare Communications Inc nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +############################################################################### + + +include ../../designs/sample_module/Makefile + +MODULE = issue_892 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_external/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_external/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..83403ffbc3aa7e99ef71b2445fcf8a677f801fcd --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_external/Makefile @@ -0,0 +1,33 @@ +############################################################################### +# Copyright (c) 2013 Potential Ventures Ltd +# Copyright (c) 2013 SolarFlare Communications Inc +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# * Neither the name of Potential Ventures Ltd, +# SolarFlare Communications Inc nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +############################################################################### + + +include ../../designs/sample_module/Makefile + +MODULE = test_external diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_iteration_vhdl/test_iteration.py b/testbed/cocotb__cocotb/tests/test_cases/test_iteration_vhdl/test_iteration.py new file mode 100644 index 0000000000000000000000000000000000000000..8e529e35c67b150dc26aa0d90ce0aeef66925219 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_iteration_vhdl/test_iteration.py @@ -0,0 +1,158 @@ +# Copyright (c) 2015, 2018 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import logging +import os + +import cocotb +from cocotb._sim_versions import QuestaVersion +from cocotb.triggers import Combine, Timer + + +def total_object_count(): + """Return the total object count based on simulator.""" + SIM_NAME = cocotb.SIM_NAME.lower() + SIM_VERSION = cocotb.SIM_VERSION.lower() + + # Questa with VHPI + # TODO: Why do we get massively different numbers for Questa/VHPI than for Questa/FLI or VPI? 
+ if SIM_NAME.startswith("modelsim") and os.environ["VHDL_GPI_INTERFACE"] == "vhpi": + return 68127 + + # Questa 2023.1 onwards (FLI) do not discover the following objects, which + # are instantiated four times: + # - inst_generic_sp_ram.clk () + # - inst_generic_sp_ram.rst () + # - inst_generic_sp_ram.wen () + # - inst_generic_sp_ram.en () + if ( + SIM_NAME.startswith("modelsim") + and QuestaVersion(SIM_VERSION) >= QuestaVersion("2023.1") + and os.environ["VHDL_GPI_INTERFACE"] == "fli" + ): + return 35153 - 4 * 4 + + if SIM_NAME.startswith( + ( + "ncsim", + "xmsim", + "modelsim", + "riviera", + ) + ): + return 35153 + + # Active-HDL + if SIM_NAME.startswith("aldec"): + if SIM_VERSION.startswith("11.1"): + # Active-HDL 11.1 only finds 'inbranch_tdata_low' inside the gen_acs for generate block + return 27359 + if SIM_VERSION.startswith("10.01"): + # Active-HDL 10.1 doesn't find any signals declared inside the gen_acs for generate block + return 26911 + + return 0 + + +@cocotb.test(skip=(total_object_count() == 0)) +async def recursive_discovery(dut): + """Recursively discover every single object in the design.""" + + pass_total = total_object_count() + + tlog = logging.getLogger("cocotb.test") + await Timer(100) + + def dump_all_the_things(parent): + if not isinstance( + parent, + ( + cocotb.handle.HierarchyObjectBase, + cocotb.handle.NonHierarchyIndexableObjectBase, + ), + ): + return 0 + count = 0 + for thing in parent: + count += 1 + tlog.info("Found %s (%s)", thing._path, type(thing)) + count += dump_all_the_things(thing) + return count + + total = dump_all_the_things(dut) + tlog.info("Found a total of %d things", total) + assert total == pass_total + + +# GHDL unable to access signals in generate loops (gh-2594) +@cocotb.test( + expect_error=IndexError if cocotb.SIM_NAME.lower().startswith("ghdl") else () +) +async def discovery_all(dut): + """Discover everything on top-level.""" + dut._log.info("Iterating over top-level to discover objects") + for thing in 
dut: + thing._log.info("Found something: %s", thing._path) + + dut._log.info("length of dut.inst_acs is %d", len(dut.gen_acs)) + item = dut.gen_acs[3] + item._log.info("this is item") + + +@cocotb.test() +async def dual_iteration(dut): + """Test iteration over top-level in two forked coroutines.""" + + async def iteration_loop(): + for thing in dut: + thing._log.info("Found something: %s", thing._path) + await Timer(1) + + loop_one = cocotb.start_soon(iteration_loop()) + loop_two = cocotb.start_soon(iteration_loop()) + + await Combine(loop_one, loop_two) + + +# GHDL unable to access record types (gh-2591) +@cocotb.test( + expect_fail=cocotb.SIM_NAME.lower().startswith("aldec"), + expect_error=AttributeError if cocotb.SIM_NAME.lower().startswith("ghdl") else (), +) +async def test_n_dimension_array(dut): + """Test iteration over multi-dimensional array.""" + tlog = logging.getLogger("cocotb.test") + inner_count = 0 + outer_count = 0 + config = dut.inst_ram_ctrl.config + # This signal is a 2 x 7 vhpiEnumVecVal + for thing in config: + for sub_thing in thing: + tlog.info("Found %s", sub_thing._name) + inner_count += 1 + outer_count += 1 + + assert outer_count == 2, outer_count + assert inner_count == 14, inner_count diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/.gitignore b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..03eb0a3ce432aa8be0bb1df0f15e35fac0134cb5 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/.gitignore @@ -0,0 +1 @@ +errlog diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..69fa66e5fe211d852da1e035da2689379d19d47e --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/Makefile @@ -0,0 +1,39 @@ +# Copyright cocotb 
contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +ifeq ($(SIM),ghdl) + +all: + @echo Skipping test because GHDL does not support identifiers longer than 1023 characters. + @echo See also https://github.com/ghdl/ghdl/issues/1930 + +else + +PROJ_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) + +TOPLEVEL_LANG ?= verilog + +ifeq ($(TOPLEVEL_LANG),verilog) +VERILOG_SOURCES := $(PROJ_DIR)/test.sv +else ifeq ($(TOPLEVEL_LANG),vhdl) +VHDL_SOURCES := $(PROJ_DIR)/test.vhd +endif + +ifneq ($(filter $(SIM),ius xcelium),) +COMPILE_ARGS += -v93 +endif + +TOPLEVEL := test + +export MODULE := test_long_log_msg +export COCOTB_LOG_LEVEL := DEBUG + +.PHONY: override_for_this_test +override_for_this_test: + $(MAKE) all 2>errlog + ! grep &>/dev/null "Log message construction failed" errlog + +include $(shell cocotb-config --makefiles)/Makefile.sim + +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test.vhd b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test.vhd new file mode 100644 index 0000000000000000000000000000000000000000..7f06c3077c75aeef2d40fabf998d96db89699791 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test.vhd @@ -0,0 +1,14 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. 
+-- SPDX-License-Identifier: BSD-3-Clause + +entity test is + port ( + KPFIVTM1LHY0OFUNXF0XZO1RV535NNU4A8QWR12BLQXJ98PMM1P552QC0T089PUJPB4POUJZLBS19S8XNMPGJV2QK3AJUW98X7RU56ZV4IMFIPDJI81E5B9HNW94Q9APLMMK3VPHS1VB1QVPQDG7VOTLY2NT3F4080OUXSC68H0JZF7KQ0O6GGN3DARXC03CSZL7TE6B7R47366SB54T5Q4MOR5BT3L5S0S3NM8MALXPHZKCUA6AR5U391GGDYG5LB7JGKAHSIREODSNGW7FAYNRTTXFFCRL4U3ZQA6DH1RKCFKGDG9WMF81IX5YSAINQSP14F2FJV0GYEM3R4LUFFSWOZKK5MGKS25RLROJFEQDC8L2XY07728MM7V516ZXH1YFS0AL1GPLH03N5EL0RQVY61EVEQEJCYDT0ZBBN1ZLC5BDQU83NF8N953MU6A99SDNPCTSOD2W9WY69ZL64JHURFHA5DT7KQC7T4KASR5CAG85ONU3F2XWYA97JHDN9V9SBS39MYMYERJ338O6JQYCHX7SH8FB2VL2PI7DOQWB3NXTA8CQM7YKT34L6U3O42WWMI7NHKUIBO4U9MBP176000FU39WET32RG4PHLYYGWFMKPYPCUFE46RFSQDELLXU31ZZH0OJCGEFUDR2USDUYZ3XPBQ6RC0XARPG2Z1GHCESILHJOF7503PHKWUKDVM2V18WB16CB7AAQ8C4C6E7FXUB3E89Q0ZJSKQFYNZPSKYKGXURV3V5C0SHU9QQ2GFUTP38ORCSWN0QYIX9H0SKJEXPC5U1D3QN9PRT0QPOVM7H5EGQ4E449YTSHUMJW1TT2S6EVIPPIR9ZMFCOWPYXRSNJEQ3OCKGDUW2ZX2AS7N5GBUY7NOAR2P7BK5YPOA6APVAH12V86V2YQZ2M56HLNAD785GI4GMFSCI5P3LNFM0CLSBUEJXCVT695N5D3GC8T0HKAN0BZDV1ZMI0WZ3QUVABNYFOJHXXBUW5OK5MQ46NMK3W0FMCKWVPP6265 : in integer; + o : out integer); +end entity test; + +architecture rtl of test is +begin + o <= 
KPFIVTM1LHY0OFUNXF0XZO1RV535NNU4A8QWR12BLQXJ98PMM1P552QC0T089PUJPB4POUJZLBS19S8XNMPGJV2QK3AJUW98X7RU56ZV4IMFIPDJI81E5B9HNW94Q9APLMMK3VPHS1VB1QVPQDG7VOTLY2NT3F4080OUXSC68H0JZF7KQ0O6GGN3DARXC03CSZL7TE6B7R47366SB54T5Q4MOR5BT3L5S0S3NM8MALXPHZKCUA6AR5U391GGDYG5LB7JGKAHSIREODSNGW7FAYNRTTXFFCRL4U3ZQA6DH1RKCFKGDG9WMF81IX5YSAINQSP14F2FJV0GYEM3R4LUFFSWOZKK5MGKS25RLROJFEQDC8L2XY07728MM7V516ZXH1YFS0AL1GPLH03N5EL0RQVY61EVEQEJCYDT0ZBBN1ZLC5BDQU83NF8N953MU6A99SDNPCTSOD2W9WY69ZL64JHURFHA5DT7KQC7T4KASR5CAG85ONU3F2XWYA97JHDN9V9SBS39MYMYERJ338O6JQYCHX7SH8FB2VL2PI7DOQWB3NXTA8CQM7YKT34L6U3O42WWMI7NHKUIBO4U9MBP176000FU39WET32RG4PHLYYGWFMKPYPCUFE46RFSQDELLXU31ZZH0OJCGEFUDR2USDUYZ3XPBQ6RC0XARPG2Z1GHCESILHJOF7503PHKWUKDVM2V18WB16CB7AAQ8C4C6E7FXUB3E89Q0ZJSKQFYNZPSKYKGXURV3V5C0SHU9QQ2GFUTP38ORCSWN0QYIX9H0SKJEXPC5U1D3QN9PRT0QPOVM7H5EGQ4E449YTSHUMJW1TT2S6EVIPPIR9ZMFCOWPYXRSNJEQ3OCKGDUW2ZX2AS7N5GBUY7NOAR2P7BK5YPOA6APVAH12V86V2YQZ2M56HLNAD785GI4GMFSCI5P3LNFM0CLSBUEJXCVT695N5D3GC8T0HKAN0BZDV1ZMI0WZ3QUVABNYFOJHXXBUW5OK5MQ46NMK3W0FMCKWVPP6265; +end architecture rtl; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test_long_log_msg.py b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test_long_log_msg.py new file mode 100644 index 0000000000000000000000000000000000000000..5ecf68dcbf17167a5bca642006f153f67c475436 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_long_log_msg/test_long_log_msg.py @@ -0,0 +1,10 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +import cocotb + + +@cocotb.test() +async def test_access_long_name(dut): + dut.KPFIVTM1LHY0OFUNXF0XZO1RV535NNU4A8QWR12BLQXJ98PMM1P552QC0T089PUJPB4POUJZLBS19S8XNMPGJV2QK3AJUW98X7RU56ZV4IMFIPDJI81E5B9HNW94Q9APLMMK3VPHS1VB1QVPQDG7VOTLY2NT3F4080OUXSC68H0JZF7KQ0O6GGN3DARXC03CSZL7TE6B7R47366SB54T5Q4MOR5BT3L5S0S3NM8MALXPHZKCUA6AR5U391GGDYG5LB7JGKAHSIREODSNGW7FAYNRTTXFFCRL4U3ZQA6DH1RKCFKGDG9WMF81IX5YSAINQSP14F2FJV0GYEM3R4LUFFSWOZKK5MGKS25RLROJFEQDC8L2XY07728MM7V516ZXH1YFS0AL1GPLH03N5EL0RQVY61EVEQEJCYDT0ZBBN1ZLC5BDQU83NF8N953MU6A99SDNPCTSOD2W9WY69ZL64JHURFHA5DT7KQC7T4KASR5CAG85ONU3F2XWYA97JHDN9V9SBS39MYMYERJ338O6JQYCHX7SH8FB2VL2PI7DOQWB3NXTA8CQM7YKT34L6U3O42WWMI7NHKUIBO4U9MBP176000FU39WET32RG4PHLYYGWFMKPYPCUFE46RFSQDELLXU31ZZH0OJCGEFUDR2USDUYZ3XPBQ6RC0XARPG2Z1GHCESILHJOF7503PHKWUKDVM2V18WB16CB7AAQ8C4C6E7FXUB3E89Q0ZJSKQFYNZPSKYKGXURV3V5C0SHU9QQ2GFUTP38ORCSWN0QYIX9H0SKJEXPC5U1D3QN9PRT0QPOVM7H5EGQ4E449YTSHUMJW1TT2S6EVIPPIR9ZMFCOWPYXRSNJEQ3OCKGDUW2ZX2AS7N5GBUY7NOAR2P7BK5YPOA6APVAH12V86V2YQZ2M56HLNAD785GI4GMFSCI5P3LNFM0CLSBUEJXCVT695N5D3GC8T0HKAN0BZDV1ZMI0WZ3QUVABNYFOJHXXBUW5OK5MQ46NMK3W0FMCKWVPP6265 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_module_var_empty/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_empty/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..a9a71f3622fc1a07604b131c83c190cc6d849a87 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_empty/Makefile @@ -0,0 +1,15 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +# test MODULE is empty +# should cause regression initialization failure so no results.xml is written + +MODULE := " " + +.PHONY: override_for_this_test +override_for_this_test: + -$(MAKE) all + @test ! 
-f $(COCOTB_RESULTS_FILE) + +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..4ada8690968a55dcdce03348fd4ea3924ecbc225 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/Makefile @@ -0,0 +1,15 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +# test MODULE contains leading and trailing separators +# should cause regression initialization failure so no results.xml is written + +MODULE=" , test_nothing ," + +.PHONY: override_for_this_test +override_for_this_test: + -$(MAKE) all + @test ! -f $(COCOTB_RESULTS_FILE) + +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/test_nothing.py b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/test_nothing.py new file mode 100644 index 0000000000000000000000000000000000000000..0b9efadd01633f0643e3883411605b11dbe343e6 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_module_var_messy/test_nothing.py @@ -0,0 +1 @@ +""" This test module is purposefully empty """ diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_module_without_tests/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_module_without_tests/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..76a935c038ca9ef7e6bdf94e928f0b48c2846c79 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_module_without_tests/Makefile @@ -0,0 +1,15 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +# test MODULE is set +# should cause regression initialization failure so no results.xml is written + +MODULE=test_nothing + +.PHONY: override_for_this_test +override_for_this_test: + -$(MAKE) all + @test ! -f $(COCOTB_RESULTS_FILE) + +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_multi_dimension_array/test_cocotb_array.py b/testbed/cocotb__cocotb/tests/test_cases/test_multi_dimension_array/test_cocotb_array.py new file mode 100644 index 0000000000000000000000000000000000000000..e7e520052f45db0e547292812c0cc8a3149ee568 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_multi_dimension_array/test_cocotb_array.py @@ -0,0 +1,185 @@ +import cocotb +from cocotb.triggers import Timer + +SIM_NAME = cocotb.SIM_NAME.lower() + + +@cocotb.test() +async def test_in_vect_packed(dut): + test_value = 0x5 + dut.in_vect_packed.value = test_value + await Timer(1, "ns") + assert dut.out_vect_packed.value == test_value + + +# Verilator combines 1-dimensional unpacked arrays into a single vector (gh-3611) +@cocotb.test(expect_error=TypeError if SIM_NAME.startswith("verilator") else ()) +async def test_in_vect_unpacked(dut): + test_value = [0x1, 0x0, 0x1] + dut.in_vect_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_vect_unpacked.value == test_value + + +@cocotb.test() +async def test_in_arr(dut): + test_value = 0x5 + dut.in_arr.value = test_value + await Timer(1, "ns") + assert dut.out_arr.value == test_value + + +@cocotb.test() +async def test_in_2d_vect_packed_packed(dut): + test_value = (0x5 << 6) | (0x5 << 3) | 0x5 + dut.in_2d_vect_packed_packed.value = test_value + await Timer(1, "ns") + assert dut.out_2d_vect_packed_packed.value == test_value + + +@cocotb.test() +async def test_in_2d_vect_packed_unpacked(dut): + test_value = [0x5, 0x5, 0x5] + dut.in_2d_vect_packed_unpacked.value = test_value + await Timer(1, "ns") + assert 
dut.out_2d_vect_packed_unpacked.value == test_value + + +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test(expect_error=AttributeError if SIM_NAME.startswith("verilator") else ()) +async def test_in_2d_vect_unpacked_unpacked(dut): + test_value = 3 * [[0x1, 0x0, 0x1]] + dut.in_2d_vect_unpacked_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_2d_vect_unpacked_unpacked.value == test_value + + +@cocotb.test() +async def test_in_arr_packed(dut): + test_value = 365 + dut.in_arr_packed.value = test_value + await Timer(1, "ns") + assert dut.out_arr_packed.value == test_value + + +@cocotb.test() +async def test_in_arr_unpacked(dut): + test_value = [0x5, 0x5, 0x5] + dut.in_arr_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_arr_unpacked.value == test_value + + +@cocotb.test() +async def test_in_2d_arr(dut): + test_value = 365 + dut.in_2d_arr.value = test_value + await Timer(1, "ns") + assert dut.out_2d_arr.value == test_value + + +@cocotb.test() +async def test_in_vect_packed_packed_packed(dut): + test_value = 95869805 + dut.in_vect_packed_packed_packed.value = test_value + await Timer(1, "ns") + assert dut.out_vect_packed_packed_packed.value == test_value + + +# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605) +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test( + expect_error=IndexError + if cocotb.LANGUAGE == "verilog" and SIM_NAME.startswith("modelsim") + else AttributeError + if SIM_NAME.startswith("verilator") + else () +) +async def test_in_vect_packed_packed_unpacked(dut): + test_value = [365, 365, 365] + dut.in_vect_packed_packed_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_vect_packed_packed_unpacked.value == test_value + + +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test(expect_error=AttributeError if SIM_NAME.startswith("verilator") else ()) +async 
def test_in_vect_packed_unpacked_unpacked(dut): + test_value = 3 * [3 * [5]] + dut.in_vect_packed_unpacked_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_vect_packed_unpacked_unpacked.value == test_value + + +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test(expect_error=AttributeError if SIM_NAME.startswith("verilator") else ()) +async def test_in_vect_unpacked_unpacked_unpacked(dut): + test_value = 3 * [3 * [[1, 0, 1]]] + dut.in_vect_unpacked_unpacked_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_vect_unpacked_unpacked_unpacked.value == test_value + + +@cocotb.test() +async def test_in_arr_packed_packed(dut): + test_value = (365 << 18) | (365 << 9) | (365) + dut.in_arr_packed_packed.value = test_value + await Timer(1, "ns") + assert dut.out_arr_packed_packed.value == test_value + + +# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605) +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test( + expect_error=IndexError + if cocotb.LANGUAGE == "verilog" and SIM_NAME.startswith("modelsim") + else AttributeError + if SIM_NAME.startswith("verilator") + else () +) +async def test_in_arr_packed_unpacked(dut): + test_value = [365, 365, 365] + dut.in_arr_packed_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_arr_packed_unpacked.value == test_value + + +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test(expect_error=AttributeError if SIM_NAME.startswith("verilator") else ()) +async def test_in_arr_unpacked_unpacked(dut): + test_value = 3 * [3 * [5]] + dut.in_arr_unpacked_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_arr_unpacked_unpacked.value == test_value + + +@cocotb.test() +async def test_in_2d_arr_packed(dut): + test_value = (365 << 18) | (365 << 9) | (365) + dut.in_2d_arr_packed.value = test_value + await Timer(1, "ns") + assert 
dut.out_2d_arr_packed.value == test_value + + +# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605) +# Verilator doesn't support multi-dimensional unpacked arrays (gh-3611) +@cocotb.test( + expect_error=IndexError + if cocotb.LANGUAGE == "verilog" and SIM_NAME.startswith("modelsim") + else AttributeError + if SIM_NAME.startswith("verilator") + else () +) +async def test_in_2d_arr_unpacked(dut): + test_value = [365, 365, 365] + dut.in_2d_arr_unpacked.value = test_value + await Timer(1, "ns") + assert dut.out_2d_arr_unpacked.value == test_value + + +@cocotb.test() +async def test_in_3d_arr(dut): + test_value = (365 << 18) | (365 << 9) | (365) + dut.in_3d_arr.value = test_value + await Timer(1, "ns") + assert dut.out_3d_arr.value == test_value diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..59b3a9bdebfcb0eeb7a99450c504dcde487fe869 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/Makefile @@ -0,0 +1,11 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +# test MODULE contains a multi-level Python module path, separated by period + +MODULE = test_package.test_module_path + +export PYTHONPATH := . 
+ +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/__init__.py b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/test_package/__init__.py b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/test_package/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/test_package/test_module_path.py b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/test_package/test_module_path.py new file mode 100644 index 0000000000000000000000000000000000000000..3d3be07cf7494d0b6130fbd7c040f9f01e3e83eb --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_multi_level_module_path/test_package/test_module_path.py @@ -0,0 +1,12 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +"""Test for multi-level module path in MODULE""" + +import cocotb + + +@cocotb.test() +async def test_pass(_): + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_package/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_package/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..f93d9309a82bdc23026b4b7909bbfa1e99051837 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_package/Makefile @@ -0,0 +1,46 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +ifeq ($(SIM),) +USING_ICARUS := 1 +endif + +ifeq ($(shell echo $(SIM) | tr A-Z a-z),icarus) +USING_ICARUS := 1 +endif + +ifneq ($(USING_ICARUS),) +ICARUS_MIN := 12.0 +ICARUS_VERSION := $(shell iverilog -V 2>/dev/null | head -n1 | cut -d ' ' -f 4) +MIN_VERSION := $(shell printf "%s\n%s\n" "$(ICARUS_MIN)" "$(ICARUS_VERSION)" | sort -g | head -1) +ifneq ($(MIN_VERSION),$(ICARUS_MIN)) +SKIP := 1 +$(info "Skipping test_defaultless_parameter since icarus < v12.0 doesn't support vpiInstance iteration") +endif +endif + +TOPLEVEL_LANG ?= verilog +ifneq ($(TOPLEVEL_LANG),verilog) +SKIP := 1 +$(info "Skipping . . . Verilog only") +endif + +ifeq ($(SKIP),) + +VERILOG_SOURCES = cocotb_package_pkg.sv \ + cocotb_package.sv +TOPLEVEL := cocotb_package +MODULE = test_package + +include $(shell cocotb-config --makefiles)/Makefile.sim + +else + +all: + @echo "Skipping test_package" + +clean:: +# nothing to clean, just define target in this branch + +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_package/cocotb_package.sv b/testbed/cocotb__cocotb/tests/test_cases/test_package/cocotb_package.sv new file mode 100644 index 0000000000000000000000000000000000000000..0478395ddda858781d0b92570838d772e7068c47 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_package/cocotb_package.sv @@ -0,0 +1,11 @@ +// Copyright cocotb contributors +// Licensed under the Revised BSD License, see LICENSE for details. 
+// SPDX-License-Identifier: BSD-3-Clause + +module cocotb_package; + // Necessary for Xcelium and Riviera in order for compiled packages to be visible + import cocotb_package_pkg_1::*; + import cocotb_package_pkg_2::*; + + parameter int seven_int = 7; +endmodule diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_package/test_package.py b/testbed/cocotb__cocotb/tests/test_cases/test_package/test_package.py new file mode 100644 index 0000000000000000000000000000000000000000..aa9f1f2f7e3f3dc4c5c2483d847b63be273f9bc0 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_package/test_package.py @@ -0,0 +1,60 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +""" +A set of tests that demonstrate package access +""" + +import logging + +import cocotb +from cocotb.result import TestSuccess + + +@cocotb.test() +async def test_params(dut): + """Test package parameter access""" + tlog = logging.getLogger("cocotb.test") + + tlog.info("Checking Parameters:") + assert dut.seven_int.value == 7 + pkg1 = cocotb.packages.cocotb_package_pkg_1 + assert pkg1.five_int.value == 5 + assert pkg1.eight_logic.value == 8 + pkg2 = cocotb.packages.cocotb_package_pkg_2 + assert pkg2.eleven_int.value == 11 + + +@cocotb.test() +async def test_stringification(dut): + """Test package stringification""" + tlog = logging.getLogger("cocotb.test") + + tlog.info("Checking Strings:") + pkg1 = cocotb.packages.cocotb_package_pkg_1 + assert str(pkg1).startswith("HierarchyObject(cocotb_package_pkg_1") + assert str(pkg1.five_int) == "LogicObject(cocotb_package_pkg_1::five_int)" + assert str(pkg1.eight_logic) == "LogicObject(cocotb_package_pkg_1::eight_logic)" + pkg2 = cocotb.packages.cocotb_package_pkg_2 + assert str(pkg2).startswith("HierarchyObject(cocotb_package_pkg_2") + assert str(pkg2.eleven_int) == "LogicObject(cocotb_package_pkg_2::eleven_int)" + + +@cocotb.test() +async def 
test_dollar_unit(dut): + """Test $unit scope""" + tlog = logging.getLogger("cocotb.test") + + if cocotb.SIM_NAME.lower().startswith("riviera"): + tlog.info("Riviera does not support $unit access via vpiInstance") + raise TestSuccess + + tlog.info("Checking $unit:") + # Is $unit even a package? Xcelium says yes and 37.10 detail 5 would also suggest yes + pkgs = vars(cocotb.packages).keys() + f = filter(lambda x: "unit" in x, pkgs) + unit = list(f)[0] + tlog.info(f"Found $unit as {unit}") + unit_pkg = getattr(cocotb.packages, unit) + assert unit_pkg.unit_four_int.value == 4 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..9d80d6bfac191a2c88feda5aea48cde914b71205 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/Makefile @@ -0,0 +1,21 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +TOPLEVEL_LANG ?= verilog + +ifneq ($(TOPLEVEL_LANG),verilog) + +all: + @echo "Skipping test due to TOPLEVEL_LANG=$(TOPLEVEL_LANG) not being verilog" +clean:: + +else + +VERILOG_SOURCES = test_packed_union.sv +TOPLEVEL = test_packed_union +MODULE = test_packed_union + +include $(shell cocotb-config --makefiles)/Makefile.sim + +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.py b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.py new file mode 100644 index 0000000000000000000000000000000000000000..06c26edbaaed492e29012b84feea61eba7a9965c --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.py @@ -0,0 +1,21 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +import cocotb +from cocotb._sim_versions import RivieraVersion + + +# Riviera-PRO 2022.10 (VPI) and newer does not discover dut.t correctly (gh-3587) +@cocotb.test( + expect_error=Exception + if cocotb.SIM_NAME.lower().startswith(("verilator", "icarus", "ghdl")) + or ( + cocotb.SIM_NAME.lower().startswith("riviera") + and RivieraVersion(cocotb.SIM_VERSION) >= RivieraVersion("2022.10") + and cocotb.LANGUAGE == "verilog" + ) + else () +) +async def test_packed_union(dut): + dut.t.a.value = 0 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.sv b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.sv new file mode 100644 index 0000000000000000000000000000000000000000..07abd6c6776ea0cb3e50294efc1b2deed2e460c4 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_packed_union/test_packed_union.sv @@ -0,0 +1,12 @@ +/** + * Copyright cocotb contributors + * Licensed under the Revised BSD License, see LICENSE for details. + * SPDX-License-Identifier: BSD-3-Clause +*/ + +module test_packed_union + (input union packed { + logic [3:0] a; + logic [1:0][1:0] b; + } t); +endmodule : test_packed_union diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_plusargs/plusargs.py b/testbed/cocotb__cocotb/tests/test_cases/test_plusargs/plusargs.py new file mode 100644 index 0000000000000000000000000000000000000000..c289615a63c50a3a95734ffa80ec7a3601fbfe7d --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_plusargs/plusargs.py @@ -0,0 +1,44 @@ +# Copyright (c) 2013 Potential Ventures Ltd +# Copyright (c) 2013 SolarFlare Communications Inc +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# SolarFlare Communications Inc nor the +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +""" + plusarg testing +""" + +import cocotb + + +@cocotb.test() +async def plusargs_test(dut): + """Demonstrate plusarg access from Python test""" + + for name in cocotb.plusargs: + print("COCOTB:", name, cocotb.plusargs[name]) + + assert "test1" in cocotb.plusargs + assert cocotb.plusargs["foo"] == "bar" + assert cocotb.plusargs["lol"] == "wow=4" diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_seed/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_seed/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..8b5996041cf2e082756e66595cd78b32d7ab1654 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_seed/Makefile @@ -0,0 +1,10 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +run: + $(RM) number + $(MAKE) sim MODULE=test_other,test_seed RANDOM_SEED=1234 + $(MAKE) sim MODULE=test_seed TESTCASE=test_reproducibility RANDOM_SEED=1234 + +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_other.py b/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_other.py new file mode 100644 index 0000000000000000000000000000000000000000..ee0ebcc32b3ec344815bbc5d1311685fd160406f --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_other.py @@ -0,0 +1,11 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause +import cocotb + + +@cocotb.test() +async def test_pass(_): + # exists solely so there is another test in another module + # before the other module is run + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_seed.py b/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_seed.py new file mode 100644 index 0000000000000000000000000000000000000000..49ba7d8905b6d8329d78cb4a2ab6d682728d06f4 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_seed/test_seed.py @@ -0,0 +1,25 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause +import random + +import cocotb + + +@cocotb.test() +async def test_first(_): + # move generator to test that it doesn't affect the next test + for _ in range(100): + random.getrandbits(64) + + +@cocotb.test() +async def test_reproducibility(_): + try: + with open("number") as file: + a = int(file.read()) + assert a == random.getrandbits(32) + except FileNotFoundError: + with open("number", "w") as file: + number = random.getrandbits(32) + file.write(str(number)) diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..28762d52730eb226d38d44c3fa439d2efe0f2c20 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/Makefile @@ -0,0 +1,10 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +# select only y_tests from all MODULEs + +include ../../designs/sample_module/Makefile + +MODULE := x_tests,y_tests,y_tests_again +TESTCASE := y_test diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/x_tests.py b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/x_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..865975f45412050fc91a615d6bd918dd684cfdae --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/x_tests.py @@ -0,0 +1,6 @@ +import cocotb + + +@cocotb.test() +async def x_test(dut): + assert False diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests.py b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..bd65ffe1bbf02135ba6aef0618e47f972520c1d8 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests.py @@ -0,0 +1,6 @@ +import cocotb + + +@cocotb.test() +async def y_test(dut): + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests_again.py b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests_again.py new file mode 100644 index 0000000000000000000000000000000000000000..bd65ffe1bbf02135ba6aef0618e47f972520c1d8 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase/y_tests_again.py @@ -0,0 +1,6 @@ +import cocotb + + +@cocotb.test() +async def y_test(dut): + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..0f28bb06c58808b7eea86f563867fe0ef019ea59 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/Makefile @@ -0,0 +1,12 @@ +# Copyright cocotb contributors +# Licensed under the 
Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +# This module exists, but... +MODULE := x_tests +# ...this test does not exist +TESTCASE := y_test + +# TESTCASE filtering out all tests results in a warning + +include ../../designs/sample_module/Makefile diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/x_tests.py b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/x_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..edc4f28cf623c3ad31cfc88fdefa8160f793f42d --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_select_testcase_error/x_tests.py @@ -0,0 +1,6 @@ +import cocotb + + +@cocotb.test() +async def x_test(dut): + dut._log.info("x_test") diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_skipped/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_skipped/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..ef0d7e5f9c59c3a0ad8e3e9c95b9f023bf7e6942 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_skipped/Makefile @@ -0,0 +1,22 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +include ../../designs/sample_module/Makefile + +SKIPPED_TEST_FILE = ran_skipped_test~ + +clean:: + $(RM) -r ${SKIPPED_TEST_FILE} + +# Override the default target. We need to run clean (to remove the cached test file) +# and then test to make sure it is recreated. +.DEFAULT_GOAL := override +.PHONY: override +override: clean all + @test -f $(SKIPPED_TEST_FILE) || (echo "ERROR: skip=True test was not ran!" >&2 && exit 1) + +# Set TESTCASE; run test_skipped even though skip=True is set. 
+TESTCASE = test_skipped + +MODULE = test_skipped diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_skipped/test_skipped.py b/testbed/cocotb__cocotb/tests/test_cases/test_skipped/test_skipped.py new file mode 100644 index 0000000000000000000000000000000000000000..b71f831a4173b59316a78fce50b000d2b8e2ed66 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_skipped/test_skipped.py @@ -0,0 +1,15 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +import pathlib + +import cocotb + +skipped_file_name = "ran_skipped_test~" + + +@cocotb.test(skip=True) +async def test_skipped(dut): + """Touch a file so we can check that this test has run.""" + pathlib.Path(skipped_file_name).touch() diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..809b673f016b6d1c21d8246b1c38f19de94f7f99 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/Makefile @@ -0,0 +1,28 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +TOPLEVEL_LANG ?= verilog +VERILOG_SOURCES = $(shell pwd)/top.sv +MODULE = test_sv_if +TOPLEVEL = top + +# Simulator behavior: +# Icarus Verilog sees SV interfaces as vpiModule but doesn't discover signals inside them +# Verilator sees SV interfaces as vpiModule and discovers signals inside them +# Commercial simulators all see SV interfaces as vpiInterface and discover signals inside them + + +ifneq ($(TOPLEVEL_LANG),verilog) +all: + @echo "Skipping test due to TOPLEVEL_LANG=$(TOPLEVEL_LANG) not being verilog" +clean:: +else +ifeq ($(filter questa xcelium ius vcs riviera activehdl verilator,$(shell echo $(SIM) | tr A-Z a-z)),) +all:: + @echo "Skipping simulator $(SIM) because it might not support SV interfaces" +clean:: +else +include $(shell cocotb-config --makefiles)/Makefile.sim +endif +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/test_sv_if.py b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/test_sv_if.py new file mode 100644 index 0000000000000000000000000000000000000000..c10700614e22de1752b323b079dfb4d9bbdfb143 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/test_sv_if.py @@ -0,0 +1,15 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +import cocotb + + +@cocotb.test() +async def test_sv_if(dut): + """Test that signals in an interface are discovered and iterable""" + + dut.sv_if_i._discover_all() + assert hasattr(dut.sv_if_i, "a") + assert hasattr(dut.sv_if_i, "b") + assert hasattr(dut.sv_if_i, "c") diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/top.sv b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/top.sv new file mode 100644 index 0000000000000000000000000000000000000000..6856c98bc30caf71f7d331eaacba08783b8b6f17 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_sv_interface/top.sv @@ -0,0 +1,17 @@ +// Copyright cocotb contributors +// Licensed under the Revised BSD License, see LICENSE for details. +// SPDX-License-Identifier: BSD-3-Clause + +`timescale 1us/1us + +interface sv_if(); + logic a; + reg b; + wire c; +endinterface + +module top (); + +sv_if sv_if_i(); + +endmodule diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..7c791966fa0f8aa301ccc336bd958f97f9e9611f --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/Makefile @@ -0,0 +1,25 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +ifeq ($(SIM),questa) + +PROJ_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST)))) + +VHDL_SOURCES_mylib := $(PROJ_DIR)/mylib.vhd +VHDL_LIB_ORDER := mylib + +TOPLEVEL := myentity +TOPLEVEL_LIBRARY := mylib +TOPLEVEL_LANG := vhdl + +MODULE := test_myentity + +include $(shell cocotb-config --makefiles)/Makefile.sim + +else + +all: + @echo "Skipping test because TOPLEVEL_LIBRARY is only supported in Questa." 
+ +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/mylib.vhd b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/mylib.vhd new file mode 100644 index 0000000000000000000000000000000000000000..6a2b5fe840ef8598d091008f8a29b598cb3254ea --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/mylib.vhd @@ -0,0 +1,25 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. +-- SPDX-License-Identifier: BSD-3-Clause + +library ieee; + use ieee.std_logic_1164.all; + +entity myentity is + port ( + clk : in std_logic; + a_data : in std_logic_vector(31 downto 0); + b_data : out std_logic_vector(31 downto 0)); +end entity myentity; + +architecture rtl of myentity is +begin + + process (clk) is + begin + if (rising_edge(clk)) then + b_data <= a_data; + end if; + end process; + +end architecture rtl; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/test_myentity.py b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/test_myentity.py new file mode 100644 index 0000000000000000000000000000000000000000..381b5cb5e386817bfe6727dae72f11e2091b873b --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_toplevel_library/test_myentity.py @@ -0,0 +1,11 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +import cocotb + + +@cocotb.test() +async def test_myentity(dut): + # if we got here, the test worked + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d89b11a6138582d7054f24bc98ab9e3b48ddc74f --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/Makefile @@ -0,0 +1,29 @@ +############################################################################## +# Copyright (c) 2015 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +############################################################################### + +include ../../designs/uart2bus/Makefile +MODULE = test_verilog_access diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/test_verilog_access.py b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/test_verilog_access.py new file mode 100644 index 0000000000000000000000000000000000000000..6e72bde090f466b5dab4c6a523e1ec9174b6b4bf --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_access/test_verilog_access.py @@ -0,0 +1,68 @@ +# Copyright (c) 2015 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import logging + +import cocotb +from cocotb.handle import HierarchyObject, LogicObject + + +@cocotb.test() +async def port_not_hierarchy(dut): + """ + Test for issue raised by Luke - iteration causes a toplevel port type to + change from LogicObject to HierarchyObject + """ + fails = 0 + tlog = logging.getLogger("cocotb.test") + + def check_instance(obj, objtype): + if not isinstance(obj, objtype): + tlog.error( + "Expected {} to be of type {} but got {}".format( + obj._path, objtype.__name__, type(obj).__name__ + ) + ) + return 1 + tlog.info(f"{obj._path} is {type(obj).__name__}") + return 0 + + fails += check_instance(dut.clk, LogicObject) + fails += check_instance(dut.i_verilog, HierarchyObject) + fails += check_instance(dut.i_verilog.clock, LogicObject) + fails += check_instance(dut.i_verilog.tx_data, LogicObject) + + for _ in dut: + pass + + for _ in dut.i_verilog: + pass + + fails += check_instance(dut.clk, LogicObject) + fails += check_instance(dut.i_verilog, HierarchyObject) + fails += check_instance(dut.i_verilog.clock, LogicObject) + fails += check_instance(dut.i_verilog.tx_data, LogicObject) + + assert fails == 0 diff --git 
a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..6b09d0fddbd5552f5cb0baeecbb0612ad0910e7f --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/Makefile @@ -0,0 +1,24 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +TOPLEVEL_LANG ?= verilog + +ifneq ($(TOPLEVEL_LANG),verilog) + +all: + @echo "Skipping test due to TOPLEVEL_LANG=$(TOPLEVEL_LANG) not being verilog" +clean:: + +else + +VERILOG_INCLUDE_DIRS = \ + ./common \ + ./const_stream +VERILOG_SOURCES = simple_and.sv +TOPLEVEL = simple_and +MODULE = test_verilog_include_dirs + +include $(shell cocotb-config --makefiles)/Makefile.sim + +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/a.vh b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/a.vh new file mode 100644 index 0000000000000000000000000000000000000000..5ddaf3c68b3c1bd676b196092a33e4bfe4c1c11b --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/a.vh @@ -0,0 +1 @@ +`define DATA_BYTES 8 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/b.vh b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/b.vh new file mode 100644 index 0000000000000000000000000000000000000000..aad30237e56747eb8523d29c96cd427e6cf59a56 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/common/b.vh @@ -0,0 +1 @@ +`define DATA_WIDTH 5 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/const_stream/c.vh b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/const_stream/c.vh new file mode 100644 index 
0000000000000000000000000000000000000000..9c805254a0995e5111e4b2a81840d09551e9ec91 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/const_stream/c.vh @@ -0,0 +1 @@ +`define DATA_LAST 3 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/simple_and.sv b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/simple_and.sv new file mode 100644 index 0000000000000000000000000000000000000000..47dd8e4ed29d590989759e310e17118233926fbb --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/simple_and.sv @@ -0,0 +1,16 @@ +// Copyright cocotb contributors +// Licensed under the Revised BSD License, see LICENSE for details. +// SPDX-License-Identifier: BSD-3-Clause +`include "a.vh" +`include "b.vh" +`include "c.vh" + +module simple_and ( + input [`DATA_BYTES-1:0] a, + input [`DATA_WIDTH+2:0] b, + output [`DATA_LAST+4:0] c +); + +assign c = a & b; + +endmodule diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/test_verilog_include_dirs.py b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/test_verilog_include_dirs.py new file mode 100644 index 0000000000000000000000000000000000000000..329d273aaf11573be8fa7548f25e584a0ce18f2b --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_verilog_include_dirs/test_verilog_include_dirs.py @@ -0,0 +1,11 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause +import cocotb + + +# The purpose of this test is just to complete an elaboration cycle up to time 0, before simulation +# If it fails to get to this point then the addition of the include dirs failed! 
+@cocotb.test() +async def test_noop(_): + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..d7897648226f723567cc616c611b096ae0cc551e --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/Makefile @@ -0,0 +1,29 @@ +############################################################################### +# Copyright (c) 2015 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd, +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +############################################################################### + +include ../../designs/viterbi_decoder_axi4s/Makefile +MODULE = test_vhdl_access diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/test_vhdl_access.py b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/test_vhdl_access.py new file mode 100644 index 0000000000000000000000000000000000000000..c36640ec6a395e80afca1d3aa13fb9111cdc7683 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_access/test_vhdl_access.py @@ -0,0 +1,97 @@ +# Copyright (c) 2015 Potential Ventures Ltd +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Potential Ventures Ltd +# names of its contributors may be used to endorse or promote products +# derived from this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import logging + +import cocotb +import pytest +from cocotb.handle import EnumObject, HierarchyObject, IntegerObject, LogicObject + + +# GHDL discovers enum as `vpiNet` (gh-2600) +@cocotb.test(expect_fail=cocotb.SIM_NAME.lower().startswith("ghdl")) +async def check_enum_object(dut): + """ + Enumerations currently behave as normal signals + + TODO: Implement an EnumObject class and detect valid string mappings + """ + assert isinstance(dut.inst_ram_ctrl.write_ram_fsm, EnumObject) + + +# GHDL unable to access signals in generate loops (gh-2594) +@cocotb.test( + expect_error=IndexError if cocotb.SIM_NAME.lower().startswith("ghdl") else () +) +async def check_objects(dut): + """ + Check the types of objects that are returned + """ + tlog = logging.getLogger("cocotb.test") + + def check_instance(obj, objtype): + assert isinstance( + obj, objtype + ), "Expected {} to be of type {} but got {}".format( + obj._path, objtype.__name__, type(obj).__name__ + ) + tlog.info(f"{obj._path} is {type(obj).__name__}") + + # Hierarchy checks + check_instance(dut.inst_axi4s_buffer, HierarchyObject) + check_instance(dut.gen_branch_distance[0], HierarchyObject) + check_instance(dut.gen_branch_distance[0].inst_branch_distance, 
HierarchyObject) + check_instance(dut.gen_acs[0].inbranch_tdata_low, LogicObject) + check_instance(dut.gen_acs[0].inbranch_tdata_low[0], LogicObject) + check_instance(dut.aclk, LogicObject) + check_instance(dut.s_axis_input_tdata, LogicObject) + check_instance(dut.current_active, IntegerObject) + check_instance(dut.inst_axi4s_buffer.DATA_WIDTH, IntegerObject) + check_instance(dut.inst_ram_ctrl, HierarchyObject) + + assert ( + dut.inst_axi4s_buffer.DATA_WIDTH.value == 32 + ), f"Expected dut.inst_axi4s_buffer.DATA_WIDTH to be 32 but got {dut.inst_axi4s_buffer.DATA_WIDTH.value}" + + with pytest.raises(TypeError): + dut.inst_axi4s_buffer.DATA_WIDTH.value = 42 + + +@cocotb.test() +async def port_not_hierarchy(dut): + """ + Test for issue raised by Luke - iteration causes a toplevel port type to + change from LogicObject to HierarchyObject + """ + assert isinstance( + dut.aclk, LogicObject + ), f"dut.aclk should be LogicObject but got {type(dut.aclk).__name__}" + + for _ in dut: + pass + + assert isinstance( + dut.aclk, LogicObject + ), f"dut.aclk should be LogicObject but got {type(dut.aclk).__name__}" diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..8d956d7c1f06971db2a0c32da82f9f2125f5d9b4 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/Makefile @@ -0,0 +1,29 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause +SIM ?= ghdl +TOPLEVEL_LANG ?= vhdl +VHDL_SOURCES_blib := b.vhdl +VHDL_SOURCES := a.vhdl +TOPLEVEL := a +MODULE := test_ab + +ifneq ($(filter $(SIM),xcelium),) + COMPILE_ARGS += -v93 +endif + +ifneq ($(filter nvc questa modelsim xcelium ius,$(shell echo $(SIM) | tr A-Z a-z)),) + VHDL_LIB_ORDER := blib +endif + +ifneq ($(shell echo $(TOPLEVEL_LANG) | tr A-Z a-z),vhdl) +all: + @echo "Skipping test since only VHDL is supported" +clean:: +else ifeq ($(filter ghdl nvc questa modelsim xcelium ius,$(shell echo $(SIM) | tr A-Z a-z)),) +all: + @echo "Skipping test since only GHDL, NVC, Questa/ModelSim, Xcelium and Incisive are supported" +clean:: +else +include $(shell cocotb-config --makefiles)/Makefile.sim +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/a.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/a.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..081751f15e4b9d8f2936bd433c1d23f98fbd9eeb --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/a.vhdl @@ -0,0 +1,10 @@ +library blib; + +entity a is + port ( x : in boolean ); +end; + +architecture structural of a is +begin + b : entity blib.b port map (x); +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/b.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/b.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..c0989fc4bb9081a17dab4384779943a7987b7749 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/b.vhdl @@ -0,0 +1,11 @@ +entity b is + port ( x : in boolean ); +end; + + +architecture structural of b is +begin + process(x) begin + report b'instance_name; + end process; +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/test_ab.py b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/test_ab.py new file mode 100644 index 
0000000000000000000000000000000000000000..394d596dbf994aea7b94e5c877bf7e84003c409d --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries/test_ab.py @@ -0,0 +1,11 @@ +import cocotb + + +@cocotb.test() +async def test(dut): + # Toggling an input should trigger the simulator to print a message + # similar to: + # + # b.vhdl:9:5:@0ms:(report note): :a(structural):b@b(structural): + # + dut.x.value = False diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..5a2b843f253d98a42587819f6422fd0146586390 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/Makefile @@ -0,0 +1,34 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause +SIM ?= ghdl +TOPLEVEL_LANG ?= vhdl + +VHDL_SOURCES_blib := b.vhdl +VHDL_SOURCES_clib := c.vhdl +VHDL_SOURCES_dlib := d.vhdl +VHDL_SOURCES_elib := e.vhdl + +VHDL_SOURCES := a.vhdl +TOPLEVEL := a +MODULE := test_abcde + +ifneq ($(filter $(SIM),xcelium),) + COMPILE_ARGS += -v93 +endif + +ifneq ($(filter nvc questa modelsim xcelium,$(shell echo $(SIM) | tr A-Z a-z)),) + VHDL_LIB_ORDER := elib dlib clib blib +endif + +ifneq ($(shell echo $(TOPLEVEL_LANG) | tr A-Z a-z),vhdl) +all: + @echo "Skipping test since only VHDL is supported" +clean:: +else ifeq ($(filter ghdl nvc questa modelsim xcelium,$(shell echo $(SIM) | tr A-Z a-z)),) +all: + @echo "Skipping test since only GHDL, NVC, Questa/ModelSim and Xcelium are supported" +clean:: +else +include $(shell cocotb-config --makefiles)/Makefile.sim +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/a.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/a.vhdl new file mode 100644 index 
0000000000000000000000000000000000000000..05d46f35f8a4dcc5f51ed1f7b245d5cd3b2b2650 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/a.vhdl @@ -0,0 +1,13 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. +-- SPDX-License-Identifier: BSD-3-Clause +library blib; + +entity a is + port ( x : in boolean ); +end; + +architecture structural of a is +begin + b : entity blib.b port map (x); +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/b.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/b.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..832bfbfc5bd8c1c6c4769464450eca6523049177 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/b.vhdl @@ -0,0 +1,13 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. +-- SPDX-License-Identifier: BSD-3-Clause +library clib; + +entity b is + port ( x : in boolean ); +end; + +architecture structural of b is +begin + c : entity clib.c port map (x); +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/c.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/c.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..75acba680a50f648b8273c0da26eb70e7e053eb6 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/c.vhdl @@ -0,0 +1,13 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. 
+-- SPDX-License-Identifier: BSD-3-Clause +library dlib; + +entity c is + port ( x : in boolean ); +end; + +architecture structural of c is +begin + d : entity dlib.d port map (x); +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/d.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/d.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..d90c23f8d04b192476c9a1152903ebe4fe56e646 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/d.vhdl @@ -0,0 +1,13 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. +-- SPDX-License-Identifier: BSD-3-Clause +library elib; + +entity d is + port ( x : in boolean ); +end; + +architecture structural of d is +begin + e : entity elib.e port map (x); +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/e.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/e.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..bd7ec0e3a07ef3cc949fbb71d5f11cb41b878a2c --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/e.vhdl @@ -0,0 +1,14 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. 
+-- SPDX-License-Identifier: BSD-3-Clause +entity e is + port ( x : in boolean ); +end; + + +architecture structural of e is +begin + process(x) begin + report e'instance_name; + end process; +end; diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/test_abcde.py b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/test_abcde.py new file mode 100644 index 0000000000000000000000000000000000000000..0c30b432d275a41d90a7b28f9cdbb4621496a741 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_libraries_multiple/test_abcde.py @@ -0,0 +1,9 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause +import cocotb + + +@cocotb.test() +async def test(dut): + pass diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/Makefile b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..5b6cbc0b345d5158ba389306cbf55eccc6407ee4 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/Makefile @@ -0,0 +1,24 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. 
+# SPDX-License-Identifier: BSD-3-Clause + +ifeq ($(TOPLEVEL_LANG),vhdl) + +ifneq ($(filter $(SIM),ius xcelium),) + COMPILE_ARGS += -v93 +endif + +TOPLEVEL := vhdl_zerovector +MODULE := test_vhdl_zerovector +VHDL_SOURCES := vhdl_zerovector.vhdl + +# Cocotb inclusions +include $(shell cocotb-config --makefiles)/Makefile.sim + +else +all: + $(info Skipping simulation as only TOPLEVEL_LANG=vhdl is supported) + +clean:: + +endif diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/test_vhdl_zerovector.py b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/test_vhdl_zerovector.py new file mode 100644 index 0000000000000000000000000000000000000000..58b84709f3d5eaeac578e9fda677a7df76d038b6 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/test_vhdl_zerovector.py @@ -0,0 +1,64 @@ +# Copyright cocotb contributors +# Licensed under the Revised BSD License, see LICENSE for details. +# SPDX-License-Identifier: BSD-3-Clause + +import os + +import cocotb +import pytest +from cocotb.triggers import Timer + +is_questa_vhpi = ( + cocotb.SIM_NAME.lower().startswith("modelsim") + and os.environ["VHDL_GPI_INTERFACE"] == "vhpi" +) + + +@cocotb.test() +async def test_long_signal(dut): + """Write and read a normal signal (longer than 0).""" + dut.data_in.value = 0x5 + await Timer(1, "ns") + assert dut.data_out.value == 0x5, "Failed to readback dut.data_out" + + +@cocotb.test( + expect_error=AttributeError + if cocotb.SIM_NAME.lower().startswith( + ("ghdl", "xmsim", "ncsim", "riviera", "aldec", "nvc") + ) + or is_questa_vhpi + else () +) +async def test_read_zero_signal(dut): + """Read a zero vector. 
It should always read 0.""" + assert dut.Cntrl_out.value == 0, "Failed to readback dut.Cntrl_out" + + +@cocotb.test( + expect_error=AttributeError + if cocotb.SIM_NAME.lower().startswith( + ("ghdl", "xmsim", "ncsim", "riviera", "aldec", "nvc") + ) + or is_questa_vhpi + else () +) +async def test_write_zero_signal_with_0(dut): + """Write a zero vector with 0.""" + dut.Cntrl_out.value = 0x0 + await Timer(1, "ns") + assert dut.Cntrl_out.value == 0, "Failed to readback dut.Cntrl_out" + + +@cocotb.test( + expect_error=AttributeError + if cocotb.SIM_NAME.lower().startswith( + ("ghdl", "xmsim", "ncsim", "riviera", "aldec", "nvc") + ) + or is_questa_vhpi + else () +) +async def test_write_zero_signal_with_1(dut): + """Write a zero vector with 1. Should catch a "out of range" exception.""" + with pytest.raises(OverflowError): + dut.Cntrl_out.value = 0x1 diff --git a/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/vhdl_zerovector.vhdl b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/vhdl_zerovector.vhdl new file mode 100644 index 0000000000000000000000000000000000000000..9e0f456590156967a1289d2317cd9ffc683dcae5 --- /dev/null +++ b/testbed/cocotb__cocotb/tests/test_cases/test_vhdl_zerovector/vhdl_zerovector.vhdl @@ -0,0 +1,21 @@ +-- Copyright cocotb contributors +-- Licensed under the Revised BSD License, see LICENSE for details. 
+-- SPDX-License-Identifier: BSD-3-Clause + +library ieee; +use ieee.std_logic_1164.all; + +entity vhdl_zerovector is + generic(DataWidth : natural := 8; + CntrlWidth : natural := 0); + port(Data_in : in std_logic_vector(DataWidth - 1 downto 0); + Data_out : out std_logic_vector(DataWidth - 1 downto 0); + Cntrl_in : in std_logic_vector(CntrlWidth - 1 downto 0); + Cntrl_out : out std_logic_vector(CntrlWidth - 1 downto 0)); +end entity vhdl_zerovector; + +architecture RTL of vhdl_zerovector is +begin + Data_out <= Data_in; + Cntrl_out <= Cntrl_in; +end architecture RTL; diff --git a/testbed/conan-io__conan/.ci/travis/install.sh b/testbed/conan-io__conan/.ci/travis/install.sh new file mode 100644 index 0000000000000000000000000000000000000000..3db67237acd6009104b8b2d5431e6f410b9f6540 --- /dev/null +++ b/testbed/conan-io__conan/.ci/travis/install.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +set -e +set -x + +if [[ "$(uname -s)" == 'Darwin' ]]; then + brew update || brew update + brew outdated pyenv || brew upgrade pyenv + brew install pyenv-virtualenv + + if which pyenv > /dev/null; then + eval "$(pyenv init -)" + fi + if which pyenv > /dev/null; then + eval "$(pyenv init -)" + fi + + case "${PYVER}" in + py27) + pyenv install 2.7.10 + pyenv virtualenv 2.7.10 conan + ;; + py33) + pyenv install 3.3.6 + pyenv virtualenv 3.3.6 conan + ;; + py34) + pyenv install 3.4.3 + pyenv virtualenv 3.4.3 conan + ;; + py35) + pyenv install 3.5.0 + pyenv virtualenv 3.5.0 conan + ;; + py36) + pyenv install 3.6.0 + pyenv virtualenv 3.6.0 conan + ;; + + esac + pyenv rehash + pyenv activate conan +fi + +pip install -r conans/requirements_dev.txt +pip install -r conans/requirements_server.txt +pip install -r conans/requirements.txt diff --git a/testbed/conan-io__conan/.ci/travis/run.sh b/testbed/conan-io__conan/.ci/travis/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..15efea3a88f7fd40d1e92cb9982abf18da217779 --- /dev/null +++ b/testbed/conan-io__conan/.ci/travis/run.sh @@ 
-0,0 +1,13 @@ +#!/bin/bash + +set -e +set -x + +if [[ "$(uname -s)" == 'Darwin' ]]; then + if which pyenv > /dev/null; then + eval "$(pyenv init -)" + fi + pyenv activate conan +fi + +nosetests --with-coverage conans.test diff --git a/testbed/conan-io__conan/.codecov.yml b/testbed/conan-io__conan/.codecov.yml new file mode 100644 index 0000000000000000000000000000000000000000..43ef6a38928f2bf6f733f1a3aedbf1aade19a980 --- /dev/null +++ b/testbed/conan-io__conan/.codecov.yml @@ -0,0 +1,35 @@ +codecov: + notify: + require_ci_to_pass: yes + +coverage: + precision: 2 + round: down + range: "85...100" + + status: + project: + default: + threshold: 3% + patch: no + changes: no + + notify: + slack: + default: + url: "https://hooks.slack.com/services/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" + threshold: 1% # allow coverage to drop by 1% without posting + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +comment: + layout: "header, diff" + behavior: default + require_changes: no + diff --git a/testbed/conan-io__conan/.coveragerc b/testbed/conan-io__conan/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..9179d3ba4413ee30f731c0cc540b41da3a0726f6 --- /dev/null +++ b/testbed/conan-io__conan/.coveragerc @@ -0,0 +1,6 @@ +[run] +omit = + *conanfile.py* + +include = + *conans* diff --git a/testbed/conan-io__conan/.gitignore b/testbed/conan-io__conan/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c9513213ddabc36958cb8fddadcc9bfe205982d1 --- /dev/null +++ b/testbed/conan-io__conan/.gitignore @@ -0,0 +1,94 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before 
PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + + +# OS generated files # +###################### +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +Icon? +ehthumbs.db +Thumbs.db + +.pydev* +.project + +# IDEs # +######## +.metadata +.idea +.history/ + +conan.conf +*default_package_folder + +#Eclipse folder +.settings + +#Generated certificate file +cacert.pem + +#linux backup files +*~ +.*.swp + +#Pyinstaller generated binaries +/pyinstaller diff --git a/testbed/conan-io__conan/.travis.yml b/testbed/conan-io__conan/.travis.yml new file mode 100644 index 0000000000000000000000000000000000000000..a6a9eaff5e5be06a5aa6a674b36cf1c075aa66fb --- /dev/null +++ b/testbed/conan-io__conan/.travis.yml @@ -0,0 +1,44 @@ +language: python +python: + - 2.7 + - 3.4 + - 3.5 + - 3.6 +os: linux +sudo: required +dist: trusty + +env: + - CONAN_COMPILER=gcc CONAN_COMPILER_VERSION=4.8 + +matrix: + include: + - language: generic + os: osx + env: PYVER=py27 CONAN_COMPILER=apple-clang CONAN_COMPILER_VERSION=6.0 + + - language: generic + os: osx + env: PYVER=py34 CONAN_COMPILER=apple-clang CONAN_COMPILER_VERSION=6.0 + + - language: generic + os: osx + env: PYVER=py35 CONAN_COMPILER=apple-clang CONAN_COMPILER_VERSION=6.0 + + - language: generic + os: osx + env: PYVER=py36 CONAN_COMPILER=apple-clang CONAN_COMPILER_VERSION=6.0 + +# command to install dependencies +install: + - ./.ci/travis/install.sh +before_script: + - export PYTHONPATH=$PYTHONPATH:$(pwd) + - export CONAN_LOGGING_LEVEL=10 +# command to run tests +script: + - ulimit -n 2048 # Error with py3 and OSX, max file descriptors + - ./.ci/travis/run.sh + +after_success: + - bash <(curl -s 
https://codecov.io/bash) diff --git a/testbed/conan-io__conan/LICENSE.md b/testbed/conan-io__conan/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..59c2db49ee7b6af63d580b5f1d97f40a7f97e0e3 --- /dev/null +++ b/testbed/conan-io__conan/LICENSE.md @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) 2016 JFrog LTD + + + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + + + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + + + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + diff --git a/testbed/conan-io__conan/README.rst b/testbed/conan-io__conan/README.rst new file mode 100644 index 0000000000000000000000000000000000000000..ece3a0772d1dc773324590fe89721d475bb22df6 --- /dev/null +++ b/testbed/conan-io__conan/README.rst @@ -0,0 +1,227 @@ +Conan +===== + +A distributed, open source, package manager. 
+ ++------------------------+-------------------------+----------------------+-----------------------+ +| **master (linux/osx)** | **develop (linux/osx)** | **master (windows)** | **develop** (windows) | ++========================+=========================+======================+=======================+ +| |Build Status1| | |Build Status2| | |Build status3| | |Build status4| | ++------------------------+-------------------------+----------------------+-----------------------+ + ++------------------------+---------------------------+--------------------------------------------------+ +| **Coverage develop** | **Coverage master** | **Coverage graph** | ++========================+===========================+==================================================+ +| |Develop coverage| | |Master coverage| | |Coverage graph| | ++------------------------+---------------------------+--------------------------------------------------+ + + + + +Setup +====== + +From binaries +------------- + +We have installers for `most platforms here `__ but you +can run **conan** from sources if you want + + +From pip +-------- + +Conan is compatible with Python 2 and Python 3. + +- Install pip following `pip docs`_ + +- Install conan: + +:: + + $ pip install conan + + +From Homebrew (OSx) +------------------- + +- Install Homebrew following `brew homepage`_. + +:: + + $ brew update + $ brew install conan + + + +From source +----------- + +You can run **conan** client and server in Windows, MacOS, and Linux. + +Install *python and pip*, search in google instructions for your operating system. 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Clone conan repository +~~~~~~~~~~~~~~~~~~~~~~ + +:: + + $ git clone https://github.com/conan-io/conan.git + +Install python requirements +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +For running the client: + +:: + + $ sudo pip install -r conans/requirements.txt + +Server: + +:: + + $ sudo apt-get install python-dev + $ sudo pip install -r conans/requirements_server.txt + +Development: + +:: + + $ sudo pip install -r conans/requirements_dev.txt + +Running the tests +~~~~~~~~~~~~~~~~~~ + +Make sure that the Python requirements have been installed. + +Before you can run the tests, you need to set a few environment +variables first. + +:: + + $ export PYTHONPATH=$PYTHONPATH:$(pwd) + + +On Windows it would be (while being in the conan root directory): + +:: + + $ export PYTHONPATH=. + +Ensure that your ``cmake`` has version 2.8 or later. You can see the +version with the following command: + +:: + + $ cmake --version + +The appropriate values of ``CONAN_COMPILER`` and +``CONAN_COMPILER_VERSION`` depend on your operating system and your +requirements. + +These should work for the GCC from ``build-essential`` on Ubuntu 14.04: + +:: + + $ export CONAN_COMPILER=gcc + $ export CONAN_COMPILER_VERSION=4.8 + +These should work for OS X: + +:: + + $ export CONAN_COMPILER=clang + $ export CONAN_COMPILER_VERSION=3.5 + +Finally, there are some tests that use conan to package Go-lang +libraries, so you might **need to install go-lang** in your computer and +add it to the path. + +You can run the actual tests like this: + +:: + + $ nosetests . + + +There are a couple of test attributes defined, as ``slow``, or ``golang`` that you can use +to filter the tests, and do not execute them: + +:: + + $ nosetests . 
-a !golang + +A few minutes later it should print ``OK``: + +:: + + .................................................................................................................................................. + ---------------------------------------------------------------------- + Ran 146 tests in 50.993s + + OK + +Create a launcher +~~~~~~~~~~~~~~~~~ + +Conan entry point is "conans.conan.main" module. Fill the absolute path +of the cloned repository folder: + +:: + + #!/usr/bin/env python + import sys + sys.path.append('/home/user/conan') # EDIT!! + + from conans.conan import main + main(sys.argv[1:]) + +If you are a Windows user, you can name this file "conan.py" and create +a file "conan.bat" that calls the python module: + +:: + + CALL python C:/Users/user/conan.py %* + +Then add that 'conan' file to your PATH and you are ready: + +:: + + $ conan --help + + Conan commands. Type $conan "command" -h for help + build calls your project conanfile.py "build" method. + export copies a conanfile.py and associated (export) files to your local store, + install install in the local store the given requirements. + remove Remove any folder from your local/remote store + search show local/remote packages + test build and run your package test. Must have conanfile.py with "test" + upload uploads a conanfile or binary packages from the local store to any remote. + user shows or change the current user + +License +------- + +`MIT LICENSE <./LICENSE.md>`__ + +.. |Build Status1| image:: https://travis-ci.org/conan-io/conan.svg?branch=master + :target: https://travis-ci.org/conan-io/conan +.. |Build Status2| image:: https://travis-ci.org/conan-io/conan.svg?branch=develop + :target: https://travis-ci.org/conan-io/conan +.. |Build status3| image:: https://ci.appveyor.com/api/projects/status/dae0ple27akmpgj4/branch/master?svg=true + :target: https://ci.appveyor.com/project/ConanCIintegration/conan/branch/master +.. 
|Build status4| image:: https://ci.appveyor.com/api/projects/status/dae0ple27akmpgj4/branch/develop?svg=true + :target: https://ci.appveyor.com/project/ConanCIintegration/conan/branch/develop +.. _`pip docs`: https://pip.pypa.io/en/stable/installing/ +.. _`brew homepage`: http://brew.sh/ +.. |Develop coverage| image:: https://codecov.io/gh/conan-io/conan/branch/develop/graph/badge.svg + :target: https://codecov.io/gh/conan-io/conan/branch/develop +.. |Master coverage| image:: https://codecov.io/gh/conan-io/conan/branch/master/graph/badge.svg + :target: https://codecov.io/gh/conan-io/conan/branch/master +.. |Coverage graph| image:: https://codecov.io/gh/conan-io/conan/branch/develop/graphs/tree.svg + :height: 50px + :width: 50 px + :alt: Conan develop coverage + diff --git a/testbed/conan-io__conan/__init__.py b/testbed/conan-io__conan/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/appveyor.yml b/testbed/conan-io__conan/appveyor.yml new file mode 100644 index 0000000000000000000000000000000000000000..cba990ef2f2a91df0ed4759f0e23cb6d04ddb979 --- /dev/null +++ b/testbed/conan-io__conan/appveyor.yml @@ -0,0 +1,27 @@ +build: false + +environment: + matrix: + - PYTHON: "C:\\Python27" + - PYTHON: "C:\\Python34" + - PYTHON: "C:\\Python35" + +init: + - "ECHO %PYTHON%" + +install: + - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" + - "set PYTHONPATH=%PYTHONPATH%;%CD%" + - "set CONAN_LOGGING_LEVEL=10" + - "set CONAN_COMPILER=Visual Studio" + - "set CONAN_COMPILER_VERSION=12" + - "%PYTHON%/Scripts/pip.exe install -r conans/requirements.txt" + - "%PYTHON%/Scripts/pip.exe install -r conans/requirements_dev.txt" + - "%PYTHON%/Scripts/pip.exe install -r conans/requirements_server.txt" + +test_script: + - "nosetests --with-coverage conans.test" + +after_test: + - "codecov" + diff --git a/testbed/conan-io__conan/conans/__init__.py 
import os
import fnmatch
import shutil
from collections import defaultdict


def report_copied_files(copied, output, warn=False):
    """Log a per-extension summary of copied files.

    param copied: iterable of file paths that were copied
    param output: logger-like object exposing ``info`` and ``warn``
    param warn: if True, emit a warning when nothing was copied
    """
    ext_files = defaultdict(list)
    for f in copied:
        _, ext = os.path.splitext(f)
        ext_files[ext].append(os.path.basename(f))

    for ext, files in ext_files.items():
        # Only enumerate the individual file names when there are few of them
        files_str = (": " + ", ".join(files)) if len(files) < 5 else ""
        output.info("Copied %d '%s' files%s" % (len(files), ext, files_str))

    if warn and not ext_files:
        output.warn("No files copied!")


class FileCopier(object):
    """ main responsible of copying files from place to place:
    package: build folder -> package folder
    imports: package folder -> user folder
    export: user folder -> store "export" folder
    """
    def __init__(self, root_source_folder, root_destination_folder):
        """
        Takes the base folders to copy resources src -> dst. These folders names
        will not be used in the relative names while copying
        param root_source_folder: The base folder to copy things from, typically the
                                  store build folder
        param root_destination_folder: The base folder to copy things to, typically the
                                       store package folder
        """
        self._base_src = root_source_folder
        self._base_dst = root_destination_folder
        # Accumulates the relative names of every file copied, for report()
        self._copied = []

    def report(self, output, warn=False):
        """Print a summary of everything copied so far through *output*."""
        report_copied_files(self._copied, output, warn)

    def __call__(self, pattern, dst="", src="", keep_path=True, links=False, symlinks=None):
        """ FileCopier is lazy, it just store requested copies, and execute them later
        param pattern: an fnmatch file pattern of the files that should be copied. Eg. *.dll
        param dst: the destination local folder, wrt to current conanfile dir, to which
                   the files will be copied. Eg: "bin"
        param src: the source folder in which those files will be searched. This folder
                   will be stripped from the dst name. Eg.: lib/Debug/x86
        param keep_path: False if you want the relative paths to be maintained from
                         src to dst folders, or just drop. False is useful if you want
                         to collect e.g. many *.libs among many dirs into a single
                         lib dir
        return: list of copied files
        """
        # "symlinks" is an alias kept for backwards compatibility; it wins when given
        if symlinks is not None:
            links = symlinks
        # Check for ../ patterns and allow them
        # NOTE(review): this rebinds self._base_src, so a "../" pattern affects every
        # later call on the same FileCopier instance — confirm this is intended
        reldir = os.path.abspath(os.path.join(self._base_src, pattern))
        if self._base_src.startswith(os.path.dirname(reldir)):  # ../ relative dir
            self._base_src = os.path.dirname(reldir)
            pattern = os.path.basename(reldir)

        copied_files = []
        src = os.path.join(self._base_src, src)
        dst = os.path.join(self._base_dst, dst)
        for root, subfolders, files in os.walk(src, followlinks=True):
            basename = os.path.basename(root)
            # Skip git or svn subfolders
            if basename in [".git", ".svn"]:
                subfolders[:] = []
                continue
            if basename == "test_package":  # DO NOT export test_package/build folder
                try:
                    subfolders.remove("build")
                except ValueError:  # no "build" subfolder present
                    pass

            relative_path = os.path.relpath(root, src)
            for f in files:
                relative_name = os.path.normpath(os.path.join(relative_path, f))
                if fnmatch.fnmatch(relative_name, pattern):
                    abs_src_name = os.path.join(root, f)
                    filename = relative_name if keep_path else f
                    abs_dst_name = os.path.normpath(os.path.join(dst, filename))
                    try:
                        os.makedirs(os.path.dirname(abs_dst_name))
                    except OSError:  # already exists (or not creatable; copy will fail)
                        pass
                    if links and os.path.islink(abs_src_name):
                        # Re-create the symlink at destination instead of copying content
                        linkto = os.readlink(abs_src_name)
                        try:
                            os.remove(abs_dst_name)
                        except OSError:
                            pass
                        os.symlink(linkto, abs_dst_name)
                    else:
                        shutil.copy2(abs_src_name, abs_dst_name)
                    copied_files.append(abs_dst_name)
                    self._copied.append(relative_name)
        return copied_files
from conans.errors import ConanException, NotFoundException
from conans.model.conan_file import ConanFile, create_exports
from conans.util.files import rmdir
import inspect
import uuid
import imp  # NOTE(review): deprecated since Py3.4, removed in 3.12 — migrate to importlib
import os
from conans.util.files import load
from conans.util.config_parser import ConfigParser
from conans.model.options import OptionsValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
import sys
from conans.model.conan_generator import Generator
from conans.client.generators import _save_generator
from conans.model.scope import Scopes
from conans.model.values import Values


class ConanFileLoader(object):
    """Loads conanfile.py / conanfile.txt files into ConanFile objects,
    applying the user settings, options, scopes and environment."""

    def __init__(self, runner, settings, package_settings, options, scopes, env, package_env):
        '''
        @param settings: Settings object, to assign to ConanFile at load time
        @param options: OptionsValues, necessary so the base conanfile loads the options
                        to start propagation, and having them in order to call build()
        @param package_settings: Dict with {recipe_name: {setting_name: setting_value}}
        @param env: list of tuples for environment vars: [(var, value), (var2, value2)...]
        @param package_env: package dict of list of tuples: {"name": [(var, v1), (var2, v2)...]}
        '''
        self._runner = runner
        assert settings is None or isinstance(settings, Settings)
        assert options is None or isinstance(options, OptionsValues)
        assert scopes is None or isinstance(scopes, Scopes)
        assert env is None or isinstance(env, list)
        assert package_env is None or isinstance(package_env, dict)
        # assert package_settings is None or isinstance(package_settings, dict)
        self._settings = settings
        self._user_options = options
        self._scopes = scopes
        self._package_settings = package_settings
        self._env = env or []
        self._package_env = package_env or {}

    def _parse_module(self, conanfile_module, consumer, filename):
        """ Parses a python in-memory module, to extract the classes, mainly the main
        class defining the Recipe, but also process possible existing generators
        @param conanfile_module: the module to be processed
        @param consumer: if this is a root node in the hierarchy, the consumer project
        @return: the main ConanFile class from the module
        """
        result = None
        for name, attr in conanfile_module.__dict__.items():
            if "_" in name:  # skip private/dunder names
                continue
            if (inspect.isclass(attr) and issubclass(attr, ConanFile) and attr != ConanFile and
                    attr.__dict__["__module__"] == filename):
                if result is None:
                    result = attr
                else:
                    raise ConanException("More than 1 conanfile in the file")
            if (inspect.isclass(attr) and issubclass(attr, Generator) and attr != Generator and
                    attr.__dict__["__module__"] == filename):
                # Register any custom generator found in the recipe module
                _save_generator(attr.__name__, attr)

        if result is None:
            raise ConanException("No subclass of ConanFile")

        # check name and version were specified (only required for non-consumer recipes)
        if not consumer:
            if not hasattr(result, "name") or not result.name:
                raise ConanException("conanfile didn't specify name")
            if not hasattr(result, "version") or not result.version:
                raise ConanException("conanfile didn't specify version")

        return result

    def _parse_file(self, conan_file_path):
        """ From a given path, obtain the in memory python import module
        """
        # Check if precompiled exist, delete it
        if os.path.exists(conan_file_path + "c"):
            os.unlink(conan_file_path + "c")

        # Python 3: stale bytecode cache would shadow an edited recipe
        pycache = os.path.join(os.path.dirname(conan_file_path), "__pycache__")
        if os.path.exists(pycache):
            rmdir(pycache)

        if not os.path.exists(conan_file_path):
            raise NotFoundException("%s not found!" % conan_file_path)

        filename = os.path.splitext(os.path.basename(conan_file_path))[0]

        try:
            current_dir = os.path.dirname(conan_file_path)
            sys.path.append(current_dir)
            old_modules = list(sys.modules.keys())
            loaded = imp.load_source(filename, conan_file_path)
            # Put all imported files under a new package name, so recipes with the
            # same module names loaded from different folders don't collide
            module_id = uuid.uuid1()
            added_modules = set(sys.modules).difference(old_modules)
            for added in added_modules:
                module = sys.modules[added]
                if module:
                    folder = os.path.dirname(module.__file__)
                    if folder.startswith(current_dir):
                        module = sys.modules.pop(added)
                        sys.modules["%s.%s" % (module_id, added)] = module
        except Exception:
            import traceback
            trace = traceback.format_exc().split('\n')
            raise ConanException("Unable to load conanfile in %s\n%s" % (conan_file_path,
                                                                         '\n'.join(trace[3:])))
        finally:
            sys.path.pop()  # undo the sys.path.append above

        return loaded, filename

    def load_class(self, conanfile_path):
        """ Load only the class of the ConanFile recipe, but do not instantiate the object
        It is needed for the 'conan export' command
        """
        loaded, filename = self._parse_file(conanfile_path)
        try:
            result = self._parse_module(loaded, False, filename)
            # Exports is the only object field, we need to do this, because conan export needs it
            result.exports = create_exports(result)
            return result
        except Exception as e:  # re-raise with file name
            raise ConanException("%s: %s" % (conanfile_path, str(e)))

    def load_conan(self, conanfile_path, output, consumer=False, reference=None):
        """ loads a ConanFile object from the given file
        """
        loaded, filename = self._parse_file(conanfile_path)
        try:
            result = self._parse_module(loaded, consumer, filename)

            # Prepare the settings for the loaded conanfile
            # Mixing the global settings with the specified for that name if exist
            tmp_settings = self._settings.copy()
            if self._package_settings and result.name in self._package_settings:
                # Update the values, keeping old ones (confusing assign)
                values_tuple = self._package_settings[result.name]
                tmp_settings.values = Values.from_list(values_tuple)

            user, channel = (reference.user, reference.channel) if reference else (None, None)

            # Instance the conanfile
            result = result(output, self._runner, tmp_settings,
                            os.path.dirname(conanfile_path), user, channel)

            # Prepare the env variables mixing global env vars with the
            # package ones if name match
            tmp_env = []
            # Copy only the global variables not present in package level vars
            for var_name, value in self._env:
                if result.name in self._package_env:
                    if var_name not in self._package_env[result.name]:
                        tmp_env.append((var_name, value))
                else:
                    tmp_env.append((var_name, value))
            tmp_env.extend(self._package_env.get(result.name, []))
            result.env = tmp_env

            if consumer:
                self._user_options.descope_options(result.name)
                result.options.initialize_upstream(self._user_options)
                # If this is the consumer project, it has no name
                result.scope = self._scopes.package_scope()
            else:
                result.scope = self._scopes.package_scope(result.name)
            return result
        except Exception as e:  # re-raise with file name
            raise ConanException("%s: %s" % (conanfile_path, str(e)))

    def load_conan_txt(self, conan_txt_path, output):
        """Load a conanfile.txt into a ConanFile object."""
        if not os.path.exists(conan_txt_path):
            raise NotFoundException("Conanfile not found!")

        contents = load(conan_txt_path)
        path = os.path.dirname(conan_txt_path)

        conanfile = self.parse_conan_txt(contents, path, output)
        return conanfile

    def parse_conan_txt(self, contents, path, output):
        """Build a ConanFile from the text *contents* of a conanfile.txt."""
        conanfile = ConanFile(output, self._runner, Settings(), path)
        # It is necessary to copy the settings, because the above is only a constraint of
        # conanfile settings, and a txt doesn't define settings. Necessary for generators,
        # as cmake_multi, that check build_type.
        conanfile.settings = self._settings.copy()

        try:
            parser = ConanFileTextLoader(contents)
        except Exception as e:
            raise ConanException("%s:\n%s" % (path, str(e)))
        for requirement_text in parser.requirements:
            ConanFileReference.loads(requirement_text)  # Raise if invalid
            conanfile.requires.add(requirement_text)

        conanfile.generators = parser.generators

        options = OptionsValues.loads(parser.options)
        conanfile.options.values = options
        conanfile.options.initialize_upstream(self._user_options)

        # imports method
        conanfile.imports = ConanFileTextLoader.imports_method(conanfile,
                                                               parser.import_parameters)
        conanfile.scope = self._scopes.package_scope()
        return conanfile

    def load_virtual(self, reference, path):
        """Build a synthetic consumer ConanFile that just requires *reference*."""
        # If user don't specify namespace in options, assume that it is
        # for the reference (keep compatibility)
        conanfile = ConanFile(None, self._runner, self._settings.copy(), path)

        conanfile.requires.add(str(reference))  # Convert to string necessary
        # conanfile.options.values = options
        self._user_options.scope_options(reference.name)
        conanfile.options.initialize_upstream(self._user_options)

        conanfile.generators = []
        conanfile.scope = self._scopes.package_scope()

        return conanfile


class ConanFileTextLoader(object):
    """Parse a plain requirements file"""

    def __init__(self, input_text):
        # Prefer composition over inheritance, the __getattr__ was breaking things
        self._config_parser = ConfigParser(input_text, ["requires", "generators", "options",
                                                        "imports"], parse_lines=True)

    @property
    def requirements(self):
        """returns a list of requires
        EX:  "OpenCV/2.4.10@phil/stable"
        """
        return [r.strip() for r in self._config_parser.requires.splitlines()]

    @property
    def options(self):
        return self._config_parser.options

    @property
    def import_parameters(self):
        """Parse the [imports] section into (pattern, dest, src) tuples."""
        ret = []
        local_install_text = self._config_parser.imports
        for local_install_line in local_install_text.splitlines():
            invalid_line_msg = "Invalid imports line: %s" \
                               "\nEX: OpenCV/lib, * -> ./lib" % local_install_line
            try:
                if local_install_line.startswith("/") or local_install_line.startswith(".."):
                    raise ConanException("Import's paths can't begin with '/' or '..'")
                pair = local_install_line.split("->")
                source = pair[0].strip().split(',', 1)
                dest = pair[1].strip()
                src, pattern = source[0].strip(), source[1].strip()
                ret.append((pattern, dest, src))
            except ConanException as excp:
                # FIX: BaseException.message does not exist on Python 3 (PEP 352);
                # str(excp) works on both Python 2 and 3
                raise ConanException("%s\n%s" % (invalid_line_msg, str(excp)))
            except Exception:  # malformed line (missing "->", missing comma, ...)
                raise ConanException(invalid_line_msg)
        return ret

    @property
    def generators(self):
        return self._config_parser.generators.splitlines()

    @staticmethod
    def imports_method(conan_file, parameters):
        """Build an imports() closure that replays the parsed copy parameters."""
        def imports():
            for import_params in parameters:
                conan_file.copy(*import_params)
        return imports
conans.client.uploader import ConanUploader +from conans.client.printer import Printer +from conans.errors import NotFoundException, ConanException +from conans.client.generators import write_generators +from conans.client.importer import run_imports, undo_imports +from conans.model.ref import ConanFileReference, PackageReference +from conans.client.remover import ConanRemover +from conans.model.info import ConanInfo +from conans.model.values import Values +from conans.model.options import OptionsValues +from conans.model.build_info import DepsCppInfo, CppInfo +from conans.client import packager +from conans.client.detect import detected_os +from conans.client.package_copier import PackageCopier +from conans.client.output import ScopedOutput +from conans.client.proxy import ConanProxy +from conans.client.remote_registry import RemoteRegistry +from conans.model.scope import Scopes +from conans.client.client_cache import ClientCache +from conans.client.source import config_source, config_source_local +from conans.client.manifest_manager import ManifestManager +from conans.model.env_info import EnvInfo, DepsEnvInfo +from conans.tools import environment_append +from conans.client.require_resolver import RequireResolver +from conans.model.profile import Profile + + +def get_user_channel(text): + tokens = text.split('/') + try: + user = tokens[0] + channel = tokens[1] + except IndexError: + channel = "testing" + return user, channel + + +class ConanManager(object): + """ Manage all the commands logic The main entry point for all the client + business logic + """ + def __init__(self, client_cache, user_io, runner, remote_manager, search_manager): + assert isinstance(user_io, UserIO) + assert isinstance(client_cache, ClientCache) + self._client_cache = client_cache + self._user_io = user_io + self._runner = runner + self._remote_manager = remote_manager + self._current_scopes = None + self._search_manager = search_manager + + def _loader(self, current_path=None, 
user_settings_values=None, package_settings=None, + user_options_values=None, scopes=None, env=None, package_env=None): + + # The disk settings definition, already including the default disk values + settings = self._client_cache.settings + + conaninfo_scopes = Scopes() + user_options = OptionsValues(user_options_values) + + if current_path: + conan_info_path = os.path.join(current_path, CONANINFO) + if os.path.exists(conan_info_path): + existing_info = ConanInfo.load_file(conan_info_path) + settings.values = existing_info.full_settings + options = existing_info.full_options # Take existing options from conaninfo.txt + options.update(user_options) + user_options = options + conaninfo_scopes = existing_info.scope + + if user_settings_values: + aux_values = Values.from_list(user_settings_values) + settings.values = aux_values + + if scopes: + conaninfo_scopes.update_scope(scopes) + + self._current_scopes = conaninfo_scopes + return ConanFileLoader(self._runner, settings, package_settings=package_settings, + options=user_options, scopes=conaninfo_scopes, + env=env, package_env=package_env) + + def export(self, user, conan_file_path, keep_source=False): + """ Export the conans + param conanfile_path: the original source directory of the user containing a + conanfile.py + param user: user under this package will be exported + param channel: string (stable, testing,...) 
+ """ + assert conan_file_path + logger.debug("Exporting %s" % conan_file_path) + user_name, channel = get_user_channel(user) + conan_file = self._loader().load_class(os.path.join(conan_file_path, CONANFILE)) + for field in ["url", "license", "description"]: + field_value = getattr(conan_file, field, None) + if not field_value: + self._user_io.out.warn("Conanfile doesn't have '%s'.\n" + "It is recommended to add it as attribute" % field) + + conan_ref = ConanFileReference(conan_file.name, conan_file.version, user_name, channel) + conan_ref_str = str(conan_ref) + # Maybe a platform check could be added, but depends on disk partition + refs = self._search_manager.search(conan_ref_str, ignorecase=True) + if refs and conan_ref not in refs: + raise ConanException("Cannot export package with same name but different case\n" + "You exported '%s' but already existing '%s'" + % (conan_ref_str, " ".join(str(s) for s in refs))) + output = ScopedOutput(str(conan_ref), self._user_io.out) + export_conanfile(output, self._client_cache, conan_file.exports, conan_file_path, + conan_ref, conan_file.short_paths, keep_source) + + def download(self, reference, package_ids, remote=None): + """ Download conanfile and specified packages to local repository + @param reference: ConanFileReference + @param package_ids: Package ids or empty for download all + @param remote: install only from that remote + """ + assert(isinstance(reference, ConanFileReference)) + remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager, remote) + + package = remote_proxy.search(reference, None) + if not package: # Search the reference first, and raise if it doesn't exist + raise ConanException("'%s' not found in remote" % str(reference)) + + if package_ids: + remote_proxy.download_packages(reference, package_ids) + else: + packages_props = remote_proxy.search_packages(reference, None) + if not packages_props: + output = ScopedOutput(str(reference), self._user_io.out) + output.warn("No 
remote binary packages found in remote") + else: + remote_proxy.download_packages(reference, list(packages_props.keys())) + + def _get_graph(self, reference, current_path, remote, options, settings, filename, update, + check_updates, manifest_manager, scopes, package_settings, env, package_env): + + loader = self._loader(current_path, settings, package_settings, options, scopes, env, package_env) + # Not check for updates for info command, it'll be checked when dep graph is built + + remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager, remote, + update=update, check_updates=check_updates, + manifest_manager=manifest_manager) + + if isinstance(reference, ConanFileReference): + project_reference = None + conanfile = loader.load_virtual(reference, current_path) + is_txt = True + else: + conanfile_path = reference + project_reference = "PROJECT" + output = ScopedOutput(project_reference, self._user_io.out) + try: + if filename and filename.endswith(".txt"): + raise NotFoundException("") + conan_file_path = os.path.join(conanfile_path, filename or CONANFILE) + conanfile = loader.load_conan(conan_file_path, output, consumer=True) + is_txt = False + if conanfile.name is not None and conanfile.version is not None: + project_reference = "%s/%s@" % (conanfile.name, conanfile.version) + project_reference += "PROJECT" + except NotFoundException: # Load requirements.txt + conan_path = os.path.join(conanfile_path, filename or CONANFILE_TXT) + conanfile = loader.load_conan_txt(conan_path, output) + is_txt = True + # build deps graph and install it + local_search = None if update else self._search_manager + resolver = RequireResolver(self._user_io.out, local_search, remote_proxy) + builder = DepsGraphBuilder(remote_proxy, self._user_io.out, loader, resolver) + deps_graph = builder.load(None, conanfile) + # These lines are so the conaninfo stores the correct complete info + if is_txt: + conanfile.info.settings = loader._settings.values + 
        # NOTE(review): this chunk is the interior of class ConanManager; the class
        # header and the beginning of _get_graph() lie before this chunk.  The
        # following lines are the tail of _get_graph().
        conanfile.info.full_settings = loader._settings.values
        conanfile.info.scope = self._current_scopes
        conanfile.cpp_info = CppInfo(current_path)
        conanfile.env_info = EnvInfo(current_path)
        registry = RemoteRegistry(self._client_cache.registry, self._user_io.out)
        return (builder, deps_graph, project_reference, registry, conanfile,
                remote_proxy, loader)

    def info(self, reference, current_path, remote=None, options=None, settings=None,
             info=None, filename=None, update=False, check_updates=False, scopes=None,
             build_order=None, build_mode=None, package_settings=None):
        """ Fetch and build all dependencies for the given reference
        @param reference: ConanFileReference or path to user space conanfile
        @param current_path: where the output files will be saved
        @param remote: install only from that remote
        @param options: list of tuples: [(optionname, optionvalue), (optionname, optionvalue)...]
        @param settings: list of tuples: [(settingname, settingvalue), (settingname, value)...]
        @param package_settings: dict name=> settings: {"zlib": [(settingname, settingvalue), ...]}
        """

        def read_dates(deps_graph):
            # Map each reference in the graph to the timestamp string of its
            # locally cached manifest (used by the final info printout).
            ret = {}
            for ref, _ in sorted(deps_graph.nodes):
                if ref:
                    manifest = self._client_cache.load_manifest(ref)
                    ret[ref] = manifest.time_str
            return ret

        objects = self._get_graph(reference, current_path, remote, options, settings, filename,
                                  update, check_updates, None, scopes, package_settings, None, None)
        (builder, deps_graph, project_reference, registry, _, remote_proxy, _) = objects

        # --build-order mode: print the ordered groups and stop
        if build_order:
            result = deps_graph.build_order(build_order)
            self._user_io.out.info(", ".join(str(s) for s in result))
            return

        if build_mode is not False:  # sim_install is a policy or list of names (same as install build param)
            installer = ConanInstaller(self._client_cache, self._user_io, remote_proxy)
            nodes = installer.nodes_to_build(deps_graph, build_mode)
            # Only print the full reference when the short name is ambiguous
            counter = Counter(ref.conan.name for ref, _ in nodes)
            self._user_io.out.info(", ".join((str(ref)
                                              if counter[ref.conan.name] > 1 else str(ref.conan))
                                             for ref, _ in nodes))
            return

        if check_updates:
            graph_updates_info = builder.get_graph_updates_info(deps_graph)
        else:
            graph_updates_info = {}

        Printer(self._user_io.out).print_info(deps_graph, project_reference,
                                              info, registry, graph_updates_info,
                                              remote, read_dates(deps_graph))

    def read_profile(self, profile_name, cwd):
        """Load a Profile by name (from the profiles folder), absolute path, or
        cwd-relative path; return None when no name is given.  Raises
        ConanException listing the existing profiles when the file is missing.
        """
        if not profile_name:
            return None

        if os.path.isabs(profile_name):
            profile_path = profile_name
            folder = os.path.dirname(profile_name)
        elif profile_name.startswith("."):  # relative path name
            profile_path = os.path.abspath(os.path.join(cwd, profile_name))
            folder = os.path.dirname(profile_path)
        else:
            folder = self._client_cache.profiles_path
            profile_path = self._client_cache.profile_path(profile_name)

        try:
            text = load(profile_path)
        except Exception:
            # Build a helpful error listing the profiles that do exist.
            # NOTE(review): os.path.isdir(name) is checked against bare names,
            # not folder-joined paths — looks like it should be
            # os.path.isdir(os.path.join(folder, name)); confirm.
            if os.path.exists(folder):
                profiles = [name for name in os.listdir(folder) if not os.path.isdir(name)]
            else:
                profiles = []
            current_profiles = ", ".join(profiles) or "[]"
            raise ConanException("Specified profile '%s' doesn't exist.\nExisting profiles: "
                                 "%s" % (profile_name, current_profiles))

        try:
            return Profile.loads(text)
        except ConanException as exc:
            raise ConanException("Error reading '%s' profile: %s" % (profile_name, exc))

    def install(self, reference, current_path, remote=None, options=None, settings=None,
                build_mode=False, filename=None, update=False, check_updates=False,
                manifest_folder=None, manifest_verify=False, manifest_interactive=False,
                scopes=None, generators=None, profile_name=None, package_settings=None,
                env=None, package_env=None, no_imports=False):
        """ Fetch and build all dependencies for the given reference
        @param reference: ConanFileReference or path to user space conanfile
        @param current_path: where the output files will be saved
        @param remote: install only from that remote
        @param options: list of tuples: [(optionname, optionvalue), (optionname, optionvalue)...]
        @param settings: list of tuples: [(settingname, settingvalue), (settingname, value)...]
        @param package_settings: dict name=> settings: {"zlib": [(settingname, settingvalue), ...]}
        @param profile: name of the profile to use
        @param env: list of tuples for environment vars: [(var, value), (var2, value2)...]
        @param package_env: package dict of list of tuples: {"package_name": [(var, value), (var2, value2)...]}
        """
        generators = generators or []

        if manifest_folder:
            manifest_manager = ManifestManager(manifest_folder, user_io=self._user_io,
                                               client_cache=self._client_cache,
                                               verify=manifest_verify,
                                               interactive=manifest_interactive)
        else:
            manifest_manager = None

        profile = self.read_profile(profile_name, current_path)

        # Mix Settings, Env vars and scopes between profile and command line
        # (command-line values take precedence via the update_* calls).
        if profile:
            profile.update_settings(settings)
            profile.update_package_settings(package_settings)
            settings = profile.settings
            package_settings = profile.package_settings

            profile.update_env(env)
            profile.update_packages_env(package_env)
            env = profile.env
            package_env = profile.package_env

            profile.update_scopes(scopes)
            scopes = profile.scopes

        objects = self._get_graph(reference, current_path, remote, options, settings, filename,
                                  update, check_updates, manifest_manager, scopes, package_settings,
                                  env, package_env)
        (_, deps_graph, _, registry, conanfile, remote_proxy, loader) = objects

        Printer(self._user_io.out).print_graph(deps_graph, registry)
        # Warn if os doesn't match
        try:
            if detected_os() != loader._settings.os:
                message = '''You are building this package with settings.os='%s' on a '%s' system.
If this is your intention, you can ignore this message.
If not:
     - Check the passed settings (-s)
     - Check your global settings in ~/.conan/conan.conf
     - Remove conaninfo.txt to avoid bad cached settings
''' % (loader._settings.os, detected_os())
                self._user_io.out.warn(message)
        except ConanException:  # Setting os doesn't exist
            pass

        installer = ConanInstaller(self._client_cache, self._user_io, remote_proxy)
        installer.install(deps_graph, build_mode)

        prefix = "PROJECT" if not isinstance(reference, ConanFileReference) else str(reference)
        output = ScopedOutput(prefix, self._user_io.out)

        # Write generators
        tmp = list(conanfile.generators)  # Add the command line specified generators
        tmp.extend(generators)
        conanfile.generators = tmp
        write_generators(conanfile, current_path, output)

        if not isinstance(reference, ConanFileReference):
            # Consumer project: persist the resolved configuration
            content = normalize(conanfile.info.dumps())
            save(os.path.join(current_path, CONANINFO), content)
            output.info("Generated %s" % CONANINFO)
        if not no_imports:
            run_imports(conanfile, current_path, output)
        installer.call_system_requirements(conanfile, output)

        if manifest_manager:
            manifest_manager.print_log()

    def _load_info_file(self, current_path, conanfile, output, info_file, error=False):
        """Load BUILD_INFO (deps_cpp_info) or CONANENV (deps_env_info) from
        current_path onto the conanfile; warn if missing, or raise when
        error=True.
        """
        if info_file == BUILD_INFO:
            class_, attr, gen = DepsCppInfo, "deps_cpp_info", "txt"
        else:
            class_, attr, gen = DepsEnvInfo, "deps_env_info", "env"
        info_file_path = os.path.join(current_path, info_file)
        try:
            deps_info = class_.loads(load(info_file_path))
            setattr(conanfile, attr, deps_info)
        except IOError:
            error_msg = ("%s file not found in %s\nIt is %s for this command\n"
                         "You can generate it using 'conan install -g %s'"
                         % (info_file, current_path, "required" if error else "recommended", gen))
            if not error:
                output.warn(error_msg)
            else:
                raise ConanException(error_msg)
        except ConanException:
            raise ConanException("Parse error in '%s' file in %s" % (info_file, current_path))

    def _load_deps_info(self, current_path, conanfile, output, load_env=True, error=False):
        # Convenience wrapper: always load BUILD_INFO, optionally CONANENV too.
        self._load_info_file(current_path, conanfile, output, BUILD_INFO, error=error)
        if load_env:
            self._load_info_file(current_path, conanfile, output, CONANENV, error=error)

    def source(self, current_path, reference, force):
        """Run the recipe's source() step, either for a local user folder
        (reference is a path) or for an exported reference in the cache.
        """
        if not isinstance(reference, ConanFileReference):
            output = ScopedOutput("PROJECT", self._user_io.out)
            conan_file_path = os.path.join(reference, CONANFILE)
            conanfile = self._loader().load_conan(conan_file_path, output, consumer=True)
            self._load_deps_info(current_path, conanfile, output)
            export_folder = reference
            config_source_local(export_folder, current_path, conanfile, output)
        else:
            output = ScopedOutput(str(reference), self._user_io.out)
            conan_file_path = self._client_cache.conanfile(reference)
            conanfile = self._loader().load_conan(conan_file_path, output, reference=reference)
            self._load_deps_info(current_path, conanfile, output)
            src_folder = self._client_cache.source(reference, conanfile.short_paths)
            export_folder = self._client_cache.export(reference)
            config_source(export_folder, src_folder, conanfile, output, force)

    def imports_undo(self, current_path):
        """Remove the files previously copied by 'conan imports'."""
        undo_imports(current_path, self._user_io.out)

    def imports(self, current_path, reference, conan_file_path, dest_folder):
        """Run the imports() section of the recipe, copying files into
        dest_folder (or current_path).  'reference' may be a user folder path
        or a ConanFileReference in the cache.
        """
        if not isinstance(reference, ConanFileReference):
            output = ScopedOutput("PROJECT", self._user_io.out)
            if not conan_file_path:
                # Prefer conanfile.py, fall back to conanfile.txt
                conan_file_path = os.path.join(reference, CONANFILE)
                if not os.path.exists(conan_file_path):
                    conan_file_path = os.path.join(reference, CONANFILE_TXT)

            if conan_file_path.endswith(".txt"):
                conanfile = self._loader().load_conan_txt(conan_file_path, output)
            else:
                conanfile = self._loader().load_conan(conan_file_path, output, consumer=True)
        else:
            output = ScopedOutput(str(reference), self._user_io.out)
            conan_file_path = self._client_cache.conanfile(reference)
            conanfile = self._loader().load_conan(conan_file_path, output, reference=reference)
        # (continuation of ConanManager.imports(), cut at the previous chunk line)
        self._load_deps_info(current_path, conanfile, output, load_env=False, error=True)
        run_imports(conanfile, dest_folder or current_path, output)

    def local_package(self, current_path, build_folder):
        """Run package() from a local build folder into current_path
        ('conan package' in user space)."""
        if current_path == build_folder:
            raise ConanException("Cannot 'conan package' to the build folder. "
                                 "Please move to another folder and try again")
        output = ScopedOutput("PROJECT", self._user_io.out)
        conan_file_path = os.path.join(build_folder, CONANFILE)
        conanfile = self._loader().load_conan(conan_file_path, output, consumer=True)
        self._load_deps_info(build_folder, conanfile, output)
        packager.create_package(conanfile, build_folder, current_path, output, local=True)

    def package(self, reference, package_id):
        """Re-run package() for one package_id, or for every locally built
        package of the reference when package_id is falsy."""
        # Package paths
        conan_file_path = self._client_cache.conanfile(reference)
        if not os.path.exists(conan_file_path):
            raise ConanException("Package recipe '%s' does not exist" % str(reference))

        if not package_id:
            packages = [PackageReference(reference, packid)
                        for packid in self._client_cache.conan_builds(reference)]
            if not packages:
                raise NotFoundException("%s: Package recipe has not been built locally\n"
                                        "Please read the 'conan package' command help\n"
                                        "Use 'conan install' or 'conan test_package' to build and "
                                        "create binaries" % str(reference))
        else:
            packages = [PackageReference(reference, package_id)]

        for package_reference in packages:
            build_folder = self._client_cache.build(package_reference, short_paths=None)
            if not os.path.exists(build_folder):
                raise NotFoundException("%s: Package binary '%s' folder doesn't exist\n"
                                        "Please read the 'conan package' command help\n"
                                        "Use 'conan install' or 'conan test_package' to build and "
                                        "create binaries"
                                        % (str(reference), package_reference.package_id))
            # The package already exist, we can use short_paths if they were defined
            package_folder = self._client_cache.package(package_reference, short_paths=None)
            # Will read current conaninfo with specified options and load conanfile with them
            output = ScopedOutput(str(reference), self._user_io.out)
            output.info("Re-packaging %s" % package_reference.package_id)
            loader = self._loader(build_folder)
            conanfile = loader.load_conan(conan_file_path, self._user_io.out,
                                          reference=package_reference.conan)
            self._load_deps_info(build_folder, conanfile, output)
            rmdir(package_folder)
            packager.create_package(conanfile, build_folder, package_folder, output)

    def build(self, conanfile_path, current_path, test=False, filename=None, profile_name=None,
              env=None, package_env=None):
        """ Call to build() method saved on the conanfile.py
        param conanfile_path: the original source directory of the user containing a
                            conanfile.py
        """
        logger.debug("Building in %s" % current_path)
        logger.debug("Conanfile in %s" % conanfile_path)

        if filename and filename.endswith(".txt"):
            raise ConanException("A conanfile.py is needed to call 'conan build'")

        conanfile_file = os.path.join(conanfile_path, filename or CONANFILE)

        try:
            # Append env_vars to execution environment and clear when block code ends
            profile = self.read_profile(profile_name, current_path)
            output = ScopedOutput("Project", self._user_io.out)
            if profile:
                profile.update_env(env)
                profile.update_packages_env(package_env)

                env = profile.env
                package_env = profile.package_env

            # NOTE(review): this line overwrites package_env unconditionally —
            # when no profile is given the caller-supplied package_env is
            # discarded (set to None), and when a profile exists it repeats the
            # assignment just above.  Looks unintended; confirm.
            package_env = profile.package_env if profile else None
            loader = self._loader(current_path, env=env, package_env=package_env)
            conan_file = loader.load_conan(conanfile_file, output, consumer=True)
        except NotFoundException:
            # TODO: Auto generate conanfile from requirements file
            raise ConanException("'%s' file is needed for build.\n"
                                 "Create a '%s' and move manually the "
                                 "requirements and generators from '%s' file"
                                 % (CONANFILE, CONANFILE, CONANFILE_TXT))
        try:
            self._load_deps_info(current_path, conan_file, output)

            os.chdir(current_path)
            conan_file._conanfile_directory = conanfile_path
            with environment_append(conan_file.env):
                conan_file.build()

                if test:
                    conan_file.test()
        except ConanException:
            raise  # Raise but not let to reach the Exception except (not print traceback)
        except Exception:
            import traceback
            trace = traceback.format_exc().split('\n')
            raise ConanException("Unable to build it successfully\n%s" % '\n'.join(trace[3:]))

    def upload(self, conan_reference_or_pattern, package_id=None, remote=None, all_packages=None,
               force=False, confirm=False, retry=0, retry_wait=0):
        """If package_id is provided, conan_reference_or_pattern is a ConanFileReference"""

        t1 = time.time()
        remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager,
                                  remote)
        uploader = ConanUploader(self._client_cache, self._user_io, remote_proxy,
                                 self._search_manager, self._loader())

        if package_id:  # Upload package
            ref = ConanFileReference.loads(conan_reference_or_pattern)
            uploader.check_reference(ref)
            uploader.upload_package(PackageReference(ref, package_id), retry=retry,
                                    retry_wait=retry_wait)
        else:  # Upload conans
            uploader.upload_conan(conan_reference_or_pattern, all_packages=all_packages,
                                  force=force, confirm=confirm,
                                  retry=retry, retry_wait=retry_wait)

        logger.debug("====> Time manager upload: %f" % (time.time() - t1))

    def search(self, pattern_or_reference=None, remote=None, ignorecase=True, packages_query=None):
        """ Print the single information saved in conan.vars about all the packages
            or the packages which match with a pattern

            Attributes:
                pattern = string to match packages
                remote = search on another origin to get packages info
                packages_pattern = String query with binary
                                   packages properties: "arch=x86 AND os=Windows"
        """
        printer = Printer(self._user_io.out)

        # Pick the search backend: a remote proxy or the local search manager
        if remote:
            remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager,
                                      remote)
            adapter = remote_proxy
        else:
            adapter = self._search_manager
        if isinstance(pattern_or_reference, ConanFileReference):
            # Exact reference: list its binary packages
            packages_props = adapter.search_packages(pattern_or_reference, packages_query)
            ordered_packages = OrderedDict(sorted(packages_props.items()))
            try:
                recipe_hash = self._client_cache.load_manifest(pattern_or_reference).summary_hash
            except IOError:  # It could not exist in local
                recipe_hash = None
            printer.print_search_packages(ordered_packages, pattern_or_reference,
                                          recipe_hash, packages_query)
        else:
            references = adapter.search(pattern_or_reference, ignorecase)
            printer.print_search_recipes(references, pattern_or_reference)

    def copy(self, reference, package_ids, username, channel, force=False):
        """ Copy or move conanfile (exported) and packages to another user and or channel
        @param reference: ConanFileReference containing the packages to be moved
        @param package_ids: list of ids or [] for all list
        @param username: Destination username
        @param channel: Destination channel
        @param remote: install only from that remote
        """
        output = ScopedOutput(str(reference), self._user_io.out)
        conan_file_path = self._client_cache.conanfile(reference)
        conanfile = self._loader().load_conan(conan_file_path, output)
        copier = PackageCopier(self._client_cache, self._user_io, conanfile.short_paths)
        if not package_ids:
            # Empty list means "all packages currently in the cache"
            packages = self._client_cache.packages(reference)
            if os.path.exists(packages):
                package_ids = os.listdir(packages)
            else:
                package_ids = []
        copier.copy(reference, package_ids, username, channel, force)

    def remove(self, pattern, src=False, build_ids=None, package_ids_filter=None, force=False,
               remote=None):
        """ Remove conans and/or packages
        @param pattern: string to match packages
        @param package_ids: list of ids or [] for all list
        @param remote: search on another origin to get packages info
        @param force: if True, it will be deleted without requesting anything
        """
        remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager, remote)
        remover = ConanRemover(self._client_cache, self._search_manager, self._user_io,
                               remote_proxy)
        remover.remove(pattern, src, build_ids, package_ids_filter, force=force)

    def user(self, remote=None, name=None, password=None):
        """Authenticate (or query the current user) against a remote."""
        remote_proxy = ConanProxy(self._client_cache, self._user_io, self._remote_manager, remote)
        return remote_proxy.authenticate(name, password)

# NOTE(review): residue of a git diff header — this chunk concatenates several
# files.  The definitions below come from conans/client/output.py.
# diff --git a/testbed/conan-io__conan/conans/client/output.py
#          b/testbed/conan-io__conan/conans/client/output.py  (new file)

from colorama import Fore, Style
import six
from conans.util.files import decode_text
from conans.util.env_reader import get_env
from conans.errors import ConanException


class Color(object):
    """ Wrapper around colorama colors that are undefined in importing
    """
    RED = Fore.RED  # @UndefinedVariable
    WHITE = Fore.WHITE  # @UndefinedVariable
    CYAN = Fore.CYAN  # @UndefinedVariable
    GREEN = Fore.GREEN  # @UndefinedVariable
    MAGENTA = Fore.MAGENTA  # @UndefinedVariable
    BLUE = Fore.BLUE  # @UndefinedVariable
    YELLOW = Fore.YELLOW  # @UndefinedVariable
    BLACK = Fore.BLACK  # @UndefinedVariable

    BRIGHT_RED = Style.BRIGHT + Fore.RED  # @UndefinedVariable
    BRIGHT_BLUE = Style.BRIGHT + Fore.BLUE  # @UndefinedVariable
    BRIGHT_YELLOW = Style.BRIGHT + Fore.YELLOW  # @UndefinedVariable
    BRIGHT_GREEN = Style.BRIGHT + Fore.GREEN  # @UndefinedVariable
    BRIGHT_CYAN = Style.BRIGHT + Fore.CYAN  # @UndefinedVariable
    BRIGHT_WHITE = Style.BRIGHT + Fore.WHITE  # @UndefinedVariable
    BRIGHT_MAGENTA = Style.BRIGHT + Fore.MAGENTA  # @UndefinedVariable


# Remap a few colors for dark terminal backgrounds, opt-in via env var.
if get_env("CONAN_COLOR_DARK", 0):
    Color.WHITE = Fore.BLACK
    Color.CYAN = Fore.BLUE
    Color.YELLOW = Fore.MAGENTA
    Color.BRIGHT_WHITE = Fore.BLACK
    Color.BRIGHT_CYAN = Fore.BLUE
    Color.BRIGHT_YELLOW = Fore.MAGENTA
    Color.BRIGHT_GREEN = Fore.GREEN


# NOTE(review): the next keyword opens "class ConanOutput"; its header is split
# across the chunk boundary and continues in the next chunk line.
class
# NOTE(review): continuation of "class ConanOutput" — the "class" keyword sits
# at the end of the previous chunk line.
ConanOutput(object):
    """ wraps an output stream, so it can be pretty colored,
    and auxiliary info, success, warn methods for convenience.
    """

    def __init__(self, stream, color=False):
        self._stream = stream
        self._color = color
        self.werror_active = False  # when True, werror() raises instead of warning

    def is_terminal(self):
        return hasattr(self._stream, "isatty") and self._stream.isatty()

    def writeln(self, data, front=None, back=None):
        self.write(data, front, back, True)

    def write(self, data, front=None, back=None, newline=False):
        """Write data to the stream, optionally wrapped in fore/back color
        codes; colors are only emitted when self._color is enabled."""
        if six.PY2:
            if isinstance(data, str):
                data = decode_text(data)  # Keep python 2 compatibility

        if self._color and (front or back):
            color = "%s%s" % (front or '', back or '')
            end = (Style.RESET_ALL + "\n") if newline else Style.RESET_ALL  # @UndefinedVariable
            data = "%s%s%s" % (color, data, end)
        else:
            if newline:
                data = "%s\n" % data

        try:
            self._stream.write(data)
        except UnicodeError:
            # Stream can't take this text: strip to ASCII rather than crash
            data = data.encode("utf8").decode("ascii", "ignore")
            self._stream.write(data)
        self._stream.flush()

    def info(self, data):
        self.writeln(data, Color.BRIGHT_CYAN)

    def success(self, data):
        self.writeln(data, Color.BRIGHT_GREEN)

    def warn(self, data):
        self.writeln("WARN: " + data, Color.BRIGHT_YELLOW)

    def werror(self, data):
        # "warnings as errors": raise when active, otherwise just warn
        if self.werror_active:
            raise ConanException(data)
        else:
            self.warn(data)

    def error(self, data):
        self.writeln("ERROR: " + data, Color.BRIGHT_RED)

    def input_text(self, data):
        self.write(data, Color.GREEN)

    def rewrite_line(self, line):
        """Rewrite the current terminal line in place (progress-style),
        truncating long lines with an ellipsis in the middle."""
        tmp_color = self._color
        self._color = False
        TOTAL_SIZE = 70
        LIMIT_SIZE = 32  # Hard coded instead of TOTAL_SIZE/2-3 that fails in Py3 float division
        if len(line) > TOTAL_SIZE:
            line = line[0:LIMIT_SIZE] + " ... " + line[-LIMIT_SIZE:]
        self.write("\r%s%s" % (line, " " * (TOTAL_SIZE - len(line))))
        self._stream.flush()
        self._color = tmp_color


class ScopedOutput(ConanOutput):
    # Prefixes every written line with "<scope>: ", sharing the parent
    # output's stream and color configuration.
    def __init__(self, scope, output):
        self.scope = scope
        self._stream = output._stream
        self._color = output._color
        self.werror_active = output.werror_active

    def write(self, data, front=None, back=None, newline=False):
        super(ScopedOutput, self).write("%s: " % self.scope, front, back, False)
        super(ScopedOutput, self).write("%s" % data, Color.BRIGHT_WHITE, back, newline)

# NOTE(review): diff header residue — the definitions below come from
# conans/client/package_copier.py.
# diff --git a/testbed/conan-io__conan/conans/client/package_copier.py
#          b/testbed/conan-io__conan/conans/client/package_copier.py  (new file)

from conans.model.ref import ConanFileReference, PackageReference
import os
from conans.util.files import rmdir
import shutil
from conans.errors import ConanException


class PackageCopier(object):
    """ Class responsible for copying or moving packages from users/channels """

    def __init__(self, paths, user_io, short_paths=False):
        self._user_io = user_io
        self._paths = paths
        self._short_paths = short_paths

    def copy(self, reference, package_ids, username, channel, force=False):
        """Copy the exported recipe and the given package_ids of 'reference'
        to the same name/version under username/channel, prompting before
        overwriting unless force is True."""
        assert(isinstance(reference, ConanFileReference))
        dest_ref = ConanFileReference(reference.name, reference.version, username, channel)
        # Copy export
        export_origin = self._paths.export(reference)
        if not os.path.exists(export_origin):
            raise ConanException("'%s' doesn't exist" % str(reference))
        export_dest = self._paths.export(dest_ref)
        if os.path.exists(export_dest):
            if not force and not self._user_io.request_boolean("'%s' already exist. Override?"
                                                               % str(dest_ref)):
                return
            rmdir(export_dest)
        shutil.copytree(export_origin, export_dest)
        self._user_io.out.info("Copied %s to %s" % (str(reference), str(dest_ref)))

        # Copy packages
        for package_id in package_ids:
            package_origin = PackageReference(reference, package_id)
            package_dest = PackageReference(dest_ref, package_id)
            package_path_origin = self._paths.package(package_origin, self._short_paths)
            package_path_dest = self._paths.package(package_dest, self._short_paths)
            if os.path.exists(package_path_dest):
                if not force and not self._user_io.request_boolean("Package '%s' already exist."
                                                                   " Override?"
                                                                   % str(package_id)):
                    continue
                rmdir(package_path_dest)
            shutil.copytree(package_path_origin, package_path_dest)
            self._user_io.out.info("Copied %s to %s" % (str(package_id), str(dest_ref)))

# NOTE(review): diff header residue — the definitions below come from
# conans/client/packager.py.
# diff --git a/testbed/conan-io__conan/conans/client/packager.py
#          b/testbed/conan-io__conan/conans/client/packager.py  (new file)

from conans.util.files import mkdir, save, rmdir
import os
from conans.util.log import logger
from conans.paths import CONANINFO, CONAN_MANIFEST
from conans.errors import ConanException, format_conanfile_exception
from conans.model.build_info import DEFAULT_RES, DEFAULT_BIN, DEFAULT_LIB, DEFAULT_INCLUDE
import shutil
from conans.model.manifest import FileTreeManifest
from conans.client.output import ScopedOutput
from conans.client.file_copier import FileCopier


def create_package(conanfile, build_folder, package_folder, output, local=False):
    """ copies built artifacts, libs, headers, data, etc from build_folder to
    package folder
    """
    mkdir(package_folder)

    # Make the copy of all the patterns
    output.info("Generating the package")
    output.info("Package folder %s" % (package_folder))
    conanfile.copy = FileCopier(build_folder, package_folder)

    def wrap(dst_folder):
        # Legacy convenience helpers (copy_headers & friends) that forward to
        # conanfile.copy with a fixed destination folder.
        def new_method(pattern, src=""):
            conanfile.copy(pattern, dst_folder, src)
        return new_method

    conanfile.copy_headers = wrap(DEFAULT_INCLUDE)
    conanfile.copy_libs = wrap(DEFAULT_LIB)
    conanfile.copy_bins = wrap(DEFAULT_BIN)
    conanfile.copy_res = wrap(DEFAULT_RES)
    try:
        conanfile.package()
        package_output = ScopedOutput("%s package()" % output.scope, output)
        conanfile.copy.report(package_output, warn=True)
    except Exception as e:
        # package() failed: clean the half-written package folder (cache mode
        # only) and re-raise with recipe context attached.
        if not local:
            os.chdir(build_folder)
            try:
                rmdir(package_folder)
            except Exception as e_rm:
                output.error("Unable to remove package folder %s\n%s" % (package_folder, str(e_rm)))
                output.warn("**** Please delete it manually ****")

        msg = format_conanfile_exception(output.scope, "package", e)
        raise ConanException(msg)

    _create_aux_files(build_folder, package_folder)
    output.success("Package '%s' created" % os.path.basename(package_folder))


def generate_manifest(package_folder):
    # Create the digest for the package
    digest = FileTreeManifest.create(package_folder)
    save(os.path.join(package_folder, CONAN_MANIFEST), str(digest))


def _create_aux_files(build_folder, package_folder):
    """ auxiliary method that creates CONANINFO in
    the package_folder
    """
    try:
        logger.debug("Creating config files to %s" % package_folder)
        shutil.copy(os.path.join(build_folder, CONANINFO), package_folder)

    except IOError:
        raise ConanException("%s does not exist inside of your %s folder. Try to re-build it again"
                             " to solve it." % (CONANINFO, build_folder))

    try:
        # Create the digest for the package
        generate_manifest(package_folder)
    except IOError as exc:
        raise ConanException("Cannot create the manifest file, Try to re-build it again"
                             " to solve it: %s" % exc)

# NOTE(review): diff header residue — the definitions below come from
# conans/client/proxy.py.
# diff --git a/testbed/conan-io__conan/conans/client/proxy.py
#          b/testbed/conan-io__conan/conans/client/proxy.py  (new file)

from conans.client.output import ScopedOutput
from conans.util.files import rmdir
from conans.model.ref import PackageReference
from conans.errors import (ConanException, ConanConnectionError, ConanOutdatedClient,
                           NotFoundException)
from conans.client.remote_registry import RemoteRegistry
from conans.util.log import logger
from conans.client.loader import ConanFileLoader
import os
from conans.paths import rm_conandir
from conans.client.remover import DiskRemover
from conans.util.tracer import log_package_got_from_local_cache,\
    log_recipe_got_from_local_cache


class ConanProxy(object):
    """ Class to access the conan storage, to perform typical tasks as to get packages,
    getting conanfiles, uploading, removing from remote, etc.
    It uses the RemoteRegistry to control where the packages come from.
+ """ + def __init__(self, client_cache, user_io, remote_manager, remote_name, + update=False, check_updates=False, manifest_manager=False): + self._client_cache = client_cache + self._out = user_io.out + self._remote_manager = remote_manager + self._registry = RemoteRegistry(self._client_cache.registry, self._out) + self._remote_name = remote_name + self._update = update + self._check_updates = check_updates or update # Update forces check + self._manifest_manager = manifest_manager + + @property + def registry(self): + return self._registry + + def package_available(self, package_ref, short_paths, check_outdated): + """ + Returns True if there is a local or remote package available (and up to date if check_outdated). + It wont download the package, just check its hash + """ + + output = ScopedOutput(str(package_ref.conan), self._out) + package_folder = self._client_cache.package(package_ref, short_paths=short_paths) + + remote_info = None + # No package in local cache + if not os.path.exists(package_folder): + try: + remote_info = self.get_package_info(package_ref) + except ConanException: + return False # Not local nor remote + + # Maybe we have the package (locally or in remote) but it's outdated + if check_outdated: + if remote_info: + package_hash = remote_info.recipe_hash + else: + package_hash = self._client_cache.read_package_recipe_hash(package_folder) + local_recipe_hash = self._client_cache.load_manifest(package_ref.conan).summary_hash + up_to_date = local_recipe_hash == package_hash + if not up_to_date: + output.info("Outdated package!") + else: + output.info("Package is up to date") + return up_to_date + + return True + + def get_package(self, package_ref, short_paths): + """ obtain a package, either from disk or retrieve from remotes if necessary + and not necessary to build + """ + output = ScopedOutput(str(package_ref.conan), self._out) + package_folder = self._client_cache.package(package_ref, short_paths=short_paths) + + # Check current package 
status + if os.path.exists(package_folder): + if self._check_updates: + read_manifest = self._client_cache.load_package_manifest(package_ref) + try: # get_conan_digest can fail, not in server + upstream_manifest = self.get_package_digest(package_ref) + if upstream_manifest.file_sums != read_manifest.file_sums: + if upstream_manifest.time > read_manifest.time: + output.warn("Current package is older than remote upstream one") + if self._update: + output.warn("Removing it to retrieve or build an updated one") + rmdir(package_folder) + else: + output.warn("Current package is newer than remote upstream one") + except ConanException: + pass + + installed = False + local_package = os.path.exists(package_folder) + if local_package: + output.info('Already installed!') + installed = True + log_package_got_from_local_cache(package_ref) + else: + installed = self._retrieve_remote_package(package_ref, package_folder, + output) + self.handle_package_manifest(package_ref, installed) + return installed + + def _package_outdated(self, package_ref, package_folder): + recipe_hash = self._client_cache.load_manifest(package_ref.conan).summary_hash + package_recipe_hash = self._client_cache.read_package_recipe_hash(package_folder) + return not recipe_hash == package_recipe_hash + + def handle_package_manifest(self, package_ref, installed): + if installed and self._manifest_manager: + remote = self._registry.get_ref(package_ref.conan) + self._manifest_manager.check_package(package_ref, remote) + + def get_recipe(self, conan_reference): + output = ScopedOutput(str(conan_reference), self._out) + + def _refresh(): + export_path = self._client_cache.export(conan_reference) + rmdir(export_path) + # It might need to remove shortpath + rm_conandir(self._client_cache.source(conan_reference)) + current_remote, _ = self._get_remote(conan_reference) + output.info("Retrieving from remote '%s'..." 
% current_remote.name) + self._remote_manager.get_recipe(conan_reference, export_path, current_remote) + if self._update: + output.info("Updated!") + else: + output.info("Installed!") + + # check if it is in disk + conanfile_path = self._client_cache.conanfile(conan_reference) + + if os.path.exists(conanfile_path): + log_recipe_got_from_local_cache(conan_reference) + if self._check_updates: + ret = self.update_available(conan_reference) + if ret != 0: # Found and not equal + remote, ref_remote = self._get_remote(conan_reference) + if ret == 1: + if not self._update: + if remote != ref_remote: # Forced new remote + output.warn("There is a new conanfile in '%s' remote. " + "Execute 'install -u -r %s' to update it." + % (remote.name, remote.name)) + else: + output.warn("There is a new conanfile in '%s' remote. " + "Execute 'install -u' to update it." + % remote.name) + output.warn("Refused to install!") + else: + if remote != ref_remote: + # Delete packages, could be non coherent with new remote + DiskRemover(self._client_cache).remove_packages(conan_reference) + _refresh() + elif ret == -1: + if not self._update: + output.info("Current conanfile is newer " + "than %s's one" % remote.name) + else: + output.error("Current conanfile is newer than %s's one. " + "Run 'conan remove %s' and run install again " + "to replace it." 
% (remote.name, conan_reference)) + + else: + self._retrieve_recipe(conan_reference, output) + + if self._manifest_manager: + remote = self._registry.get_ref(conan_reference) + self._manifest_manager.check_recipe(conan_reference, remote) + + return conanfile_path + + def update_available(self, conan_reference): + """Returns 0 if the conanfiles are equal, 1 if there is an update and -1 if + the local is newer than the remote""" + if not conan_reference: + return 0 + read_manifest, _ = self._client_cache.conan_manifests(conan_reference) + if read_manifest: + try: # get_conan_digest can fail, not in server + upstream_manifest = self.get_conan_digest(conan_reference) + if upstream_manifest.file_sums != read_manifest.file_sums: + return 1 if upstream_manifest.time > read_manifest.time else -1 + except ConanException: + pass + + return 0 + + def _retrieve_recipe(self, conan_reference, output): + """ returns the requested conanfile object, retrieving it from + remotes if necessary. Can raise NotFoundException + """ + def _retrieve_from_remote(remote): + output.info("Trying with '%s'..." 
% remote.name) + export_path = self._client_cache.export(conan_reference) + result = self._remote_manager.get_recipe(conan_reference, export_path, remote) + self._registry.set_ref(conan_reference, remote) + return result + + if self._remote_name: + output.info("Not found, retrieving from server '%s' " % self._remote_name) + remote = self._registry.remote(self._remote_name) + return _retrieve_from_remote(remote) + else: + output.info("Not found, looking in remotes...") + + remotes = self._registry.remotes + for remote in remotes: + logger.debug("Trying with remote %s" % remote.name) + try: + return _retrieve_from_remote(remote) + # If exception continue with the next + except (ConanOutdatedClient, ConanConnectionError) as exc: + output.warn(str(exc)) + if remote == remotes[-1]: # Last element not found + raise ConanConnectionError("All remotes failed") + except NotFoundException as exc: + if remote == remotes[-1]: # Last element not found + logger.debug("Not found in any remote, raising...%s" % exc) + raise NotFoundException("Unable to find '%s' in remotes" + % str(conan_reference)) + + raise ConanException("No remote defined") + + def upload_conan(self, conan_reference, retry, retry_wait): + """ upload to defined remote in (-r=remote), to current remote + or to default remote, in that order. 
+ If the remote is not set, set it + """ + remote, ref_remote = self._get_remote(conan_reference) + + result = self._remote_manager.upload_conan(conan_reference, remote, retry, retry_wait) + if not ref_remote: + self._registry.set_ref(conan_reference, remote) + return result + + def _get_remote(self, conan_ref=None): + # Prioritize -r , then reference registry and then the default remote + ref_remote = self._registry.get_ref(conan_ref) if conan_ref else None + if self._remote_name: + remote = self._registry.remote(self._remote_name) + else: + if ref_remote: + remote = ref_remote + else: + remote = self._registry.default_remote + return remote, ref_remote + + def upload_package(self, package_ref, retry, retry_wait): + remote, current_remote = self._get_remote(package_ref.conan) + + if not current_remote: + self._out.warn("Remote for '%s' not defined, uploading to %s" + % (str(package_ref.conan), remote.name)) + result = self._remote_manager.upload_package(package_ref, remote, retry, retry_wait) + if not current_remote: + self._registry.set_ref(package_ref.conan, remote) + return result + + def get_conan_digest(self, conan_ref): + """ used by update to check the date of packages, require force if older + """ + remote, current_remote = self._get_remote(conan_ref) + result = self._remote_manager.get_conan_digest(conan_ref, remote) + if not current_remote: + self._registry.set_ref(conan_ref, remote) + return result + + def get_package_digest(self, package_ref): + """ used by update to check the date of packages, require force if older + """ + remote, ref_remote = self._get_remote(package_ref.conan) + result = self._remote_manager.get_package_digest(package_ref, remote) + if not ref_remote: + self._registry.set_ref(package_ref.conan, remote) + return result + + def get_package_info(self, package_ref): + """ Gets the package info to check if outdated + """ + remote, ref_remote = self._get_remote(package_ref.conan) + result = 
self._remote_manager.get_package_info(package_ref, remote) + if not ref_remote: + self._registry.set_ref(package_ref.conan, remote) + return result + + def search(self, pattern=None, ignorecase=True): + remote, _ = self._get_remote() + return self._remote_manager.search(remote, pattern, ignorecase) + + def search_remotes(self, pattern=None, ignorecase=True): + if self._remote_name: + remote = self._registry.remote(self._remote_name) + search_result = self._remote_manager.search(remote, pattern, ignorecase) + return search_result + + for remote in self._registry.remotes: + search_result = self._remote_manager.search(remote, pattern, ignorecase) + if search_result: + return search_result + + def search_packages(self, reference, query): + remote, _ = self._get_remote() + return self._remote_manager.search_packages(remote, reference, query) + + def remove(self, conan_ref): + if not self._remote_name: + raise ConanException("Cannot remove, remote not defined") + remote = self._registry.remote(self._remote_name) + result = self._remote_manager.remove(conan_ref, remote) + current_remote = self._registry.get_ref(conan_ref) + if current_remote == remote: + self._registry.remove_ref(conan_ref) + return result + + def remove_packages(self, conan_ref, remove_ids): + if not self._remote_name: + raise ConanException("Cannot remove, remote not defined") + remote = self._registry.remote(self._remote_name) + return self._remote_manager.remove_packages(conan_ref, remove_ids, remote) + + def download_packages(self, reference, package_ids): + assert(isinstance(package_ids, list)) + remote, _ = self._get_remote(reference) + export_path = self._client_cache.export(reference) + self._remote_manager.get_recipe(reference, export_path, remote) + conanfile_path = self._client_cache.conanfile(reference) + loader = ConanFileLoader(None, None, None, None, None, None, None) + conanfile = loader.load_class(conanfile_path) + short_paths = conanfile.short_paths + self._registry.set_ref(reference, 
remote) + output = ScopedOutput(str(reference), self._out) + for package_id in package_ids: + package_ref = PackageReference(reference, package_id) + package_folder = self._client_cache.package(package_ref, short_paths=short_paths) + self._retrieve_remote_package(package_ref, package_folder, output, remote) + + def _retrieve_remote_package(self, package_ref, package_folder, output, remote=None): + + if remote is None: + remote = self._registry.get_ref(package_ref.conan) + if not remote: + output.warn("Package doesn't have a remote defined. " + "Probably created locally and not uploaded") + return False + package_id = str(package_ref.package_id) + try: + output.info("Looking for package %s in remote '%s' " % (package_id, remote.name)) + # Will raise if not found NotFoundException + self._remote_manager.get_package(package_ref, package_folder, remote) + output.success('Package installed %s' % package_id) + return True + except ConanConnectionError: + raise # This shouldn't be skipped + except ConanException as e: + output.warn('Binary for %s not in remote: %s' % (package_id, str(e))) + return False + + def authenticate(self, name, password): + if not name: # List all users, from all remotes + remotes = self._registry.remotes + if not remotes: + self._out.error("No remotes defined") + for remote in remotes: + self._remote_manager.authenticate(remote, None, None) + return + remote, _ = self._get_remote() + return self._remote_manager.authenticate(remote, name, password) diff --git a/testbed/conan-io__conan/conans/client/remote_registry.py b/testbed/conan-io__conan/conans/client/remote_registry.py new file mode 100644 index 0000000000000000000000000000000000000000..acdcc8e53469c0ae7871f0cd93f06a95dc8ee9cf --- /dev/null +++ b/testbed/conan-io__conan/conans/client/remote_registry.py @@ -0,0 +1,183 @@ +import os +from conans.errors import ConanException +from conans.util.files import load, save +from collections import OrderedDict, namedtuple +import fasteners +from 
conans.util.config_parser import get_bool_from_text_value +from conans.util.log import logger + + +default_remotes = """conan.io https://server.conan.io True +""" + +Remote = namedtuple("Remote", "name url verify_ssl") + + +class RemoteRegistry(object): + """ conan_ref: remote + remote is (name, url) + """ + def __init__(self, filename, output): + self._filename = filename + self._output = output + + def _parse(self, contents): + remotes = OrderedDict() + refs = {} + end_remotes = False + # Parse the file + for line in contents.splitlines(): + line = line.strip() + + if not line: + if end_remotes: + raise ConanException("Bad file format, blank line %s" % self._filename) + end_remotes = True + continue + chunks = line.split() + if not end_remotes: + if len(chunks) == 2: # Retro compatibility + ref, remote = chunks + verify_ssl = "True" + elif len(chunks) == 3: + ref, remote, verify_ssl = chunks + else: + raise ConanException("Bad file format, wrong item numbers in line '%s'" % line) + + verify_ssl = get_bool_from_text_value(verify_ssl) + remotes[ref] = (remote, verify_ssl) + else: + ref, remote = chunks + refs[ref] = remote + + return remotes, refs + + def _to_string(self, remotes, refs): + lines = ["%s %s %s" % (ref, remote, verify_ssl) for ref, (remote, verify_ssl) in remotes.items()] + lines.append("") + lines.extend(["%s %s" % (ref, remote) for ref, remote in sorted(refs.items())]) + text = os.linesep.join(lines) + return text + + def _load(self): + try: + contents = load(self._filename) + except: + self._output.warn("Remotes registry file missing, creating default one in %s" + % self._filename) + contents = default_remotes + save(self._filename, contents) + return self._parse(contents) + + def _save(self, remotes, refs): + save(self._filename, self._to_string(remotes, refs)) + + @property + def default_remote(self): + try: + return self.remotes[0] + except: + raise ConanException("No default remote defined in %s" % self._filename) + + @property + def 
remotes(self): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + remotes, _ = self._load() + return [Remote(ref, remote, verify_ssl) for ref, (remote, verify_ssl) in remotes.items()] + + @property + def refs(self): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + _, refs = self._load() + return refs + + def remote(self, name): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + remotes, _ = self._load() + try: + return Remote(name, remotes[name][0], remotes[name][1]) + except KeyError: + raise ConanException("No remote '%s' defined in remotes in file %s" + % (name, self._filename)) + + def get_ref(self, conan_reference): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + remotes, refs = self._load() + remote_name = refs.get(str(conan_reference)) + try: + return Remote(remote_name, remotes[remote_name][0], remotes[remote_name][1]) + except: + return None + + def remove_ref(self, conan_reference, quiet=False): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + conan_reference = str(conan_reference) + remotes, refs = self._load() + try: + del refs[conan_reference] + self._save(remotes, refs) + except: + if not quiet: + self._output.warn("Couldn't delete '%s' from remote registry" + % conan_reference) + + def set_ref(self, conan_reference, remote): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + conan_reference = str(conan_reference) + remotes, refs = self._load() + refs[conan_reference] = remote.name + self._save(remotes, refs) + + def add_ref(self, conan_reference, remote): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + conan_reference = str(conan_reference) + remotes, refs = self._load() + if conan_reference in refs: + raise ConanException("%s already exists. 
Use update" % conan_reference) + if remote not in remotes: + raise ConanException("%s not in remotes" % remote) + refs[conan_reference] = remote + self._save(remotes, refs) + + def update_ref(self, conan_reference, remote): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + conan_reference = str(conan_reference) + remotes, refs = self._load() + if conan_reference not in refs: + raise ConanException("%s does not exist. Use add" % conan_reference) + if remote not in remotes: + raise ConanException("%s not in remotes" % remote) + refs[conan_reference] = remote + self._save(remotes, refs) + + def add(self, remote_name, remote, verify_ssl=True): + def exists_function(remotes): + if remote_name in remotes: + raise ConanException("Remote '%s' already exists in remotes (use update to modify)" + % remote_name) + self._add_update(remote_name, remote, verify_ssl, exists_function) + + def remove(self, remote_name): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + remotes, refs = self._load() + if remote_name not in remotes: + raise ConanException("Remote '%s' not found in remotes" % remote_name) + del remotes[remote_name] + refs = {k: v for k, v in refs.items() if v != remote_name} + self._save(remotes, refs) + + def update(self, remote_name, remote, verify_ssl=True): + def exists_function(remotes): + if remote_name not in remotes: + raise ConanException("Remote '%s' not found in remotes" % remote_name) + self._add_update(remote_name, remote, verify_ssl, exists_function) + + def _add_update(self, remote_name, remote, verify_ssl, exists_function): + with fasteners.InterProcessLock(self._filename + ".lock", logger=logger): + remotes, refs = self._load() + exists_function(remotes) + urls = {r[0]: name for name, r in remotes.items() if name != remote_name} + if remote in urls: + raise ConanException("Remote '%s' already exists with same URL" % urls[remote]) + remotes[remote_name] = (remote, verify_ssl) + self._save(remotes, 
refs) diff --git a/testbed/conan-io__conan/conans/client/runner.py b/testbed/conan-io__conan/conans/client/runner.py new file mode 100644 index 0000000000000000000000000000000000000000..c93f23498b93cde1352313c52e766ba61803f0d2 --- /dev/null +++ b/testbed/conan-io__conan/conans/client/runner.py @@ -0,0 +1,86 @@ +import os +import sys +from subprocess import Popen, PIPE +from conans.util.files import decode_text +from conans.errors import ConanException +import six + + +class ConanRunner(object): + + def __init__(self, print_commands_to_output=False, generate_run_log_file=False, log_run_to_output=True): + self._print_commands_to_output = print_commands_to_output + self._generate_run_log_file = generate_run_log_file + self._log_run_to_output = log_run_to_output + + def __call__(self, command, output, log_filepath=None, cwd=None): + """ + @param command: Command to execute + @param output: Instead of print to sys.stdout print to that stream. Could be None + @param log_filepath: If specified, also log to a file + @param cwd: Move to directory to execute + """ + stream_output = output if output and hasattr(output, "write") else sys.stdout + + if not self._generate_run_log_file: + log_filepath = None + + # Log the command call in output and logger + call_message = "\n----Running------\n> %s\n-----------------\n" % command + if self._print_commands_to_output and stream_output and self._log_run_to_output: + stream_output.write(call_message) + + # No output has to be redirected to logs or buffer or omitted + if output is True and not log_filepath and self._log_run_to_output: + return self._simple_os_call(command, cwd) + elif log_filepath: + if stream_output: + stream_output.write("Logging command output to file '%s'\n" % log_filepath) + with open(log_filepath, "a+") as log_handler: + if self._print_commands_to_output: + log_handler.write(call_message) + return self._pipe_os_call(command, stream_output, log_handler, cwd) + else: + return self._pipe_os_call(command, 
stream_output, None, cwd) + + def _pipe_os_call(self, command, stream_output, log_handler, cwd): + + try: + proc = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=cwd) + except Exception as e: + raise ConanException("Error while executing '%s'\n\t%s" % (command, str(e))) + + def get_stream_lines(the_stream): + while True: + line = the_stream.readline() + if not line: + break + decoded_line = decode_text(line) + if stream_output and self._log_run_to_output: + stream_output.write(decoded_line) + if log_handler: + # Write decoded in PY2 causes some ASCII encoding problems + # tried to open the log_handler binary but same result. + log_handler.write(line if six.PY2 else decoded_line) + + get_stream_lines(proc.stdout) + get_stream_lines(proc.stderr) + + proc.communicate() + ret = proc.returncode + return ret + + def _simple_os_call(self, command, cwd): + if not cwd: + return os.system(command) + else: + try: + old_dir = os.getcwd() + os.chdir(cwd) + result = os.system(command) + except Exception as e: + raise ConanException("Error while executing" + " '%s'\n\t%s" % (command, str(e))) + finally: + os.chdir(old_dir) + return result diff --git a/testbed/conan-io__conan/conans/client/uploader.py b/testbed/conan-io__conan/conans/client/uploader.py new file mode 100644 index 0000000000000000000000000000000000000000..b0e87ff8dd4d6c6342aded5ead74bacb10937b7e --- /dev/null +++ b/testbed/conan-io__conan/conans/client/uploader.py @@ -0,0 +1,91 @@ +import os +from conans.errors import ConanException, NotFoundException +from conans.model.ref import PackageReference, is_a_reference,\ + ConanFileReference +from conans.util.log import logger +import time + + +class ConanUploader(object): + + def __init__(self, paths, user_io, remote_proxy, search_manager, loader): + self._paths = paths + self._user_io = user_io + self._remote_proxy = remote_proxy + self._search_manager = search_manager + self._loader = loader + + def upload_conan(self, pattern, force=False, all_packages=False, 
confirm=False, + retry=None, retry_wait=None): + """Upload all the recipes matching 'pattern'""" + if is_a_reference(pattern): + ref = ConanFileReference.loads(pattern) + export_path = self._paths.export(ref) + if not os.path.exists(export_path): + raise NotFoundException("There is no local conanfile exported as %s" + % str(ref)) + references = [ref, ] + confirm = True + else: + references = self._search_manager.search(pattern) + + if not references: + raise NotFoundException("No packages found matching pattern '%s'" % pattern) + + for conan_ref in references: + upload = True + if not confirm: + msg = "Are you sure you want to upload '%s'?" % str(conan_ref) + upload = self._user_io.request_boolean(msg) + if upload: + self._upload_conan(conan_ref, force, all_packages, retry, retry_wait) + + def _upload_conan(self, conan_ref, force, all_packages, retry, retry_wait): + """Uploads the conans identified by conan_ref""" + if not force: + self._check_package_date(conan_ref) + + self._user_io.out.info("Uploading %s" % str(conan_ref)) + self._remote_proxy.upload_conan(conan_ref, retry, retry_wait) + + if all_packages: + self.check_reference(conan_ref) + for index, package_id in enumerate(self._paths.conan_packages(conan_ref)): + total = len(self._paths.conan_packages(conan_ref)) + self.upload_package(PackageReference(conan_ref, package_id), index + 1, total, + retry, retry_wait) + + def check_reference(self, conan_reference): + try: + conanfile_path = self._paths.conanfile(conan_reference) + conan_file = self._loader.load_class(conanfile_path) + except NotFoundException: + raise NotFoundException("There is no local conanfile exported as %s" + % str(conan_reference)) + + # Can't use build_policy_always here because it's not loaded (only load_class) + if conan_file.build_policy == "always": + raise ConanException("Conanfile has build_policy='always', " + "no packages can be uploaded") + + def upload_package(self, package_ref, index=1, total=1, retry=None, retry_wait=None): + 
"""Uploads the package identified by package_id""" + msg = ("Uploading package %d/%d: %s" % (index, total, str(package_ref.package_id))) + t1 = time.time() + self._user_io.out.info(msg) + self._remote_proxy.upload_package(package_ref, retry, retry_wait) + + logger.debug("====> Time uploader upload_package: %f" % (time.time() - t1)) + + def _check_package_date(self, conan_ref): + try: + remote_conan_digest = self._remote_proxy.get_conan_digest(conan_ref) + except NotFoundException: + return # First upload + + local_digest = self._paths.load_manifest(conan_ref) + + if remote_conan_digest.time > local_digest.time: + raise ConanException("Remote recipe is newer than local recipe: " + "\n Remote date: %s\n Local date: %s" % + (remote_conan_digest.time, local_digest.time)) diff --git a/testbed/conan-io__conan/conans/client/userio.py b/testbed/conan-io__conan/conans/client/userio.py new file mode 100644 index 0000000000000000000000000000000000000000..3f05ea5a927a83419d39072112761bc577c6d170 --- /dev/null +++ b/testbed/conan-io__conan/conans/client/userio.py @@ -0,0 +1,80 @@ +import sys +from conans.client.output import ConanOutput +from conans.model.username import Username +from conans.errors import InvalidNameException, ConanException +import getpass +from six.moves import input as raw_input + + +class UserIO(object): + """Class to interact with the user, used to show messages and ask for information""" + def __init__(self, ins=sys.stdin, out=None): + ''' + Params: + ins: input stream + out: ConanOutput, should have "write" method + ''' + self._ins = ins + if not out: + out = ConanOutput(sys.stdout) + self.out = out + + def request_login(self, remote_name, username=None): + """Request user to input their name and password + :param username If username is specified it only request password""" + user_input = '' + while not username: + try: + self.out.write("Remote '%s' username: " % remote_name) + user_input = self.get_username(remote_name) + username = 
Username(user_input) + except InvalidNameException: + self.out.error('%s is not a valid username' % user_input) + + self.out.write('Please enter a password for "%s" account: ' % username) + try: + pwd = self.get_password(remote_name) + except Exception as e: + raise ConanException('Cancelled pass %s' % e) + return username, pwd + + def get_username(self, remote_name): + """Overridable for testing purpose""" + return raw_input() + + def get_password(self, remote_name): + """Overridable for testing purpose""" + return getpass.getpass("") + + def request_string(self, msg, default_value=None): + """Request user to input a msg + :param msg Name of the msg + """ + if default_value: + self.out.input_text('%s (%s): ' % (msg, default_value)) + else: + self.out.input_text('%s: ' % msg) + s = self._ins.readline().replace("\n", "") + if default_value is not None and s == '': + return default_value + return s + + def request_boolean(self, msg, default_option=None): + """Request user to input a boolean""" + ret = None + while ret is None: + if default_option is True: + s = self.request_string("%s (YES/no)" % msg) + elif default_option is False: + s = self.request_string("%s (NO/yes)" % msg) + else: + s = self.request_string("%s (yes/no)" % msg) + if default_option is not None and s == '': + return default_option + if s.lower() in ['yes', 'y']: + ret = True + elif s.lower() in ['no', 'n']: + ret = False + else: + self.out.error("%s is not a valid answer" % s) + return ret diff --git a/testbed/conan-io__conan/conans/conan.py b/testbed/conan-io__conan/conans/conan.py new file mode 100644 index 0000000000000000000000000000000000000000..a9a6e9f6124961ae5ccbd622f32ea7ffe61e022f --- /dev/null +++ b/testbed/conan-io__conan/conans/conan.py @@ -0,0 +1,10 @@ +from conans.client.command import main +import sys + + +def run(): + main(sys.argv[1:]) + + +if __name__ == '__main__': + run() diff --git a/testbed/conan-io__conan/conans/conan_server.py 
b/testbed/conan-io__conan/conans/conan_server.py new file mode 100644 index 0000000000000000000000000000000000000000..c7244dcd62c9c787aea6281aa0843977ff43ef81 --- /dev/null +++ b/testbed/conan-io__conan/conans/conan_server.py @@ -0,0 +1,9 @@ +from conans.server.server_launcher import main + + +def run(): + main() + + +if __name__ == '__main__': + run() diff --git a/testbed/conan-io__conan/conans/errors.py b/testbed/conan-io__conan/conans/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..b095010da1ca18073b966a6ec79099ae34f495f4 --- /dev/null +++ b/testbed/conan-io__conan/conans/errors.py @@ -0,0 +1,95 @@ +''' + Exceptions raised and handled in Conan server. + These exceptions are mapped between server (as an HTTP response) and client + through the REST API. When an error happens in the server it is translated to an HTTP + error code that is sent to the client. The client reads the server code and raises the + matching exception. + + see return_plugin.py + +''' + + +def format_conanfile_exception(scope, method, exception): + import sys + import traceback + msg = "%s: Error in %s() method" % (scope, method) + try: + tb = sys.exc_info()[2] + _, line, _, contents = traceback.extract_tb(tb, 2)[1] + msg += ", line %d\n\t%s" % (line, contents) + except: + pass + msg += "\n\t%s" % str(exception) + return msg + + +class ConanException(Exception): + """ + Generic conans exception + """ + pass + + +class InvalidNameException(ConanException): + pass + + +class ConanConnectionError(ConanException): + pass + + +class ConanOutdatedClient(ConanException): + pass + + +# Remote exceptions # + +class InternalErrorException(ConanException): + """ + Generic 500 error + """ + pass + + +class RequestErrorException(ConanException): + """ + Generic 400 error + """ + pass + + +class AuthenticationException(ConanException): # 401 + """ + 401 error + """ + pass + + +class ForbiddenException(ConanException): # 403 + """ + 403 error + """ + pass + + +class
NotFoundException(ConanException): # 404 + """ + 404 error + """ + pass + + +class UserInterfaceErrorException(RequestErrorException): + """ + 420 error + """ + pass + + +EXCEPTION_CODE_MAPPING = {InternalErrorException: 500, + RequestErrorException: 400, + AuthenticationException: 401, + ForbiddenException: 403, + NotFoundException: 404, + UserInterfaceErrorException: 420} diff --git a/testbed/conan-io__conan/conans/migrations.py b/testbed/conan-io__conan/conans/migrations.py new file mode 100644 index 0000000000000000000000000000000000000000..8be5f257bc5d3d737bc9842efd4a99cd450cb764 --- /dev/null +++ b/testbed/conan-io__conan/conans/migrations.py @@ -0,0 +1,40 @@ +from conans.util.files import load, save +from conans.model.version import Version +from conans.errors import ConanException +import os + +CONAN_VERSION = "version.txt" + + +class Migrator(object): + + def __init__(self, conf_path, store_path, current_version, out): + self.conf_path = conf_path + self.store_path = store_path + + self.current_version = current_version + self.file_version_path = os.path.join(self.conf_path, CONAN_VERSION) + self.out = out + + def migrate(self): + old_version = self._load_old_version() + if old_version != self.current_version: + self._make_migrations(old_version) + self._update_version_file() + + def _make_migrations(self, old_version): + raise NotImplementedError("Implement in subclass") + + def _update_version_file(self): + try: + save(self.file_version_path, str(self.current_version)) + except Exception: + raise ConanException("Can't write version file in %s" % self.file_version_path) + + def _load_old_version(self): + try: + tmp = load(self.file_version_path) + old_version = Version(tmp) + except: + old_version = None + return old_version diff --git a/testbed/conan-io__conan/conans/model/__init__.py b/testbed/conan-io__conan/conans/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0e6f2ac3e9a65d538a3704d9bfc4a4e53b8ba0a1 --- 
/dev/null +++ b/testbed/conan-io__conan/conans/model/__init__.py @@ -0,0 +1,3 @@ +from .conan_generator import GeneratorManager, Generator + +registered_generators = GeneratorManager() diff --git a/testbed/conan-io__conan/conans/model/build_info.py b/testbed/conan-io__conan/conans/model/build_info.py new file mode 100644 index 0000000000000000000000000000000000000000..72fd249358e77c5200b2bb349b1cb18198cba935 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/build_info.py @@ -0,0 +1,149 @@ +import os +import re +from conans.errors import ConanException +from conans.util.log import logger +import traceback +from collections import OrderedDict + + +DEFAULT_INCLUDE = "include" +DEFAULT_LIB = "lib" +DEFAULT_BIN = "bin" +DEFAULT_RES = "res" + + +class _CppInfo(object): + """ Object that stores all the necessary information to build in C/C++ a + given conans. It is intended to be system independent, translation to + specific systems will be produced from this info + """ + def __init__(self): + self.includedirs = [] # Ordered list of include paths + self.libs = [] # The libs to link against + self.libdirs = [] # Directories to find libraries + self.resdirs = [] # Directories to find resources, data, etc + self.bindirs = [] # Directories to find executables and shared libs + self.defines = [] # preprocessor definitions + self.cflags = [] # pure C flags + self.cppflags = [] # C++ compilation flags + self.sharedlinkflags = [] # linker flags + self.exelinkflags = [] # linker flags + self.rootpath = "" + + +class CppInfo(_CppInfo): + """ Build Information declared to be used by the CONSUMERS of a + conans. That means that consumers must use these flags and configs in order + to build properly.
+ Defined in user CONANFILE, directories are relative at user definition time + """ + def __init__(self, root_folder): + super(CppInfo, self).__init__() + self.rootpath = root_folder # the full path of the package in which the conans is found + self.includedirs.append(DEFAULT_INCLUDE) + self.libdirs.append(DEFAULT_LIB) + self.bindirs.append(DEFAULT_BIN) + self.resdirs.append(DEFAULT_RES) + self.public_deps = [] + + @property + def include_paths(self): + return [os.path.join(self.rootpath, p) + if not os.path.isabs(p) else p for p in self.includedirs] + + @property + def lib_paths(self): + return [os.path.join(self.rootpath, p) + if not os.path.isabs(p) else p for p in self.libdirs] + + @property + def bin_paths(self): + return [os.path.join(self.rootpath, p) + if not os.path.isabs(p) else p for p in self.bindirs] + + +class DepsCppInfo(_CppInfo): + """ Build Information necessary to build a given conans. It contains the + flags, directories and options if its dependencies. The conans CONANFILE + should use these flags to pass them to the underlaying build system (Cmake, make), + so deps info is managed + """ + fields = ["includedirs", "libdirs", "bindirs", "libs", "defines", "cppflags", + "cflags", "sharedlinkflags", "exelinkflags", "rootpath"] + + def __init__(self): + super(DepsCppInfo, self).__init__() + self._dependencies = OrderedDict() + + @property + def dependencies(self): + return self._dependencies.items() + + @property + def deps(self): + return self._dependencies.keys() + + def __getitem__(self, item): + return self._dependencies[item] + + @staticmethod + def loads(text): + pattern = re.compile("^\[([a-zA-Z0-9_:-]+)\]([^\[]+)", re.MULTILINE) + result = DepsCppInfo() + + try: + for m in pattern.finditer(text): + var_name = m.group(1) + lines = [] + for line in m.group(2).splitlines(): + line = line.strip() + if not line or line[0] == "#": + continue + lines.append(line) + if not lines: + continue + tokens = var_name.split("_") + field = tokens[0] + if 
len(tokens) == 2: + dep = tokens[1] + dep_cpp_info = result._dependencies.setdefault(dep, DepsCppInfo()) + if field == "rootpath": + lines = lines[0] + setattr(dep_cpp_info, field, lines) + else: + setattr(result, field, lines) + except Exception as e: + logger.error(traceback.format_exc()) + raise ConanException("There was an error parsing conaninfo.txt: %s" % str(e)) + + return result + + def update(self, dep_cpp_info, conan_ref): + self._dependencies[conan_ref.name] = dep_cpp_info + + def merge_lists(seq1, seq2): + return [s for s in seq1 if s not in seq2] + seq2 + + self.includedirs = merge_lists(self.includedirs, dep_cpp_info.include_paths) + self.libdirs = merge_lists(self.libdirs, dep_cpp_info.lib_paths) + self.bindirs = merge_lists(self.bindirs, dep_cpp_info.bin_paths) + self.libs = merge_lists(self.libs, dep_cpp_info.libs) + + # Note these are in reverse order + self.defines = merge_lists(dep_cpp_info.defines, self.defines) + self.cppflags = merge_lists(dep_cpp_info.cppflags, self.cppflags) + self.cflags = merge_lists(dep_cpp_info.cflags, self.cflags) + self.sharedlinkflags = merge_lists(dep_cpp_info.sharedlinkflags, self.sharedlinkflags) + self.exelinkflags = merge_lists(dep_cpp_info.exelinkflags, self.exelinkflags) + + @property + def include_paths(self): + return self.includedirs + + @property + def lib_paths(self): + return self.libdirs + + @property + def bin_paths(self): + return self.bindirs diff --git a/testbed/conan-io__conan/conans/model/conan_file.py b/testbed/conan-io__conan/conans/model/conan_file.py new file mode 100644 index 0000000000000000000000000000000000000000..bb3e4ec9c54a8b18aea3d6c045ccb82b23fe1ca6 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/conan_file.py @@ -0,0 +1,240 @@ +from conans.model.options import Options, PackageOptions, OptionsValues +from conans.model.requires import Requirements +from conans.model.build_info import DepsCppInfo +from conans import tools # @UnusedImport KEEP THIS! 
Needed for pyinstaller to copy to exe. +from conans.errors import ConanException +from conans.model.env_info import DepsEnvInfo +import os +from conans.util.files import mkdir +from conans.paths import RUN_LOG_NAME + + +def create_options(conanfile): + try: + package_options = PackageOptions(getattr(conanfile, "options", None)) + options = Options(package_options) + + default_options = getattr(conanfile, "default_options", None) + if default_options: + if isinstance(default_options, (list, tuple)): + default_values = OptionsValues(default_options) + elif isinstance(default_options, str): + default_values = OptionsValues.loads(default_options) + else: + raise ConanException("Please define your default_options as list or " + "multiline string") + options.values = default_values + return options + except Exception as e: + raise ConanException("Error while initializing options. %s" % str(e)) + + +def create_requirements(conanfile): + try: + # Actual requirements of this package + if not hasattr(conanfile, "requires"): + return Requirements() + else: + if not conanfile.requires: + return Requirements() + if isinstance(conanfile.requires, tuple): + return Requirements(*conanfile.requires) + else: + return Requirements(conanfile.requires, ) + except Exception as e: + raise ConanException("Error while initializing requirements. %s" % str(e)) + + +def create_settings(conanfile, settings): + try: + defined_settings = getattr(conanfile, "settings", None) + if isinstance(defined_settings, str): + defined_settings = [defined_settings] + current = defined_settings or {} + settings.constraint(current) + return settings + except Exception as e: + raise ConanException("Error while initializing settings. 
%s" % str(e)) + + +def create_exports(conanfile): + if not hasattr(conanfile, "exports"): + return None + else: + if isinstance(conanfile.exports, str): + return (conanfile.exports, ) + return conanfile.exports + + +class ConanFile(object): + """ The base class for all package recipes + """ + + name = None + version = None # Any str, can be "1.1" or whatever + url = None # The URL where this File is located, as github, to collaborate in package + # The license of the PACKAGE, just a shortcut, does not replace or + # change the actual license of the source code + license = None + author = None # Main maintainer/responsible for the package, any format + build_policy = None + short_paths = False + + def __init__(self, output, runner, settings, conanfile_directory, user=None, channel=None): + # User defined generators + self.generators = self.generators if hasattr(self, "generators") else ["txt"] + if isinstance(self.generators, str): + self.generators = [self.generators] + + # User defined options + self.options = create_options(self) + self.requires = create_requirements(self) + self.settings = create_settings(self, settings) + self.exports = create_exports(self) + + # needed variables to pack the project + self.cpp_info = None # Will be initialized at processing time + self.deps_cpp_info = DepsCppInfo() + + # environment variables declared in the package_info + self.env_info = None # Will be initialized at processing time + self.deps_env_info = DepsEnvInfo() + + self.copy = None # initialized at runtime + + # an output stream (writeln, info, warn error) + self.output = output + # something that can run commands, as os.sytem + self._runner = runner + + self._conanfile_directory = conanfile_directory + self.package_folder = None # Assigned at runtime + self._scope = None + + # user specified env variables + self.env = None # Assigned at runtime + self._user = user + self._channel = channel + + @property + def channel(self): + if not self._channel: + self._channel = 
os.getenv("CONAN_CHANNEL") + if not self._channel: + raise ConanException("CONAN_CHANNEL environment variable not defined, " + "but self.channel is used in conanfile") + return self._channel + + @property + def user(self): + if not self._user: + self._user = os.getenv("CONAN_USERNAME") + if not self._user: + raise ConanException("CONAN_USERNAME environment variable not defined, " + "but self.user is used in conanfile") + return self._user + + def collect_libs(self, folder="lib"): + if not self.package_folder: + return [] + lib_folder = os.path.join(self.package_folder, folder) + if not os.path.exists(lib_folder): + self.output.warn("Package folder doesn't exist, can't collect libraries") + return [] + files = os.listdir(lib_folder) + result = [] + for f in files: + name, ext = os.path.splitext(f) + if ext in (".so", ".lib", ".a", ".dylib"): + if ext != ".lib" and name.startswith("lib"): + name = name[3:] + result.append(name) + return result + + @property + def scope(self): + return self._scope + + @scope.setter + def scope(self, value): + self._scope = value + if value.dev: + self.requires.allow_dev = True + try: + if hasattr(self, "dev_requires"): + if isinstance(self.dev_requires, tuple): + self.requires.add_dev(*self.dev_requires) + else: + self.requires.add_dev(self.dev_requires, ) + except Exception as e: + raise ConanException("Error while initializing dev_requirements. 
%s" % str(e)) + + @property + def conanfile_directory(self): + return self._conanfile_directory + + @property + def build_policy_missing(self): + return self.build_policy == "missing" + + @property + def build_policy_always(self): + return self.build_policy == "always" + + def source(self): + pass + + def system_requirements(self): + """ this method can be overwritten to implement logic for system package + managers, as apt-get + + You can define self.global_system_requirements = True, if you want the installation + to be for all packages (not depending on settings/options/requirements) + """ + + def config_options(self): + """ modify options, probably conditioned to some settings. This call is executed + before config_settings. E.g. + if self.settings.os == "Windows": + del self.options.shared # shared/static not supported in win + """ + + def configure(self): + """ modify settings, probably conditioned to some options. This call is executed + after config_options. E.g. + if self.options.header_only: + self.settings.clear() + This is also the place for conditional requirements + """ + + def imports(self): + pass + + def build(self): + self.output.warn("This conanfile has no build step") + + def package(self): + self.output.warn("This conanfile has no package step") + + def package_info(self): + """ define cpp_build_info, flags, etc + """ + + def run(self, command, output=True, cwd=None): + """ runs such a command in the folder the Conan + is defined + """ + retcode = self._runner(command, output, os.path.abspath(RUN_LOG_NAME), cwd) + if retcode != 0: + raise ConanException("Error %d while executing %s" % (retcode, command)) + + def conan_info(self): + """ modify the conans info, typically to narrow values + eg.: conaninfo.package_references = [] + """ + + def test(self): + raise ConanException("You need to create a method 'test' in your test/conanfile.py") + + def __repr__(self): + return 'Conanfile:%s/%s' % (self.name, self.version) diff --git 
class Generator(object):
    """Base class for all conan generators.

    Concrete generators must provide ``content`` (the text to write) and
    ``filename`` (where to write it); ``GeneratorManager.add`` validates
    registrations against this class.
    """
    # NOTE(review): "__metaclass__" is Python-2-only syntax. Under Python 3
    # this is an inert class attribute and the abstract properties are NOT
    # enforced at instantiation time. Kept as-is because this codebase still
    # targets Python 2 (it uses `six` elsewhere) — confirm before modernizing.
    __metaclass__ = ABCMeta

    def __init__(self, conanfile):
        # Cache the conanfile views that concrete generators render from
        self.conanfile = conanfile
        self._deps_build_info = conanfile.deps_cpp_info
        self._build_info = conanfile.cpp_info
        self._deps_env_info = conanfile.deps_env_info
        self._env_info = conanfile.env_info

    @property
    def deps_build_info(self):
        return self._deps_build_info

    @property
    def build_info(self):
        return self._build_info

    @property
    def deps_env_info(self):
        return self._deps_env_info

    @property
    def env_info(self):
        return self._env_info

    @abstractproperty
    def content(self):
        raise NotImplementedError()

    @abstractproperty
    def filename(self):
        raise NotImplementedError()


class GeneratorManager(object):
    """Registry mapping generator names to Generator subclasses."""

    def __init__(self):
        self._known_generators = {}

    def add(self, name, generator_class):
        """Register *generator_class* under *name*.

        Raises ConanException for a duplicate name or a class that is not
        a Generator subclass.
        """
        if name in self._known_generators:
            # Fix: the original raised ConanException("") with an empty,
            # useless message in both error branches.
            raise ConanException("Generator '%s' is already registered" % name)
        elif not issubclass(generator_class, Generator):
            raise ConanException("Generator '%s' is not a subclass of Generator" % name)
        else:
            self._known_generators[name] = generator_class

    def remove(self, name):
        # Unknown names are silently ignored
        if name in self._known_generators:
            del self._known_generators[name]

    @property
    def available(self):
        """List of registered generator names."""
        return list(self._known_generators.keys())

    def __contains__(self, key):
        return key in self._known_generators

    def __getitem__(self, key):
        return self._known_generators[key]
class EnvInfo(object):
    """Environment variables declared by a package.

    Attribute access is dynamic: reading an unknown (or falsy) name
    creates a fresh empty list, and reading a scalar promotes it to a
    one-element list, so both styles work:

        env = EnvInfo()
        env.hola = True
        env.Cosa.append("OTRO")
        env.Cosa.append("MAS")
        env.Cosa = "hello"
        env.Cosa.append("HOLA")
    """

    def __init__(self, root_folder=None):
        self._root_folder_ = root_folder
        self._values_ = {}

    def __getattr__(self, name):
        # Names wrapped in underscores are real instance attributes; fall
        # through to the normal lookup (which raises AttributeError here,
        # since these are only reached when not already set).
        if name.startswith("_") and name.endswith("_"):
            return super(EnvInfo, self).__getattr__(name)

        current = self._values_.get(name)
        if not current:
            # Missing or falsy entries become a fresh, appendable list
            self._values_[name] = []
        elif not isinstance(current, list):
            # Promote scalars so callers can always .append()
            self._values_[name] = [current]
        return self._values_[name]

    def __setattr__(self, name, value):
        # Underscore-wrapped names bypass the variable store
        if name.startswith("_") and name.endswith("_"):
            return super(EnvInfo, self).__setattr__(name, value)
        self._values_[name] = value

    @property
    def vars(self):
        """The raw {name: value-or-list} mapping."""
        return self._values_
result._dependencies_.setdefault(library, EnvInfo()).vars[var_name] = lines + else: + result.vars[var_name] = lines + + return result + + @property + def dependencies(self): + return self._dependencies_.items() + + @property + def deps(self): + return self._dependencies_.keys() + + def __getitem__(self, item): + return self._dependencies_[item] + + def update(self, dep_env_info, conan_ref): + self._dependencies_[conan_ref.name] = dep_env_info + + # With vars if its setted the keep the setted value + for varname, value in dep_env_info.vars.items(): + if varname not in self.vars: + self.vars[varname] = value + elif isinstance(self.vars[varname], list): + if isinstance(value, list): + self.vars[varname].extend(value) + else: + self.vars[varname].append(value) + else: + logger.warn("DISCARDED variable %s=%s from %s" % (varname, value, str(conan_ref))) diff --git a/testbed/conan-io__conan/conans/model/info.py b/testbed/conan-io__conan/conans/model/info.py new file mode 100644 index 0000000000000000000000000000000000000000..4ead8f114423cac42c3843d3609451e4129c9173 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/info.py @@ -0,0 +1,276 @@ +from conans.util.sha import sha1 +from conans.model.ref import PackageReference +from conans.errors import ConanException +from conans.util.config_parser import ConfigParser +from conans.util.files import load +from conans.model.values import Values +from conans.model.options import OptionsValues +from conans.model.scope import Scopes + + +class RequirementInfo(object): + def __init__(self, value_str, indirect=False): + """ parse the input into fields name, version... 
+ """ + ref = PackageReference.loads(value_str) + self.package = ref + self.full_name = ref.conan.name + self.full_version = ref.conan.version + self.full_user = ref.conan.user + self.full_channel = ref.conan.channel + self.full_package_id = ref.package_id + + # sha values + if indirect: + self.name = self.version = None + else: + self.name = self.full_name + self.version = self.full_version.stable() + self.user = self.channel = self.package_id = None + + def dumps(self): + return "/".join([n for n in [self.name, self.version, self.user, self.channel, + self.package_id] if n]) + + @property + def sha(self): + return "/".join([str(n) for n in [self.name, self.version, self.user, self.channel, + self.package_id]]) + + def serialize(self): + return str(self.package) + + @staticmethod + def deserialize(data): + ret = RequirementInfo(data) + return ret + + def semver(self): + self.name = self.full_name + self.version = self.full_version.stable() + self.user = self.channel = self.package_id = None + + def full_version(self): + self.name = self.full_name + self.version = self.full_version + self.user = self.channel = self.package_id = None + + def full_recipe(self): + self.name = self.full_name + self.version = self.full_version + self.user = self.full_user + self.channel = self.full_channel + self.package_id = None + + def full_package(self): + self.name = self.full_name + self.version = self.full_version + self.user = self.full_user + self.channel = self.full_channel + self.package_id = self.full_package_id + + +class RequirementsInfo(object): + def __init__(self, requires, non_devs_requirements): + # {PackageReference: RequirementInfo} + self._non_devs_requirements = non_devs_requirements + self._data = {r: RequirementInfo(str(r)) for r in requires} + + def clear(self): + self._data = {} + + def remove(self, *args): + for name in args: + del self._data[self._get_key(name)] + + def add(self, indirect_reqs): + """ necessary to propagate from upstream the real + package 
class RequirementsList(list):
    """A plain list of PackageReference with text (de)serialization helpers."""

    @staticmethod
    def loads(text):
        """Parse the multi-line text produced by dumps()."""
        return RequirementsList.deserialize(text.splitlines())

    def dumps(self):
        """One reference per line, sorted."""
        return "\n".join(self.serialize())

    def serialize(self):
        # Sort the references first, then render: reference ordering may
        # differ from plain string ordering.
        ordered = sorted(self)
        return [str(item) for item in ordered]

    @staticmethod
    def deserialize(data):
        return RequirementsList(map(PackageReference.loads, data))
result.append(indent(self.full_requires.dumps())) + result.append("\n[full_options]") + result.append(indent(self.full_options.dumps())) + result.append("\n[scope]") + if self.scope: + result.append(indent(self.scope.dumps())) + result.append("\n[recipe_hash]\n%s" % self.recipe_hash) + return '\n'.join(result) + + def __eq__(self, other): + """ currently just for testing purposes + """ + return self.dumps() == other.dumps() + + def __ne__(self, other): + return not self.__eq__(other) + + @staticmethod + def load_file(conan_info_path): + """ load from file + """ + try: + config_text = load(conan_info_path) + except IOError: + raise ConanException("Does not exist %s" % (conan_info_path)) + else: + return ConanInfo.loads(config_text) + + def package_id(self): + """ The package_id of a conans is the sha1 of its specific requirements, + options and settings + """ + computed_id = getattr(self, "_package_id", None) + if computed_id: + return computed_id + result = [] + result.append(self.settings.sha) + result.append(self.options.sha(self._non_devs_requirements)) + result.append(self.requires.sha) + self._package_id = sha1('\n'.join(result).encode()) + return self._package_id + + def serialize(self): + conan_info_json = {"settings": self.settings.serialize(), + "full_settings": self.full_settings.serialize(), + "options": self.options.serialize(), + "full_options": self.full_options.serialize(), + "requires": self.requires.serialize(), + "full_requires": self.full_requires.serialize(), + "recipe_hash": self.recipe_hash} + return conan_info_json + + def serialize_min(self): + """ + This info will be shown in search results. 
+ """ + conan_info_json = {"settings": dict(self.settings.serialize()), + "options": dict(self.options.serialize()["options"]), + "full_requires": self.full_requires.serialize(), + "recipe_hash": self.recipe_hash} + return conan_info_json diff --git a/testbed/conan-io__conan/conans/model/manifest.py b/testbed/conan-io__conan/conans/model/manifest.py new file mode 100644 index 0000000000000000000000000000000000000000..28225b91de302fc554e83a1f79c39d42ea7370cc --- /dev/null +++ b/testbed/conan-io__conan/conans/model/manifest.py @@ -0,0 +1,83 @@ +import os +import calendar +import time +from conans.util.files import md5sum, md5 +from conans.paths import PACKAGE_TGZ_NAME, EXPORT_TGZ_NAME, CONAN_MANIFEST, CONANFILE +from conans.errors import ConanException +import datetime + + +class FileTreeManifest(object): + + def __init__(self, time, file_sums): + """file_sums is a dict with filepaths and md5's: {filepath/to/file.txt: md5}""" + self.time = time + self.file_sums = file_sums + + def __repr__(self): + ret = "%s\n" % (self.time) + for filepath, file_md5 in sorted(self.file_sums.items()): + ret += "%s: %s\n" % (filepath, file_md5) + return ret + + @property + def summary_hash(self): + ret = "" # Do not include the timestamp in the summary hash + for filepath, file_md5 in sorted(self.file_sums.items()): + ret += "%s: %s\n" % (filepath, file_md5) + return md5(ret) + + @property + def time_str(self): + return datetime.datetime.fromtimestamp(int(self.time)).strftime('%Y-%m-%d %H:%M:%S') + + @staticmethod + def loads(text): + """ parses a string representation, generated with __repr__ of a + ConanDigest + """ + tokens = text.split("\n") + time = int(tokens[0]) + file_sums = {} + for md5line in tokens[1:]: + if md5line: + filename, file_md5 = md5line.split(": ") + if not discarded_file(filename): + file_sums[filename] = file_md5 + return FileTreeManifest(time, file_sums) + + @classmethod + def create(cls, folder): + """ Walks a folder and create a FileTreeManifest for it, 
def option_not_exist_msg(option_name, existing_options):
    """ Someone is referencing an option that is not available in the current package
    options
    """
    result = ["'options.%s' doesn't exist" % option_name]
    # Fix: "%" binds tighter than "or", so the original
    #   "Possible options are %s" % existing_options or "none"
    # formatted first and the "none" fallback was unreachable (a formatted
    # string is always truthy). Parenthesize so an empty option list really
    # reports "none".
    result.append("Possible options are %s" % (existing_options or "none"))
    return "\n".join(result)
PackageOptionValue(option_value) + + def update(self, other): + assert isinstance(other, PackageOptionValues) + self._dict.update(other._dict) + + def propagate_upstream(self, down_package_values, down_ref, own_ref, output, package_name): + if not down_package_values: + return + + assert isinstance(down_package_values, PackageOptionValues) + for (name, value) in down_package_values.items(): + current_value = self._dict.get(name) + if value == current_value: + continue + + modified = self._modified.get(name) + if modified is not None: + modified_value, modified_ref = modified + output.werror("%s tried to change %s option %s:%s to %s\n" + "but it was already assigned to %s by %s" + % (down_ref, own_ref, package_name, name, value, + modified_value, modified_ref)) + else: + self._modified[name] = (value, down_ref) + self._dict[name] = value + + def serialize(self): + return self.items() + + @property + def sha(self): + result = [] + for name, value in self.items(): + # It is important to discard None values, so migrations in settings can be done + # without breaking all existing packages SHAs, by adding a first "None" option + # that doesn't change the final sha + if value: + result.append("%s=%s" % (name, value)) + return sha1('\n'.join(result).encode()) + + +class OptionsValues(object): + """ static= True, + Boost.static = False, + Poco.optimized = True + """ + def __init__(self, values=None): + self._package_values = PackageOptionValues() + self._reqs_options = {} # {name("Boost": PackageOptionValues} + + if not values: + return + + # convert tuple "Pkg:option=value", "..." 
to list of tuples(name, value) + if isinstance(values, tuple): + new_values = [] + for v in values: + option, value = v.split("=") + new_values.append((option.strip(), value.strip())) + values = new_values + + # handle list of tuples (name, value) + for (k, v) in values: + tokens = k.split(":") + if len(tokens) == 2: + package, option = tokens + package_values = self._reqs_options.setdefault(package.strip(), + PackageOptionValues()) + package_values.add_option(option, v) + else: + self._package_values.add_option(k, v) + + def update(self, other): + self._package_values.update(other._package_values) + for package_name, package_values in other._reqs_options.items(): + pkg_values = self._reqs_options.setdefault(package_name, PackageOptionValues()) + pkg_values.update(package_values) + + def scope_options(self, name): + self._reqs_options.setdefault(name, PackageOptionValues()).update(self._package_values) + self._package_values = PackageOptionValues() + + def descope_options(self, name): + package_values = self._reqs_options.pop(name, None) + if package_values: + self._package_values.update(package_values) + + def __getitem__(self, item): + return self._reqs_options.setdefault(item, PackageOptionValues()) + + def __setitem__(self, item, value): + self._reqs_options[item] = value + + def pop(self, item): + return self._reqs_options.pop(item, None) + + def __repr__(self): + return self.dumps() + + def __getattr__(self, attr): + return getattr(self._package_values, attr) + + def copy(self): + result = OptionsValues() + result._package_values = self._package_values.copy() + for k, v in self._reqs_options.items(): + result._reqs_options[k] = v.copy() + return result + + def __setattr__(self, attr, value): + if attr[0] == "_": + return super(OptionsValues, self).__setattr__(attr, value) + return setattr(self._package_values, attr, value) + + def clear_indirect(self): + for v in self._reqs_options.values(): + v.clear() + + def as_list(self): + result = [] + options_list = 
class PackageOption(object):
    """A single recipe option: a value constrained to a declared range
    of possible values, or unconstrained when the range is "ANY".
    """

    def __init__(self, possible_values, name):
        self._name = name
        self._value = None
        if possible_values == "ANY":
            self._possible_values = "ANY"
        else:
            # Normalize to sorted strings so comparisons are homogeneous
            self._possible_values = sorted(str(v) for v in possible_values)

    def __bool__(self):
        value = self._value
        return bool(value) and value.lower() not in _falsey_options

    # Python 2 truthiness hook — same implementation
    __nonzero__ = __bool__

    def __str__(self):
        return str(self._value)

    def _check_option_value(self, value):
        """Raise ConanException if *value* falls outside the declared range."""
        allowed = self._possible_values
        if allowed != "ANY" and value not in allowed:
            raise ConanException(option_wrong_value_msg(self._name, value, allowed))

    def __eq__(self, other):
        if other is None:
            return self._value is None
        candidate = str(other)
        # Comparing against an out-of-range value is an error, not False
        self._check_option_value(candidate)
        return candidate == str(self)

    def __ne__(self, other):
        return not self == other

    def remove(self, values):
        """Drop *values* from the allowed range (no-op for "ANY")."""
        if self._possible_values == "ANY":
            return
        if not isinstance(values, (list, tuple, set)):
            values = [values]
        removed = set(str(v) for v in values)
        self._possible_values = [v for v in self._possible_values
                                 if v not in removed]
        # The current value must still be legal after narrowing
        if self._value is not None:
            self._check_option_value(self._value)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, v):
        candidate = str(v)
        self._check_option_value(candidate)
        self._value = candidate

    def validate(self):
        """Raise if no value was assigned and None is not an allowed value."""
        if self._value is None and "None" not in self._possible_values:
            raise ConanException(option_undefined_msg(self._name))
__delattr__(self, field): + assert field[0] != "_", "ERROR %s" % field + self._check_field(field) + del self._data[field] + + def __setattr__(self, field, value): + if field[0] == "_" or field.startswith("values"): + return super(PackageOptions, self).__setattr__(field, value) + + self._check_field(field) + self._data[field].value = value + + @property + def values(self): + result = PackageOptionValues() + for field, package_option in self._data.items(): + result.add_option(field, package_option.value) + return result + + def _items(self): + result = [] + for field, package_option in sorted(list(self._data.items())): + result.append((field, package_option.value)) + return result + + def items(self): + return self._items() + + def iteritems(self): + return self._items() + + @values.setter + def values(self, vals): + assert isinstance(vals, PackageOptionValues) + for (name, value) in vals.items(): + self._check_field(name) + self._data[name].value = value + + def propagate_upstream(self, package_values, down_ref, own_ref, output): + if not package_values: + return + + for (name, value) in package_values.items(): + current_value = self._data.get(name) + if value == current_value: + continue + + modified = self._modified.get(name) + if modified is not None: + modified_value, modified_ref = modified + output.werror("%s tried to change %s option %s to %s\n" + "but it was already assigned to %s by %s" + % (down_ref, own_ref, name, value, modified_value, modified_ref)) + else: + self._modified[name] = (value, down_ref) + self._check_field(name) + self._data[name].value = value + + +class Options(object): + """ all options of a package, both its own options and the upstream + ones. 
+ Owned by conanfile + """ + def __init__(self, options): + assert isinstance(options, PackageOptions) + self._package_options = options + # Addressed only by name, as only 1 configuration is allowed + # if more than 1 is present, 1 should be "private" requirement and its options + # are not public, not overridable + self._deps_package_values = {} # {name("Boost": PackageOptionValues} + + @property + def deps_package_values(self): + return self._deps_package_values + + def clear(self): + self._package_options.clear() + + def __getitem__(self, item): + return self._deps_package_values.setdefault(item, PackageOptionValues()) + + def __getattr__(self, attr): + return getattr(self._package_options, attr) + + def __setattr__(self, attr, value): + if attr[0] == "_" or attr == "values": + return super(Options, self).__setattr__(attr, value) + return setattr(self._package_options, attr, value) + + def __delattr__(self, field): + try: + self._package_options.__delattr__(field) + except ConanException: + pass + + @property + def values(self): + result = OptionsValues() + result._package_values = self._package_options.values + for k, v in self._deps_package_values.items(): + result._reqs_options[k] = v.copy() + return result + + @values.setter + def values(self, v): + assert isinstance(v, OptionsValues) + self._package_options.values = v._package_values + self._deps_package_values.clear() + for k, v in v._reqs_options.items(): + self._deps_package_values[k] = v.copy() + + def propagate_upstream(self, down_package_values, down_ref, own_ref, output): + """ used to propagate from downstream the options to the upper requirements + """ + if not down_package_values: + return + + assert isinstance(down_package_values, dict) + option_values = down_package_values.get(own_ref.name) + self._package_options.propagate_upstream(option_values, down_ref, own_ref, output) + for name, option_values in sorted(list(down_package_values.items())): + if name != own_ref.name: + pkg_values = 
def _clean_value(value):
    '''Strip surrounding whitespace and then one pair of matching quotes
    (double first, then single) from the value. EX:
    key="value " => str('value ')
    '''
    text = value.strip()
    for quote in ('"', "'"):
        # a lone quote character is left untouched
        if text.startswith(quote) and text.endswith(quote) and text != quote:
            text = text[1:-1]
    return text
env and not env.startswith("#"): + if "=" not in env: + raise ConanException("Invalid env line '%s'" % env) + package_name, name, value = get_package_name_value(env) + if package_name: + obj._package_env[package_name][name] = value + else: + obj._env[name] = value + + obj._order() + return obj + except ConanException: + raise + except Exception as exc: + raise ConanException("Error parsing the profile text file: %s" % str(exc)) + + def dumps(self): + self._order() # gets in order the settings + + def dump_simple_items(items, result): + for name, value in items: + result.append("%s=%s" % (name, value)) + + def dump_package_items(items, result): + for package, values in items: + for name, value in values.items(): + result.append("%s:%s=%s" % (package, name, value)) + + result = ["[settings]"] + dump_simple_items(self._settings.items(), result) + dump_package_items(self._package_settings.items(), result) + + result.append("[scopes]") + if self.scopes[_root].get("dev", None): + # FIXME: Ugly _root import + del self.scopes[_root]["dev"] # Do not include dev + scopes_txt = self.scopes.dumps() + result.append(scopes_txt) + + result.append("[env]") + dump_simple_items(self._env.items(), result) + dump_package_items(self._package_env.items(), result) + + return "\n".join(result).replace("\n\n", "\n") + + def update_settings(self, new_settings): + '''Mix the specified settings with the current profile. + Specified settings are prioritized to profile''' + # apply the current profile + if new_settings: + self._settings.update(new_settings) + self._order() + + def update_package_settings(self, package_settings): + '''Mix the specified package settings with the specified profile. 
+ Specified package settings are prioritized to profile''' + for package_name, settings in self._package_settings.items(): + if package_name in package_settings: + settings.update(dict(package_settings[package_name])) + + # The rest of new packages settings + for package_name, settings in package_settings.items(): + if package_name not in self._package_settings: + self._package_settings[package_name].update(dict(settings)) + + self._order() + + def _mix_env_with_new(self, env_dict, new_env): + + res_env = OrderedDict() + for name, value in new_env: + if name in env_dict: + del env_dict[name] + res_env[name] = value # Insert first in the result + + for name, value in env_dict.items(): + res_env[name] = value # Insert the rest of env vars at the end + + return res_env + + def update_env(self, new_env): + '''Priorize new_env to override the current envs''' + if not new_env: + return + self._env = self._mix_env_with_new(self._env, new_env) + + def update_packages_env(self, new_packages_env): + '''Priorize new_packages_env to override the current package_env''' + if not new_packages_env: + return + res_env = defaultdict(OrderedDict) + + # Mix the common packages env + for package, env_vars in self._package_env.items(): + new_env = new_packages_env.get(package, []) + res_env[package] = self._mix_env_with_new(env_vars, new_env) + + # The rest of new packages env variables + for package, env_vars in new_packages_env.items(): + if package not in res_env: + for name, value in env_vars: + res_env[package][name] = value # Insert the rest of env vars at the end + + self._package_env = res_env + + def update_scopes(self, new_scopes): + '''Mix the specified settings with the current profile. + Specified settings are prioritized to profile''' + # apply the current profile + if new_scopes: + self.scopes.update(new_scopes) + self._order() + + def _order(self): + + def order_single_settings(settings): + ret = OrderedDict() + # Insert in a good order + for func in [lambda x: "." 
not in x, # First the principal settings + lambda x: "." in x]: + for name, value in settings.items(): + if func(name): + ret[name] = value + return ret + + # Order global settings + self._settings = order_single_settings(self._settings) + + # Order package settings + for package_name, settings in self._package_settings.items(): + self._package_settings[package_name] = order_single_settings(settings) + + tmp_env = copy.copy(self._env) + self._env = OrderedDict() + for ordered_key in sorted(tmp_env): + self._env[ordered_key] = tmp_env[ordered_key] diff --git a/testbed/conan-io__conan/conans/model/ref.py b/testbed/conan-io__conan/conans/model/ref.py new file mode 100644 index 0000000000000000000000000000000000000000..0d74358df69524500f199744bd2bfbacff255695 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/ref.py @@ -0,0 +1,109 @@ +from collections import namedtuple +import re +from conans.errors import ConanException, InvalidNameException +from conans.model.version import Version + + +def validate_conan_name(name, version=False): + """Check for name compliance with pattern rules""" + try: + if name == '*': + return name + if ConanFileReference.validation_pattern.match(name) is None: + if version and name.startswith("[") and name.endswith("]"): + return name + if len(name) > ConanFileReference.max_chars: + message = "'%s' is too long. Valid names must " \ + "contain at most %s characters." % (name, + ConanFileReference.max_chars) + elif len(name) < ConanFileReference.min_chars: + message = "'%s' is too short. Valid names must contain"\ + " at least %s characters." % (name, ConanFileReference.min_chars) + else: + message = "'%s' is an invalid name. 
Valid names MUST begin with a "\ + "letter or number, have between %s-%s chars, including "\ + "letters, numbers, underscore,"\ + " dot and dash" % (name, ConanFileReference.min_chars, + ConanFileReference.max_chars) + raise InvalidNameException(message) + return name + except AttributeError: + raise InvalidNameException('Empty name provided', None) + + +class ConanFileReference(namedtuple("ConanFileReference", "name version user channel")): + """ Full reference of a conans, e.g.: + opencv/2.4.10@lasote/testing + """ + max_chars = 40 + min_chars = 2 + base_er = "[a-zA-Z0-9_]+[a-zA-Z0-9_\.-]{%s,%s}" % (min_chars - 1, max_chars) + regular_expression = "^%s$" % base_er + validation_pattern = re.compile(regular_expression) + whitespace_pattern = re.compile(r"\s+") + sep_pattern = re.compile("@|/") + + def __new__(cls, name, version, user, channel, validate=True): + """Simple name creation. + @param name: string containing the desired name + @param validate: checks for valid complex name. default True + """ + if validate: + name = validate_conan_name(name) + version = validate_conan_name(version, True) + user = validate_conan_name(user) + channel = validate_conan_name(channel) + version = Version(version) + return super(cls, ConanFileReference).__new__(cls, name, version, user, channel) + + @staticmethod + def loads(text, validate=True): + """ Parses a text string to generate a ConanFileReference object + """ + text = ConanFileReference.whitespace_pattern.sub("", text) + tokens = ConanFileReference.sep_pattern.split(text) + try: + name = tokens[0] + version = tokens[1] + user = tokens[2] + channel = tokens[3] + except IndexError: + raise ConanException("Wrong package recipe reference %s\nWrite something like " + "OpenCV/1.0.6@phil/stable" % text) + return ConanFileReference(name, version, user, channel, validate) + + def __repr__(self): + return "%s/%s@%s/%s" % (self.name, self.version, self.user, self.channel) + + def package_ref(self, conan_info): + package_id = 
def is_a_reference(ref):
    """Return True if *ref* parses as a full conan reference
    (name/version@user/channel), False otherwise.

    :param ref: candidate reference text
    """
    try:
        ConanFileReference.loads(ref)
        return True
    except Exception:
        # was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; any parsing/validation failure means "not a ref"
        return False
only + needed for building or testing, but not affects the package hash at all + """ + self.conan_reference = conan_reference + self.range_reference = conan_reference + self.private = private + self.override = override + self.dev = dev + + @property + def version_range(self): + """ returns the version range expression, without brackets [] + or None if it is not an expression + """ + version = self.range_reference.version + if version.startswith("[") and version.endswith("]"): + return version[1:-1] + + @property + def is_resolved(self): + """ returns True if the version_range reference has been already resolved to a + concrete reference + """ + return self.conan_reference != self.range_reference + + def __repr__(self): + return ("%s" % str(self.conan_reference) + (" P" if self.private else "")) + + def __eq__(self, other): + return (self.override == other.override and + self.conan_reference == other.conan_reference and + self.private == other.private and + self.dev == other.dev) + + def __ne__(self, other): + return not self.__eq__(other) + + +class Requirements(OrderedDict): + """ {name: Requirement} in order, e.g. 
{"Hello": Requirement for Hello} + """ + + def __init__(self, *args): + super(Requirements, self).__init__() + self.allow_dev = False + for v in args: + if isinstance(v, tuple): + override = private = dev = False + ref = v[0] + for elem in v[1:]: + if elem == "override": + override = True + elif elem == "private": + private = True + else: + raise ConanException("Unknown requirement config %s" % elem) + self.add(ref, private=private, override=override, dev=dev) + else: + self.add(v) + + def add_dev(self, *args): + for v in args: + if isinstance(v, tuple): + override = private = False + ref = v[0] + for elem in v[1:]: + if elem == "override": + override = True + elif elem == "private": + private = True + else: + raise ConanException("Unknown requirement config %s" % elem) + self.add(ref, private=private, override=override, dev=True) + else: + self.add(v, dev=True) + + def copy(self): + """ We need a custom copy as the normal one requires __init__ to be + properly defined. This is not a deep-copy, in fact, requirements in the dict + are changed by RequireResolver, and are propagated upstream + """ + result = Requirements() + for name, req in self.items(): + result[name] = req + return result + + def iteritems(self): # FIXME: Just a trick to not change default testing conanfile for py3 + return self.items() + + def add(self, reference, private=False, override=False, dev=False): + """ to define requirements by the user in text, prior to any propagation + """ + assert isinstance(reference, six.string_types) + if dev and not self.allow_dev: + return + + conan_reference = ConanFileReference.loads(reference) + name = conan_reference.name + + new_requirement = Requirement(conan_reference, private, override, dev) + old_requirement = self.get(name) + if old_requirement and old_requirement != new_requirement: + raise ConanException("Duplicated requirement %s != %s" + % (old_requirement, new_requirement)) + else: + self[name] = new_requirement + + def update(self, down_reqs, 
output, own_ref, down_ref): + """ Compute actual requirement values when downstream values are defined + param down_reqs: the current requirements as coming from downstream to override + current requirements + param own_ref: ConanFileReference of the current conanfile + param down_ref: ConanFileReference of the downstream that is overriding values or None + return: new Requirements() value to be passed upstream + """ + + assert isinstance(down_reqs, Requirements) + assert isinstance(own_ref, ConanFileReference) if own_ref else True + assert isinstance(down_ref, ConanFileReference) if down_ref else True + + new_reqs = down_reqs.copy() + if own_ref: + new_reqs.pop(own_ref.name, None) + for name, req in self.items(): + if req.private or req.dev: + continue + if name in down_reqs: + other_req = down_reqs[name] + # update dependency + other_ref = other_req.conan_reference + if other_ref and other_ref != req.conan_reference: + output.info("%s requirement %s overriden by %s to %s " + % (own_ref, req.conan_reference, down_ref or "your conanfile", + other_ref)) + req.conan_reference = other_ref + + new_reqs[name] = req + return new_reqs + + def __call__(self, conan_reference, private=False, override=False, dev=False): + self.add(conan_reference, private, override, dev) + + def __repr__(self): + result = [] + for req in self.values(): + result.append(str(req)) + return '\n'.join(result) diff --git a/testbed/conan-io__conan/conans/model/scope.py b/testbed/conan-io__conan/conans/model/scope.py new file mode 100644 index 0000000000000000000000000000000000000000..375ff76749d36a520925862ad0c0b02f7a67e9e0 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/scope.py @@ -0,0 +1,97 @@ +from collections import defaultdict +from conans.errors import ConanException + + +class Scope(dict): + """ the set of possible scopes than a package can have, by name(string): + "dev", "test", "myscope"... 
+ it is just a set, but with syntax to be queried as: + if self.scope.dev: + """ + + def __getattr__(self, field): + return self.get(field) + + def __setattr__(self, field, value): + self[field] = value + + def __repr__(self): + return ", ".join("%s=%s" % (k, v) for k, v in sorted(self.items())) + + +# This is necessary, as None cannot be ordered in Py3 +_root = "0CONAN_ROOT*" +_all = "ALL" + + +class Scopes(defaultdict): + """ all the scopes of a dependency graph, as a dict{package name(str): Scope + the root package of the graph might not have name, then its key is None. + It is loaded and saved to text as: + Package1:dev + Package1:test + Package2:dev + dev # for the root package, without name + other # any name allowed + This will be stored in memory as {Package1: Scopes(set[dev, test]), + Package2: Scopes(...), + None: Scopes(set[dev, other]) + """ + def __init__(self): + super(Scopes, self).__init__(Scope) + self[_root].dev = True + + def package_scope(self, name=None): + """ return the scopes for the given package which are the scopes set + for ALL, updated (high priority) with the specific package scopes + if the package name is None, then it is the ROOT package/consumer + """ + scope = Scope(self.get(_all, {})) + scope.update(self[name or _root]) + return scope + + @staticmethod + def from_list(items): + result = Scopes() + for item in items: + try: + key, value = item.split("=") + except: + raise ConanException("Bad scope %s" % item) + v = value.upper() + if v == "TRUE": + value = True + elif v == "FALSE": + value = False + elif v == "NONE": + value = None + + chunks = key.split(":") + if len(chunks) == 2: + root = chunks[0] + scope = chunks[1] + elif len(chunks) == 1: + root = _root + scope = chunks[0] + else: + raise ConanException("Bad scope %s" % item) + + result[root][scope] = value + return result + + def update_scope(self, other): + for name, scopes in other.items(): + self[name].update(scopes) + + @staticmethod + def loads(text): + return 
def bad_value_msg(name, value, value_range):
    """Error text for assigning *value* to setting *name* when it is not in
    *value_range*; adds a docs hint for top-level "settings" names."""
    hint = ("\nCheck your settings ~/.conan/settings.yml or read the docs FAQ"
            if "settings" in name else "")
    return ("'%s' is not a valid '%s' value.\nPossible values are %s%s"
            % (value, name, value_range, hint))


def undefined_field(name, field, fields=None, value=None):
    """Error text for accessing a sub-setting *field* that *name* does not
    declare (optionally scoped to the currently selected *value*)."""
    suffix = " for '%s'" % value if value else ""
    lines = ["'%s.%s' doesn't exist%s" % (name, field, suffix),
             "'%s' possible configurations are %s" % (name, fields or "none")]
    return "\n".join(lines)


def undefined_value(name):
    """Error text for a setting that was never assigned a value."""
    return "'%s' value not defined" % name
self._definition[:] + else: + result._definition = {k: v.copy() for k, v in self._definition.items()} + return result + + @property + def is_final(self): + return not isinstance(self._definition, dict) + + def __bool__(self): + if not self._value: + return False + return self._value.lower() not in ["false", "none", "0", "off"] + + def __nonzero__(self): + return self.__bool__() + + def __str__(self): + return self._value + + def __eq__(self, other): + if other is None: + return self._value is None + other = str(other) + if self._definition != "ANY" and other not in self.values_range: + raise ConanException(bad_value_msg(self._name, other, self.values_range)) + return other == self.__str__() + + def __ne__(self, other): + return not self.__eq__(other) + + def __delattr__(self, item): + """ This is necessary to remove libcxx subsetting from compiler in config() + del self.settings.compiler.stdlib + """ + try: + self._get_child(self._value).remove(item) + except: + pass + + def remove(self, values): + if not isinstance(values, (list, tuple, set)): + values = [values] + for v in values: + v = str(v) + if isinstance(self._definition, dict): + self._definition.pop(v, None) + elif self._definition != "ANY": + if v in self._definition: + self._definition.remove(v) + if self._value is not None and self._value not in self._definition: + raise ConanException(bad_value_msg(self._name, self._value, self.values_range)) + + def _get_child(self, item): + if not isinstance(self._definition, dict): + raise ConanException(undefined_field(self._name, item, None, self._value)) + if self._value is None: + raise ConanException(undefined_value(self._name)) + return self._definition[self._value] + + def __getattr__(self, item): + item = str(item) + sub_config_dict = self._get_child(item) + return getattr(sub_config_dict, item) + + def __setattr__(self, item, value): + if item[0] == "_" or item.startswith("value"): + return super(SettingsItem, self).__setattr__(item, value) + + item = 
str(item) + sub_config_dict = self._get_child(item) + return setattr(sub_config_dict, item, value) + + def __getitem__(self, value): + value = str(value) + try: + return self._definition[value] + except: + raise ConanException(bad_value_msg(self._name, value, self.values_range)) + + @property + def value(self): + return self._value + + @value.setter + def value(self, v): + v = str(v) + if self._definition != "ANY" and v not in self._definition: + raise ConanException(bad_value_msg(self._name, v, self.values_range)) + self._value = v + + @property + def values_range(self): + try: + return sorted(list(self._definition.keys())) + except: + return self._definition + + @property + def values_list(self): + if self._value is None: + return [] + result = [] + partial_name = ".".join(self._name.split(".")[1:]) + result.append((partial_name, self._value)) + if isinstance(self._definition, dict): + sub_config_dict = self._definition[self._value] + result.extend(sub_config_dict.values_list) + return result + + def validate(self): + if self._value is None and "None" not in self._definition: + raise ConanException(undefined_value(self._name)) + + if isinstance(self._definition, dict): + self._definition[self._value].validate() + + +class Settings(object): + def __init__(self, definition=None, name="settings", parent_value=None): + definition = definition or {} + self._name = name # settings, settings.compiler + self._parent_value = parent_value # gcc, x86 + self._data = {str(k): SettingsItem(v, "%s.%s" % (name, k)) + for k, v in definition.items()} + + def copy(self): + """ deepcopy, recursive + """ + result = Settings({}, name=self._name, parent_value=self._parent_value) + for k, v in self._data.items(): + result._data[k] = v.copy() + return result + + @staticmethod + def loads(text): + return Settings(yaml.load(text) or {}) + + def validate(self): + for field in self.fields: + child = self._data[field] + child.validate() + + @property + def fields(self): + return 
sorted(list(self._data.keys())) + + def remove(self, item): + if not isinstance(item, (list, tuple, set)): + item = [item] + for it in item: + it = str(it) + self._data.pop(it, None) + + def clear(self): + self._data = {} + + def _check_field(self, field): + if field not in self._data: + raise ConanException(undefined_field(self._name, field, self.fields, + self._parent_value)) + + def __getattr__(self, field): + assert field[0] != "_", "ERROR %s" % field + self._check_field(field) + return self._data[field] + + def __delattr__(self, field): + assert field[0] != "_", "ERROR %s" % field + self._check_field(field) + del self._data[field] + + def __setattr__(self, field, value): + if field[0] == "_" or field.startswith("values"): + return super(Settings, self).__setattr__(field, value) + + self._check_field(field) + self._data[field].value = value + + @property + def values(self): + return Values.from_list(self.values_list) + + @property + def values_list(self): + result = [] + for field in self.fields: + config_item = self._data[field] + result.extend(config_item.values_list) + return result + + def items(self): + return self.values_list + + def iteritems(self): + return self.values_list + + @values_list.setter + def values_list(self, vals): + """ receives a list of tuples (compiler.version, value) + """ + assert isinstance(vals, list), vals + for (name, value) in vals: + list_settings = name.split(".") + attr = self + for setting in list_settings[:-1]: + attr = getattr(attr, setting) + setattr(attr, list_settings[-1], str(value)) + + @values.setter + def values(self, vals): + assert isinstance(vals, Values) + self.values_list = vals.as_list() + + def constraint(self, constraint_def): + """ allows to restrict a given Settings object with the input of another Settings object + 1. The other Settings object MUST be exclusively a subset of the former. + No additions allowed + 2. 
class Username(str):
    """A validated conan user name: 2-50 characters, starting with a letter,
    followed by letters, digits or underscores."""

    base_er = "[a-zA-Z][a-zA-Z0-9_]{1,49}"
    pattern = re.compile("^%s$" % base_er)

    def __new__(cls, name, validate=True):
        """Simple name creation.

        @param name: string containing the desired name
        @param validate: checks for valid simple name. default True
        """
        if validate:
            name = Username.validate(name)
        return str.__new__(cls, name)

    @staticmethod
    def validate(name):
        """Check for name compliance with pattern rules. User names can be
        with upper/lower case.

        Returns the stripped name; raises InvalidNameException otherwise.
        """
        try:
            name = name.strip()
            if Username.pattern.match(name) is None:
                # The pattern admits 2-50 characters in total, so only names
                # longer than 50 are really "too long". BUG FIX: was "> 49",
                # which misreported 50-char names containing invalid
                # characters as "too long ... at most 50 characters".
                if len(name) > 50:
                    message = "'%s' is too long. Valid names must contain at most 50 characters."
                elif len(name) < 2:
                    message = "'%s' is too short. Valid names must contain at least 2 characters."
                else:
                    message = "'%s' is an invalid name. "\
                              "Valid names should begin with alphanumerical characters."
                raise InvalidNameException(message % name)
            return name
        except AttributeError:
            # name was None (or not a string): .strip() failed
            raise InvalidNameException('Empty name provided', None)
DO not delete, might be used by conan_info() to clear settings values + self._dict.clear() + self._value = "" + + def __setattr__(self, attr, value): + if attr[0] == "_": + return super(Values, self).__setattr__(attr, value) + self._dict[attr] = Values(value) + + def copy(self): + """ deepcopy, recursive + """ + result = Values(self._value) + for k, v in self._dict.items(): + result._dict[k] = v.copy() + return result + + @property + def fields(self): + """ return a sorted list of fields: [compiler, os, ...] + """ + return sorted(list(self._dict.keys())) + + def __bool__(self): + return self._value.lower() not in ["false", "none", "0", "off", ""] + + def __nonzero__(self): + return self.__bool__() + + def __str__(self): + return self._value + + def __eq__(self, other): + return str(other) == self.__str__() + + def __ne__(self, other): + return not self.__eq__(other) + + @classmethod + def loads(cls, text): + result = [] + for line in text.splitlines(): + if not line.strip(): + continue + name, value = line.split("=") + result.append((name.strip(), value.strip())) + return cls.from_list(result) + + def as_list(self, list_all=True): + result = [] + for field in self.fields: + value = getattr(self, field) + if value or list_all: + result.append((field, str(value))) + child_lines = value.as_list() + for (child_name, child_value) in child_lines: + result.append(("%s.%s" % (field, child_name), child_value)) + return result + + @classmethod + def from_list(cls, data): + result = cls() + for (field, value) in data: + tokens = field.split(".") + attr = result + for token in tokens[:-1]: + attr = getattr(attr, token) + if attr is None: + raise ConanException("%s not defined for %s\n" + "Please define %s value first too" + % (token, field, token)) + setattr(attr, tokens[-1], Values(value)) + return result + + def dumps(self): + """ produces a text string with lines containine a flattened version: + compiler.arch = XX + compiler.arch.speed = YY + """ + return 
"\n".join(["%s=%s" % (field, value) + for (field, value) in self.as_list()]) + + def serialize(self): + return self.as_list() + + @property + def sha(self): + result = [] + for (name, value) in self.as_list(list_all=False): + # It is important to discard None values, so migrations in settings can be done + # without breaking all existing packages SHAs, by adding a first "None" option + # that doesn't change the final sha + if value != "None": + result.append("%s=%s" % (name, value)) + return sha1('\n'.join(result).encode()) diff --git a/testbed/conan-io__conan/conans/model/version.py b/testbed/conan-io__conan/conans/model/version.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d2574626adb11650109422bd01d5c2972e5fc1 --- /dev/null +++ b/testbed/conan-io__conan/conans/model/version.py @@ -0,0 +1,91 @@ +import re + + +class Version(str): + """ This is NOT an implementation of semver, as users may use any pattern in their versions. + It is just a helper to parse .-, and compare taking into account integers when possible + """ + + def __new__(cls, content): + return str.__new__(cls, content.strip()) + + @property + def as_list(self): + result = [] + tokens = re.split('[.-]', self) + for item in tokens: + result.append(int(item) if item.isdigit() else item) + return result + + def major(self, fill=True): + self_list = self.as_list + v = str(self_list[0]) if self_list else "0" + if fill: + return Version(".".join([v, 'Y', 'Z'])) + return Version(v) + + def stable(self): + """ same as major, but as semver, 0.Y.Z is not considered + stable, so return it as is + """ + if self.as_list[0] == 0: + return self + return self.major() + + def minor(self, fill=True): + self_list = self.as_list + v0 = str(self_list[0]) if len(self_list) > 0 else "0" + v1 = str(self_list[1]) if len(self_list) > 1 else "0" + if fill: + return Version(".".join([v0, v1, 'Z'])) + return Version(".".join([v0, v1])) + + def compatible(self, other): + if not isinstance(other, 
Version): + other = Version(other) + for v1, v2 in zip(self.as_list, other.as_list): + if v1 in ["X", "Y", "Z"] or v2 in ["X", "Y", "Z"]: + return True + if v1 != v2: + return False + return True + + def __cmp__(self, other): + if other is None: + return 1 + if not isinstance(other, Version): + other = Version(other) + + other_list = other.as_list + for ind, el in enumerate(self.as_list): + if ind + 1 > len(other_list): + if isinstance(el, int): + return 1 + return -1 + if not isinstance(el, int) and isinstance(other_list[ind], int): + # Version compare with 1.4.rc2 + return -1 + elif not isinstance(other_list[ind], int) and isinstance(el, int): + return 1 + elif el == other_list[ind]: + continue + elif el > other_list[ind]: + return 1 + else: + return -1 + if len(other_list) > len(self.as_list): + return -1 + else: + return 0 + + def __gt__(self, other): + return self.__cmp__(other) == 1 + + def __lt__(self, other): + return self.__cmp__(other) == -1 + + def __le__(self, other): + return self.__cmp__(other) in [0, -1] + + def __ge__(self, other): + return self.__cmp__(other) in [0, 1] diff --git a/testbed/conan-io__conan/conans/paths.py b/testbed/conan-io__conan/conans/paths.py new file mode 100644 index 0000000000000000000000000000000000000000..722a0d8a92f079cf08cdc75463a7119d8c2b7645 --- /dev/null +++ b/testbed/conan-io__conan/conans/paths.py @@ -0,0 +1,215 @@ +import os +from conans.model.ref import ConanFileReference, PackageReference +from conans.util.files import load, save, rmdir +from os.path import join, normpath +import platform +import tempfile +from conans.errors import ConanException + + +EXPORT_FOLDER = "export" +SRC_FOLDER = "source" +BUILD_FOLDER = "build" +PACKAGES_FOLDER = "package" +SYSTEM_REQS_FOLDER = "system_reqs" + + +CONANFILE = 'conanfile.py' +CONANFILE_TXT = "conanfile.txt" +CONAN_MANIFEST = "conanmanifest.txt" +BUILD_INFO = 'conanbuildinfo.txt' +BUILD_INFO_GCC = 'conanbuildinfo.gcc' +BUILD_INFO_CMAKE = 'conanbuildinfo.cmake' 
def conan_expand_user(path):
    """ wrapper to the original expanduser function, to workaround python returning
    verbatim %USERPROFILE% when some other app (git for windows) sets HOME envvar
    """
    if platform.system() != "Windows":
        return os.path.expanduser(path)

    # On Windows, HOME/USERPROFILE should exist and point at the user dir.
    # Work on a snapshot of the environment so the tweak is never permanent.
    saved_environ = dict(os.environ)
    try:
        home = os.environ.get("HOME")
        # Problematic cases of wrong HOME variable:
        # - HOME = %USERPROFILE% verbatim, as messed by some other tools
        # - MSYS console, that defines a different user home in /c/mingw/msys/users/xxx
        # In these cases, it is safe to remove it and rely on USERPROFILE directly
        broken_home = home and (not os.path.exists(home) or
                                (os.getenv("MSYSTEM") and os.getenv("USERPROFILE")))
        if broken_home:
            del os.environ["HOME"]
        return os.path.expanduser(path)
    finally:
        os.environ.clear()
        os.environ.update(saved_environ)
_check_ref_case(conan_reference, conan_folder, store_folder): + if not os.path.exists(conan_folder): # If it doesn't exist, not a problem + return + # If exists, lets check path + tmp = store_folder + for part in conan_reference: + items = os.listdir(tmp) + if part not in items: + offending = "" + for item in items: + if item.lower() == part.lower(): + offending = item + break + raise ConanException("Requested '%s' but found case incompatible '%s'\n" + "Case insensitive filesystem can't manage this" + % (str(conan_reference), offending)) + tmp = os.path.normpath(tmp + os.sep + part) +else: + def _check_ref_case(conan_reference, conan_folder, store_folder): # @UnusedVariable + pass + + +def _shortener(path, short_paths): + """ short_paths is 4-state: + False: Never shorten the path + True: Always shorten the path, create link if not existing + None: Use shorten path only if already exists, not create + Other: Integrity check. Consumer knows it should be short, but it isn't + """ + if short_paths is False: + return path + link = os.path.join(path, CONAN_LINK) + if os.path.exists(link): + return load(link) + elif short_paths is None: + return path + elif short_paths is not True: + raise ConanException("This path should be short, but it isn't: %s\n" + "Try to remove these packages and re-build them" % path) + + short_home = os.getenv("CONAN_USER_HOME_SHORT") + if not short_home: + drive = os.path.splitdrive(path)[0] + short_home = drive + "/.conan" + try: + os.makedirs(short_home) + except: + pass + redirect = tempfile.mkdtemp(dir=short_home, prefix="") + # This "1" is the way to have a non-existing directory, so commands like + # shutil.copytree() to it, works. It can be removed without compromising the + # temp folder generator and conan-links consistency + redirect = os.path.join(redirect, "1") + save(link, redirect) + return redirect + + +class SimplePaths(object): + """ + Generate Conan paths. Handles the conan domain path logic. 
    def system_reqs(self, conan_reference):
        """Path of the system requirements log for a recipe reference:
        <recipe_folder>/system_reqs/system_reqs.txt
        """
        assert isinstance(conan_reference, ConanFileReference)
        return normpath(join(self.conan(conan_reference), SYSTEM_REQS_FOLDER, SYSTEM_REQS))
b/testbed/conan-io__conan/conans/requirements_osx.txt @@ -0,0 +1,4 @@ +cryptography>=1.3.4, <1.5 +pyOpenSSL>=16.0.0, <16.1.0 +ndg-httpsclient>=0.4.1, <0.5.0 +pyasn>=1.5.0b7, <1.6.0 diff --git a/testbed/conan-io__conan/conans/requirements_server.txt b/testbed/conan-io__conan/conans/requirements_server.txt new file mode 100644 index 0000000000000000000000000000000000000000..27f12bebb6513b763120d598e640de3543b2e6ea --- /dev/null +++ b/testbed/conan-io__conan/conans/requirements_server.txt @@ -0,0 +1,2 @@ +# Server +bottle>=0.12.8, < 0.13 \ No newline at end of file diff --git a/testbed/conan-io__conan/conans/search/__init__.py b/testbed/conan-io__conan/conans/search/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/search/query_parse.py b/testbed/conan-io__conan/conans/search/query_parse.py new file mode 100644 index 0000000000000000000000000000000000000000..b7a206dbbd7750798eee007cd660a39c4f8efc0e --- /dev/null +++ b/testbed/conan-io__conan/conans/search/query_parse.py @@ -0,0 +1,124 @@ +from conans.util.log import logger + + +def is_operator(el): + return el in ["|", "&"] + + +def _parse_expression(subexp): + '''Expressions like: + compiler.version=12 + compiler="Visual Studio" + arch="x86" + Could be replaced with another one to parse different queries''' + ret = "" + quoted = False + for char in subexp: + if char in ['"', "'"]: # Fixme: Mix quotes + quoted = not quoted + ret += char + continue + + if quoted: + ret += char + elif char == " " or is_operator(char) or char in [")", "("]: + break + else: + ret += char + + if "=" not in ret: + raise Exception("Invalid expression: %s" % ret) + + return ret + + +def evaluate_postfix(postfix, evaluator): + """ + Evaluates a postfix expression and returns a boolean + @param postfix: Postfix expression as a list + @param evaluator: Function that will return a bool receiving expressions + like 
"compiler.version=12" + @return: bool + """ + if not postfix: # If no query return all? + return True + + stack = [] + for el in postfix: + if not is_operator(el): + stack.append(el) + else: + o1 = stack.pop() + o2 = stack.pop() + if not isinstance(o1, bool): + o1 = evaluator(o1) + if not isinstance(o2, bool): + o2 = evaluator(o2) + + if el == "|": + res = o1 or o2 + elif el == "&": + res = o1 and o2 + stack.append(res) + if len(stack) != 1: + raise Exception("Bad stack: %s" % str(stack)) + elif not isinstance(stack[0], bool): + return evaluator(stack[0]) # Single Expression without AND or OR + else: + return stack[0] + + +def infix_to_postfix(exp): + """ + Translates an infix expression to postfix using an standard algorithm + with little hacks for parse complex expressions like "compiler.version=4" + instead of just numbers and without taking in account the operands priority + except the priority specified by the "(" + + @param exp: String with an expression with & and | operators, + e.j: "os=Windows & (compiler=gcc | compiler.version=3)" + e.j: "os=Windows AND (compiler=gcc or compiler.version=3)" + @return List with the postfix expression + """ + + # To ease the parser, operators only with one character + exp = exp.replace(" AND ", "&").replace(" OR ", "|").replace(" and ", "&").replace(" or ", "|") + output = [] + stack = [] + + i = -1 + while(i < len(exp) - 1): + i += 1 + char = exp[i] + if char == " ": # Ignore spaces between expressions and operators + continue + if char == ")": # Pop the stack until "(" and send them to output + popped = None + while(popped != "(" and stack): + popped = stack.pop() + if popped != "(": + output.append(popped) + if popped != "(": + raise Exception("Bad expression, not balanced parenthesis") + elif is_operator(char): + # Same operations has the same priority + # replace this lines if the operators need to have + # some priority + if stack and is_operator(stack[:-1]): + popped = stack.pop() + output.append(popped) + 
stack.append(char) + elif char == "(": + stack.append("(") + else: # Parse an expression, in our case something like "compiler=gcc" + logger.debug("Parsing expression from : %s" % exp[i:]) + expr = _parse_expression(exp[i:]) + logger.debug("Parsed expression: %s" % expr) + i += len(expr) - 1 + output.append(expr) + + # Append remaining elements + if "(" in stack: + raise Exception("Bad expression, not balanced parenthesis") + output.extend(stack) + return output diff --git a/testbed/conan-io__conan/conans/search/search.py b/testbed/conan-io__conan/conans/search/search.py new file mode 100644 index 0000000000000000000000000000000000000000..92158b939d4e717da721a3147693b324a88c52b0 --- /dev/null +++ b/testbed/conan-io__conan/conans/search/search.py @@ -0,0 +1,176 @@ +import re + +from abc import ABCMeta, abstractmethod +from fnmatch import translate + +from conans.errors import ConanException, NotFoundException +from conans.model.info import ConanInfo +from conans.model.ref import PackageReference, ConanFileReference +from conans.paths import CONANINFO +from conans.util.log import logger +import os +from conans.search.query_parse import infix_to_postfix, evaluate_postfix + + +class SearchAdapterABC(object): + """Methods that allows access to disk or s3 or whatever to make a search""" + __metaclass__ = ABCMeta + + @abstractmethod + def list_folder_subdirs(self, basedir, level): + pass + + @abstractmethod + def path_exists(self, path): + pass + + @abstractmethod + def load(self, filepath): + pass + + @abstractmethod + def join_paths(self, *args): + pass + + +class DiskSearchAdapter(SearchAdapterABC): + + def list_folder_subdirs(self, basedir, level): + from conans.util.files import list_folder_subdirs + return list_folder_subdirs(basedir, level) + + def path_exists(self, path): + return os.path.exists(path) + + def load(self, filepath): + from conans.util.files import load + return load(filepath) + + def join_paths(self, *args): + return os.path.join(*args) + + +class 
def evaluate(prop_name, prop_value, conan_vars_info):
    """
    Evaluates a single prop_name, prop_value like "os", "Windows" against
    conan_vars_info (the dict produced by ConanInfo.serialize_min()).

    @param prop_name: queried property, e.g. "os" or "compiler.version"
    @param prop_value: value to match against, e.g. "Windows"
    @param conan_vars_info: dict with optional "settings"/"options" sub-dicts
    @return: True when the stored value is undefined (None) or equals prop_value
    """

    def compatible_prop(setting_value, prop_value):
        # An undefined setting/option matches any queried value
        return setting_value is None or prop_value == setting_value

    # Default must be {} (was []): a list has no .get() and crashed when the
    # "settings"/"options" keys were missing from conan_vars_info
    info_settings = conan_vars_info.get("settings", {})
    info_options = conan_vars_info.get("options", {})

    # The settings namespace is a fixed set; anything else is an option
    if prop_name in ["os", "compiler", "arch", "build_type"] or prop_name.startswith("compiler."):
        return compatible_prop(info_settings.get(prop_name, None), prop_value)
    return compatible_prop(info_options.get(prop_name, None), prop_value)
class DiskSearchManager(SearchManagerABC):
    """Will search recipes and packages using a file system.
    Can be used with a SearchAdapter"""

    def __init__(self, paths, disk_search_adapter):
        self._paths = paths
        self._adapter = disk_search_adapter

    def search(self, pattern=None, ignorecase=True):
        """Return a sorted list of ConanFileReference in the local store,
        optionally filtered by an fnmatch-style pattern."""
        # Conan references in main storage
        if pattern:
            pattern = translate(pattern)  # fnmatch wildcard -> regex
            pattern = re.compile(pattern, re.IGNORECASE) if ignorecase else re.compile(pattern)

        subdirs = self._adapter.list_folder_subdirs(basedir=self._paths.store, level=4)
        if not pattern:
            return sorted([ConanFileReference(*folder.split("/")) for folder in subdirs])
        # (the original re-checked "if pattern" inside this branch; it is
        # always true here, so the redundant check was removed)
        ret = []
        for subdir in subdirs:
            conan_ref = ConanFileReference(*subdir.split("/"))
            if pattern.match(str(conan_ref)):
                ret.append(conan_ref)
        return sorted(ret)

    def search_packages(self, reference, query):
        """ Return a dict like this:

                {package_ID: {name: "OpenCV",
                              version: "2.14",
                              settings: {os: Windows}}}
        param conan_ref: ConanFileReference object
        """
        infos = self._get_local_infos_min(reference)
        return filter_packages(query, infos)

    def _get_local_infos_min(self, reference):
        """Read and minimally-serialize the conaninfo of every local package
        of *reference*; packages without a readable conaninfo are logged and
        skipped."""
        result = {}
        packages_path = self._paths.packages(reference)
        subdirs = self._adapter.list_folder_subdirs(packages_path, level=1)
        for package_id in subdirs:
            # Read conaninfo
            try:
                package_reference = PackageReference(reference, package_id)
                info_path = self._adapter.join_paths(self._paths.package(package_reference,
                                                                         short_paths=None),
                                                     CONANINFO)
                if not self._adapter.path_exists(info_path):
                    raise NotFoundException("")
                conan_info_content = self._adapter.load(info_path)
                conan_vars_info = ConanInfo.loads(conan_info_content).serialize_min()
                result[package_id] = conan_vars_info

            except Exception as exc:
                # BUG FIX: log package_id, not package_reference — the latter
                # is unbound (NameError inside the handler, masking the real
                # error) if the PackageReference construction itself raised
                logger.error("Package %s has not ConanInfo file" % str(package_id))
                if str(exc):
                    logger.error(str(exc))

        return result
a/testbed/conan-io__conan/conans/server/conf/default_server_conf.py b/testbed/conan-io__conan/conans/server/conf/default_server_conf.py new file mode 100644 index 0000000000000000000000000000000000000000..c9879ff7610c7d8cf0ce8c222dc5ea0c534fb653 --- /dev/null +++ b/testbed/conan-io__conan/conans/server/conf/default_server_conf.py @@ -0,0 +1,58 @@ +default_server_conf = """[server] +# WARNING! Change default variable of jwt_secret. You should change it periodically +# It only affects to current authentication tokens, you can change safetely anytime +# When it changes user are just forced to log in again +jwt_secret: {jwt_secret} +jwt_expire_minutes: 120 + +ssl_enabled: False +port: 9300 +# Public port where files will be served. If empty will be used "port" +public_port: +host_name: localhost + +# Choose file adapter, "disk" for disk storage +# Authorize timeout are seconds the client has to upload/download files until authorization expires +store_adapter: disk +authorize_timeout: 1800 + +# Just for disk storage adapter +# updown_secret is the key used to generate the upload/download authorization token +disk_storage_path: ~/.conan_server/data +disk_authorize_timeout: 1800 +updown_secret: {updown_secret} + + +[write_permissions] + +# +# name,version,user,channel: user1, user2, user3 +# The rules are applied in order. If a rule applies to a conan, system wont look further. +# +# Example: All versions of opencv package from lasote user in testing channel is only +# writeable by default_user and default_user2. Rest of packages are not writtable by anything +# except the author. +# +# "opencv/2.3.4@lasote/testing": default_user, default_user2 +# + +[read_permissions] + +# +# name,version,user,channel: user1, user2, user3 +# The rules are applied in order. If a rule applies to a conan, system wont look further. +# +# Example: All versions of opencv package from lasote user in testing channel is only +# readable by default_user and default_user2. 
Rest of packages are world readable +# +# opencv/1.2.3@lasote/testing: default_user default_user2 +# *:*@*/*: * +# +# By default all users can read all blocks +*/*@*/*: * + + +[users] +#default_user: defaultpass +demo: demo +""" diff --git a/testbed/conan-io__conan/conans/test/__init__.py b/testbed/conan-io__conan/conans/test/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..678b2fae9ff1c15ad29e7493b01ed14f95ab7e47 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/__init__.py @@ -0,0 +1,3 @@ +import os + +CONAN_TEST_FOLDER = os.getenv('CONAN_TEST_FOLDER', None) diff --git a/testbed/conan-io__conan/conans/test/auth_bearer_test.py b/testbed/conan-io__conan/conans/test/auth_bearer_test.py new file mode 100644 index 0000000000000000000000000000000000000000..cc216a83081fa7aec36312f1c609fbb4891ce589 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/auth_bearer_test.py @@ -0,0 +1,103 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from bottle import request + + +conanfile = """ +from conans import ConanFile + +class OpenSSLConan(ConanFile): + name = "Hello" + version = "0.1" +""" + + +class AuthorizationHeaderSpy(object): + ''' Generic plugin to handle Authorization header. 
class ReturnHandlerPlugin(object):
    """Bottle plugin that strips the "?signature=..." query suffix from every
    value of a dict returned by the wrapped route callback."""

    name = 'ReturnHandlerPluginSpy'
    api = 2

    def apply(self, callback, _):
        '''Apply plugin'''
        def wrapper(*args, **kwargs):
            '''Strip signatures from dict results; pass anything else through'''
            outcome = callback(*args, **kwargs)
            if isinstance(outcome, dict):
                # In-place mutation: existing keys only, so iteration is safe
                for key in outcome:
                    outcome[key] = outcome[key].split("?signature=")[0]
            return outcome
        return wrapper
class AuthorizeTest(unittest.TestCase):
    """Checks client login retries against a remote and that upload is
    granted or denied according to the server's read/write permissions."""

    def setUp(self):
        unittest.TestCase.setUp(self)
        self.servers = {}
        self.conan_reference = ConanFileReference.loads("openssl/2.0.1@lasote/testing")
        # Create a default remote. R/W is not authorized for conan_reference, just for pepe and owner
        self.test_server = TestServer([(str(self.conan_reference), "pepe")],  # read permissions
                                      [(str(self.conan_reference), "pepe")],  # write permissions
                                      users={"lasote": "mypass",
                                             "pepe": "pepepass"})  # exported users and passwords
        self.servers["default"] = self.test_server

    def retries_test(self):
        """Bad login 2 times"""
        # Two wrong credential pairs followed by a valid one: the client must
        # retry and finally authenticate as "pepe"
        self.conan = TestClient(servers=self.servers, users={"default": [("baduser", "badpass"),
                                                                         ("baduser", "badpass2"),
                                                                         ("pepe", "pepepass")]})
        save(os.path.join(self.conan.current_folder, CONANFILE), conan_content)
        self.conan.run("export lasote")
        errors = self.conan.run("upload %s" % str(self.conan_reference))
        # Check that return was ok
        self.assertFalse(errors)
        # Check that upload was granted
        self.assertTrue(os.path.exists(self.test_server.paths.export(self.conan_reference)))

        # Check that login failed two times before ok
        self.assertEquals(self.conan.user_io.login_index["default"], 3)

    def max_retries_test(self):
        """Bad login 3 times"""
        # Three wrong credential pairs: the client exhausts its retries and
        # the upload must be rejected
        self.conan = TestClient(servers=self.servers, users={"default": [("baduser", "badpass"),
                                                                         ("baduser", "badpass2"),
                                                                         ("baduser3", "badpass3")]})
        save(os.path.join(self.conan.current_folder, CONANFILE), conan_content)
        self.conan.run("export lasote -p ./ ")
        errors = self.conan.run("upload %s" % str(self.conan_reference), ignore_error=True)
        # Check that return was not ok
        self.assertTrue(errors)
        # Check that upload was not granted
        self.assertFalse(os.path.exists(self.test_server.paths.export(self.conan_reference)))

        # Check that login failed all times
        self.assertEquals(self.conan.user_io.login_index["default"], 3)
b/testbed/conan-io__conan/conans/test/broken_download_test.py @@ -0,0 +1,28 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.ref import ConanFileReference +import os +from conans.util.files import save + + +class BrokenDownloadTest(unittest.TestCase): + + def basic_test(self): + server = TestServer() + servers = {"default": server} + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + files = cpp_hello_conan_files() + client.save(files) + client.run("export lasote/stable") + ref = ConanFileReference.loads("Hello/0.1@lasote/stable") + self.assertTrue(os.path.exists(client.paths.export(ref))) + client.run("upload Hello/0.1@lasote/stable") + client.run("remove Hello/0.1@lasote/stable -f") + self.assertFalse(os.path.exists(client.paths.export(ref))) + path = server.test_server.file_manager.paths.export(ref) + tgz = os.path.join(path, "conan_export.tgz") + save(tgz, "contents") # dummy content to break it, so the download decompress will fail + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("ERROR: Error while downloading/extracting files to", client.user_io.out) + self.assertFalse(os.path.exists(client.paths.export(ref))) diff --git a/testbed/conan-io__conan/conans/test/client_conf_test.py b/testbed/conan-io__conan/conans/test/client_conf_test.py new file mode 100644 index 0000000000000000000000000000000000000000..9a06e0bbaad4f0e76e1e4b651d765aa48a0f62a0 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/client_conf_test.py @@ -0,0 +1,74 @@ +import unittest +from conans.test.utils.test_files import temp_folder +from conans.client.conf import ConanClientConfigParser, default_settings_yml +from conans.util.files import save +from conans.client.client_cache import CONAN_CONF +import os +from conans import tools +from conans.model.settings import Settings +from conans.errors import 
ConanException + + +default_client_conf = '''[storage] +path: ~/.conan/data + +[proxies] +[settings_defaults] +arch=x86_64 +build_type=Release +compiler=gcc +compiler.libcxx=libstdc++ +compiler.version=4.9 +os=Linux +''' + + +class ClientConfTest(unittest.TestCase): + + def env_setting_override_test(self): + tmp_dir = temp_folder() + save(os.path.join(tmp_dir, CONAN_CONF), default_client_conf) + config = ConanClientConfigParser(os.path.join(tmp_dir, CONAN_CONF)) + + # If I don't specify an ENV for compiler, the subsettings are kept, + # except the compiler version that I'm overriding + def get_settings(): + settings = Settings.loads(default_settings_yml) + config.settings_defaults(settings) + return settings.values.as_list() + + with tools.environment_append({"CONAN_ENV_COMPILER_VERSION": "4.6"}): + self.assertEquals(get_settings(), [("arch", "x86_64"), + ("build_type", "Release"), + ("compiler", "gcc"), + ("compiler.libcxx", "libstdc++"), + ("compiler.version", "4.6"), + ("os", "Linux")]) + with tools.environment_append({}): + self.assertEquals(get_settings(), [("arch", "x86_64"), + ("build_type", "Release"), + ("compiler", "gcc"), + ("compiler.libcxx", "libstdc++"), + ("compiler.version", "4.9"), + ("os", "Linux")]) + + # If compiler is overwritten compiler subsettings are not assigned + with tools.environment_append({"CONAN_ENV_COMPILER": "Visual Studio"}): + self.assertEquals(get_settings(), [("arch", "x86_64"), + ("build_type", "Release"), + ("compiler", "Visual Studio"), + ("os", "Linux")]) + + with tools.environment_append({"CONAN_ENV_COMPILER": "Visual Studio", + "CONAN_ENV_COMPILER_VERSION": "14", + "CONAN_ENV_COMPILER_RUNTIME": "MDd"}): + self.assertEquals(dict(get_settings()), dict([("arch", "x86_64"), + ("build_type", "Release"), + ("compiler", "Visual Studio"), + ("compiler.version", "14"), + ("compiler.runtime", "MDd"), + ("os", "Linux")])) + + # Specified settings are applied in order (first fake and then fake.setting) + with 
tools.environment_append({"CONAN_ENV_FAKE": "Fake1"}): + self.assertRaisesRegexp(ConanException, "'settings.fake' doesn't exist", get_settings) diff --git a/testbed/conan-io__conan/conans/test/cmake_test.py b/testbed/conan-io__conan/conans/test/cmake_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f24010b470f4e4cd6e651932ed3cd68c9fba6d2b --- /dev/null +++ b/testbed/conan-io__conan/conans/test/cmake_test.py @@ -0,0 +1,116 @@ +import unittest +from conans.model.settings import Settings +from conans.client.conf import default_settings_yml +from conans.client.cmake import CMake + + +class CMakeTest(unittest.TestCase): + + def loads_default_test(self): + settings = Settings.loads(default_settings_yml) + settings.compiler = "Visual Studio" + settings.compiler.version = "12" + settings.arch = "x86" + + cmake = CMake(settings) + self.assertEqual('-G "Visual Studio 12 2013" -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev', + cmake.command_line) + self.assertEqual('', cmake.build_config) + + settings.build_type = "Debug" + cmake = CMake(settings) + self.assertEqual('-G "Visual Studio 12 2013" -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev', + cmake.command_line) + self.assertEqual('--config Debug', cmake.build_config) + + settings.arch = "x86_64" + cmake = CMake(settings) + self.assertEqual('-G "Visual Studio 12 2013 Win64" -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" -Wno-dev', + cmake.command_line) + + settings.os = "Windows" + settings.compiler = "gcc" + settings.compiler.version = "4.8" + cmake = CMake(settings) + self.assertEqual('-G "MinGW Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="gcc" -DCONAN_COMPILER_VERSION="4.8" -Wno-dev', + cmake.command_line) + + settings.os = "Linux" + settings.arch = "x86" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" 
-DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="gcc" ' + '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS=-m32 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m32 -DCONAN_C_FLAGS=-m32 -Wno-dev', + cmake.command_line) + + settings.arch = "x86_64" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="gcc" ' + '-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS=-m64 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m64 -DCONAN_C_FLAGS=-m64 -Wno-dev', + cmake.command_line) + + settings.os = "FreeBSD" + settings.compiler = "clang" + settings.compiler.version = "3.8" + settings.arch = "x86" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="clang" ' + '-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS=-m32 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m32 -DCONAN_C_FLAGS=-m32 -Wno-dev', + cmake.command_line) + + settings.arch = "x86_64" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="clang" ' + '-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS=-m64 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m64 -DCONAN_C_FLAGS=-m64 -Wno-dev', + cmake.command_line) + + settings.os = "SunOS" + settings.compiler = "sun-cc" + settings.compiler.version = "5.10" + settings.arch = "x86" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="sun-cc" ' + '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS=-m32 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m32 -DCONAN_C_FLAGS=-m32 -Wno-dev', + cmake.command_line) + + settings.arch = "x86_64" + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Debug -DCONAN_EXPORTED=1 ' + '-DCONAN_COMPILER="sun-cc" ' + '-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS=-m64 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m64 -DCONAN_C_FLAGS=-m64 -Wno-dev', + 
cmake.command_line) + + def deleted_os_test(self): + partial_settings = """ +os: [Linux] +arch: [x86_64] +compiler: + gcc: + version: ["4.9"] +build_type: [ Release] +""" + settings = Settings.loads(partial_settings) + settings.os = "Linux" + settings.compiler = "gcc" + settings.compiler.version = "4.9" + settings.arch = "x86_64" + + cmake = CMake(settings) + self.assertEqual('-G "Unix Makefiles" -DCONAN_EXPORTED=1 -DCONAN_COMPILER="gcc" ' + '-DCONAN_COMPILER_VERSION="4.9" -DCONAN_CXX_FLAGS=-m64 ' + '-DCONAN_SHARED_LINKER_FLAGS=-m64 -DCONAN_C_FLAGS=-m64 -Wno-dev', + cmake.command_line) diff --git a/testbed/conan-io__conan/conans/test/command/__init__.py b/testbed/conan-io__conan/conans/test/command/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/command/build_test.py b/testbed/conan-io__conan/conans/test/command/build_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d894dcd509ff17c67b0e5eb158203726eaf624ff --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/build_test.py @@ -0,0 +1,48 @@ +from conans.test.tools import TestClient +import unittest +from conans.paths import CONANFILE +from conans.model.ref import PackageReference + + +conanfile_scope_env = """ +from conans import ConanFile + +class AConan(ConanFile): + requires = "Hello/0.1@lasote/testing" + generators = "txt" + + def build(self): + self.output.info("INCLUDE PATH: %s" % self.deps_cpp_info.include_paths[0]) + self.output.info("HELLO ROOT PATH: %s" % self.deps_cpp_info["Hello"].rootpath) + self.output.info("HELLO INCLUDE PATHS: %s" % self.deps_cpp_info["Hello"].include_paths[0]) +""" + +conanfile_dep = """ +from conans import ConanFile + +class AConan(ConanFile): + name = "Hello" + version = "0.1" +""" + + +class ConanBuildTest(unittest.TestCase): + + def build_test(self): + """ Try to reuse variables loaded from txt generator => deps_cpp_info 
+ """ + client = TestClient() + client.save({CONANFILE: conanfile_dep}) + client.run("export lasote/testing") + + client.save({CONANFILE: conanfile_scope_env}, clean_first=True) + client.run("install --build=missing") + + client.run("build") + ref = PackageReference.loads("Hello/0.1@lasote/testing:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_folder = client.paths.package(ref).replace("\\", "/") + self.assertIn("Project: INCLUDE PATH: %s/include" % package_folder, client.user_io.out) + self.assertIn("Project: HELLO ROOT PATH: %s" % package_folder, client.user_io.out) + self.assertIn("Project: HELLO INCLUDE PATHS: %s/include" + % package_folder, client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/export_dirty_test.py b/testbed/conan-io__conan/conans/test/command/export_dirty_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0550faa1e2f3623b1699adbf33eb36fe85b6f0d9 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/export_dirty_test.py @@ -0,0 +1,61 @@ +import unittest +import os +from conans.paths import CONANFILE +from conans.model.ref import ConanFileReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.test.tools import TestClient +import platform + + +class ExportDirtyTest(unittest.TestCase): + """ Make sure than when the source folder becomes dirty, due to a export of + a new recipe with a rmdir failure, or to an uncomplete execution of source(), + it is marked as dirty and removed when necessary + """ + + def setUp(self): + if platform.system() != "Windows": + return + self.client = TestClient() + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install Hello0/0.1@lasote/stable --build") + ref = ConanFileReference.loads("Hello0/0.1@lasote/stable") + source_path = self.client.paths.source(ref) + file_open = os.path.join(source_path, "main.cpp") + + 
self.f = open(file_open, 'rb') + files[CONANFILE] = files[CONANFILE].replace("build2(", "build3(") + self.client.save(files) + self.client.run("export lasote/stable") + self.assertIn("ERROR: Unable to delete source folder. " + "Will be marked as dirty for deletion", + self.client.user_io.out) + + err = self.client.run("install Hello0/0.1@lasote/stable --build", ignore_error=True) + self.assertTrue(err) + self.assertIn("ERROR: Unable to remove source folder", self.client.user_io.out) + + def test_export_remove(self): + """ The export is able to remove dirty source folders + """ + if platform.system() != "Windows": + return + self.f.close() + self.client.run("export lasote/stable") + self.assertIn("Source folder is dirty, forcing removal", self.client.user_io.out) + err = self.client.run("install Hello0/0.1@lasote/stable --build") + self.assertFalse(err) + + def test_install_remove(self): + """ The install is also able to remove dirty source folders + """ + if platform.system() != "Windows": + return + # Now, release the handle to the file + self.f.close() + err = self.client.run("install Hello0/0.1@lasote/stable --build") + self.assertFalse(err) + self.assertIn("WARN: Trying to remove dirty source folder", self.client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/export_path_test.py b/testbed/conan-io__conan/conans/test/command/export_path_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7e4bdd3c2c8950a781e7c0e82d5d575f5445c05c --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/export_path_test.py @@ -0,0 +1,139 @@ +import unittest +import os +from conans.util.files import load +from conans.model.ref import ConanFileReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.manifest import FileTreeManifest +from conans.test.tools import TestClient +from conans.test.utils.test_files import temp_folder + + +class ExportPathTest(unittest.TestCase): + + def 
test_basic(self): + current_folder = temp_folder() + source_folder = os.path.join(current_folder, "source") + client = TestClient(current_folder=current_folder) + files = cpp_hello_conan_files("Hello0", "0.1") + conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable") + client.save(files, path=source_folder) + client.run("export lasote/stable --path=source") + reg_path = client.paths.export(conan_ref) + manif = FileTreeManifest.loads(load(client.paths.digestfile_conanfile(conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(conan_ref), + client.user_io.out) + self.assertIn('%s: Folder: %s' % (str(conan_ref), reg_path), client.user_io.out) + self.assertTrue(os.path.exists(reg_path)) + + for name in list(files.keys()): + self.assertTrue(os.path.exists(os.path.join(reg_path, name))) + + expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'main.cpp': '0479f3c223c9a656a718f3148e044124', + 'CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': '0f623a95e7262a618ad140eb2f959c5f', + 'executable': '68b329da9893e34099c7d8ad5cb9c940', + 'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + + self.assertEqual(expected_sums, manif.file_sums) + + def test_rel_path(self): + base_folder = temp_folder() + source_folder = os.path.join(base_folder, "source") + current_folder = os.path.join(base_folder, "current") + os.makedirs(current_folder) + client = TestClient(current_folder=current_folder) + files = cpp_hello_conan_files("Hello0", "0.1") + conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable") + client.save(files, path=source_folder) + client.run("export lasote/stable --path=../source") + reg_path = client.paths.export(conan_ref) + manif = FileTreeManifest.loads(load(client.paths.digestfile_conanfile(conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(conan_ref), + client.user_io.out) + self.assertIn('%s: Folder: %s' % (str(conan_ref), reg_path), 
client.user_io.out) + self.assertTrue(os.path.exists(reg_path)) + + for name in list(files.keys()): + self.assertTrue(os.path.exists(os.path.join(reg_path, name))) + + expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'main.cpp': '0479f3c223c9a656a718f3148e044124', + 'CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': '0f623a95e7262a618ad140eb2f959c5f', + 'executable': '68b329da9893e34099c7d8ad5cb9c940', + 'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, manif.file_sums) + + def test_path(self): + base_folder = temp_folder() + source_folder = os.path.join(base_folder, "source") + current_folder = os.path.join(base_folder, "current") + client = TestClient(current_folder=current_folder) + files = cpp_hello_conan_files("Hello0", "0.1") + conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable") + conanfile = files.pop("conanfile.py") + client.save(files, path=source_folder) + conanfile = conanfile.replace("exports = '*'", 'exports = "../source*"') + + client.save({"conanfile.py": conanfile}) + client.run("export lasote/stable") + reg_path = client.paths.export(conan_ref) + manif = FileTreeManifest.loads(load(client.paths.digestfile_conanfile(conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(conan_ref), + client.user_io.out) + self.assertIn('%s: Folder: %s' % (str(conan_ref), reg_path), client.user_io.out) + self.assertTrue(os.path.exists(reg_path)) + + for name in ['conanfile.py', 'conanmanifest.txt', 'source/main.cpp', + 'source/executable', 'source/hello.cpp', 'source/CMakeLists.txt', + 'source/helloHello0.h']: + self.assertTrue(os.path.exists(os.path.join(reg_path, name))) + + expected_sums = {'source/hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'source/main.cpp': '0479f3c223c9a656a718f3148e044124', + 'source/CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': 'c90fe9d800e1f48c8ea0999e8e5929d8', + 'source/executable': 
'68b329da9893e34099c7d8ad5cb9c940', + 'source/helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, manif.file_sums) + + def test_combined(self): + base_folder = temp_folder() + source_folder = os.path.join(base_folder, "source") + conanfile_folder = os.path.join(base_folder, "conan") + current_folder = os.path.join(base_folder, "current") + os.makedirs(current_folder) + + client = TestClient(current_folder=current_folder) + files = cpp_hello_conan_files("Hello0", "0.1") + conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable") + conanfile = files.pop("conanfile.py") + client.save(files, path=source_folder) + conanfile = conanfile.replace("exports = '*'", 'exports = "../source*"') + + client.save({"conanfile.py": conanfile}, path=conanfile_folder) + client.run("export lasote/stable --path=../conan") + reg_path = client.paths.export(conan_ref) + manif = FileTreeManifest.loads(load(client.paths.digestfile_conanfile(conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(conan_ref), + client.user_io.out) + self.assertIn('%s: Folder: %s' % (str(conan_ref), reg_path), client.user_io.out) + self.assertTrue(os.path.exists(reg_path)) + + for name in ['conanfile.py', 'conanmanifest.txt', 'source/main.cpp', + 'source/executable', 'source/hello.cpp', 'source/CMakeLists.txt', + 'source/helloHello0.h']: + self.assertTrue(os.path.exists(os.path.join(reg_path, name))) + + expected_sums = {'source/hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'source/main.cpp': '0479f3c223c9a656a718f3148e044124', + 'source/CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': 'c90fe9d800e1f48c8ea0999e8e5929d8', + 'source/executable': '68b329da9893e34099c7d8ad5cb9c940', + 'source/helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, manif.file_sums) diff --git a/testbed/conan-io__conan/conans/test/command/export_test.py 
b/testbed/conan-io__conan/conans/test/command/export_test.py new file mode 100644 index 0000000000000000000000000000000000000000..62ec6105ae76fd248522a365738dc55bf4cd6c41 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/export_test.py @@ -0,0 +1,196 @@ +import unittest +import os +from conans.paths import CONANFILE, CONAN_MANIFEST +from conans.util.files import save, load +from conans.model.ref import ConanFileReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.manifest import FileTreeManifest +from conans.test.tools import TestClient + + +class ExportSettingsTest(unittest.TestCase): + + def test_basic(self): + client = TestClient() + conanfile = """ +from conans import ConanFile +class TestConan(ConanFile): + name = "Hello" + version = "1.2" + settings = {"os": ["Linux"]} +""" + files = {CONANFILE: conanfile} + client.save(files) + client.run("export lasote/stable") + self.assertIn("WARN: Conanfile doesn't have 'license'", client.user_io.out) + client.run("install Hello/1.2@lasote/stable -s os=Windows", ignore_error=True) + self.assertIn("'Windows' is not a valid 'settings.os' value", client.user_io.out) + self.assertIn("Possible values are ['Linux']", client.user_io.out) + + +class ExportTest(unittest.TestCase): + + def setUp(self): + self.conan = TestClient() + self.files = cpp_hello_conan_files("Hello0", "0.1") + self.conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable") + self.conan.save(self.files) + self.conan.run("export lasote/stable") + + def test_basic(self): + """ simple registration of a new conans + """ + reg_path = self.conan.paths.export(self.conan_ref) + manif = FileTreeManifest.loads(load(self.conan.paths.digestfile_conanfile(self.conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref), + self.conan.user_io.out) + self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path), self.conan.user_io.out) + 
self.assertTrue(os.path.exists(reg_path)) + + for name in list(self.files.keys()): + self.assertTrue(os.path.exists(os.path.join(reg_path, name))) + + expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'main.cpp': '0479f3c223c9a656a718f3148e044124', + 'CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': '0f623a95e7262a618ad140eb2f959c5f', + 'executable': '68b329da9893e34099c7d8ad5cb9c940', + 'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, manif.file_sums) + + def test_case_sensitive(self): + self.files = cpp_hello_conan_files("hello0", "0.1") + self.conan_ref = ConanFileReference("hello0", "0.1", "lasote", "stable") + self.conan.save(self.files) + error = self.conan.run("export lasote/stable", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: Cannot export package with same name but different case", + self.conan.user_io.out) + + def test_export_filter(self): + content = """ +from conans import ConanFile + +class OpenSSLConan(ConanFile): + name = "openssl" + version = "2.0.1" +""" + save(os.path.join(self.conan.current_folder, CONANFILE), content) + self.conan.run("export lasote/stable") + reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable')) + self.assertEqual(sorted(os.listdir(reg_path)), [CONANFILE, CONAN_MANIFEST]) + + content = """ +from conans import ConanFile + +class OpenSSLConan(ConanFile): + name = "openssl" + version = "2.0.1" + exports = ('*.txt', '*.h') +""" + save(os.path.join(self.conan.current_folder, CONANFILE), content) + self.conan.run("export lasote/stable") + reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable')) + self.assertEqual(sorted(os.listdir(reg_path)), + ['CMakeLists.txt', CONANFILE, CONAN_MANIFEST, 'helloHello0.h']) + + # Now exports being a list instead a tuple + content = """ +from conans import ConanFile + +class OpenSSLConan(ConanFile): + name = "openssl" + version = 
"2.0.1" + exports = ['*.txt', '*.h'] +""" + save(os.path.join(self.conan.current_folder, CONANFILE), content) + self.conan.run("export lasote/stable") + reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable')) + self.assertEqual(sorted(os.listdir(reg_path)), + ['CMakeLists.txt', CONANFILE, CONAN_MANIFEST, + 'helloHello0.h']) + + def test_export_the_same_code(self): + file_list = self._create_packages_and_builds() + # Export the same conans + + conan2 = TestClient(self.conan.base_folder) + files2 = cpp_hello_conan_files("Hello0", "0.1") + conan2.save(files2) + conan2.run("export lasote/stable") + reg_path2 = conan2.paths.export(self.conan_ref) + digest2 = FileTreeManifest.loads(load(conan2.paths.digestfile_conanfile(self.conan_ref))) + + self.assertNotIn('A new Conan version was exported', conan2.user_io.out) + self.assertNotIn('Cleaning the old builds ...', conan2.user_io.out) + self.assertNotIn('Cleaning the old packs ...', conan2.user_io.out) + self.assertNotIn('All the previous packs were cleaned', conan2.user_io.out) + self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref), + self.conan.user_io.out) + self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path2), self.conan.user_io.out) + self.assertTrue(os.path.exists(reg_path2)) + + for name in list(files2.keys()): + self.assertTrue(os.path.exists(os.path.join(reg_path2, name))) + + expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'main.cpp': '0479f3c223c9a656a718f3148e044124', + 'CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': '0f623a95e7262a618ad140eb2f959c5f', + 'executable': '68b329da9893e34099c7d8ad5cb9c940', + 'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, digest2.file_sums) + + for f in file_list: + self.assertTrue(os.path.exists(f)) + + def test_export_a_new_version(self): + self._create_packages_and_builds() + # Export an update of the same conans + + 
conan2 = TestClient(self.conan.base_folder) + files2 = cpp_hello_conan_files("Hello0", "0.1") + files2[CONANFILE] = "# insert comment\n %s" % files2[CONANFILE] + conan2.save(files2) + conan2.run("export lasote/stable") + + reg_path3 = conan2.paths.export(self.conan_ref) + digest3 = FileTreeManifest.loads(load(conan2.paths.digestfile_conanfile(self.conan_ref))) + + self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref), + self.conan.user_io.out) + self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path3), self.conan.user_io.out) + + self.assertTrue(os.path.exists(reg_path3)) + + for name in list(files2.keys()): + self.assertTrue(os.path.exists(os.path.join(reg_path3, name))) + + expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184', + 'main.cpp': '0479f3c223c9a656a718f3148e044124', + 'CMakeLists.txt': 'bc3405da4bb0b51a3b9f05aca71e58c8', + 'conanfile.py': '6b19dfd1241712a6c694c7c397f909ce', + 'executable': '68b329da9893e34099c7d8ad5cb9c940', + 'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'} + self.assertEqual(expected_sums, digest3.file_sums) + + # for f in file_list: + # self.assertFalse(os.path.exists(f)) + + def _create_packages_and_builds(self): + reg_builds = self.conan.paths.builds(self.conan_ref) + reg_packs = self.conan.paths.packages(self.conan_ref) + + folders = [os.path.join(reg_builds, '342525g4f52f35f'), + os.path.join(reg_builds, 'ew9o8asdf908asdf80'), + os.path.join(reg_packs, '342525g4f52f35f'), + os.path.join(reg_packs, 'ew9o8asdf908asdf80')] + + file_list = [] + for f in folders: + for name, content in {'file1.h': 'asddfasdf', 'file1.dll': 'asddfasdf'}.items(): + file_path = os.path.join(f, name) + save(file_path, content) + file_list.append(file_path) + return file_list diff --git a/testbed/conan-io__conan/conans/test/command/help_test.py b/testbed/conan-io__conan/conans/test/command/help_test.py new file mode 100644 index 
0000000000000000000000000000000000000000..5ffea68baf062377f8c6f0c3d8319c6c9aa3ac48 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/help_test.py @@ -0,0 +1,10 @@ +import unittest +from conans.test.tools import TestClient + + +class BasicTest(unittest.TestCase): + + def help_test(self): + conan = TestClient() + conan.run("") + self.assertIn('Conan commands. Type $conan "command" -h', conan.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/imports_test.py b/testbed/conan-io__conan/conans/test/command/imports_test.py new file mode 100644 index 0000000000000000000000000000000000000000..bada7911f588c5161ed0b0256922e81e68a84a54 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/imports_test.py @@ -0,0 +1,176 @@ +import unittest +from conans.test.tools import TestClient +import os +from conans.client.importer import IMPORTS_MANIFESTS +from conans.util.files import load +from conans.model.manifest import FileTreeManifest +from conans.test.utils.test_files import temp_folder + + +conanfile = """ +from conans import ConanFile +from conans.util.files import save + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + build_policy = "missing" + + def build(self): + save("file1.txt", "Hello") + save("file2.txt", "World") + + def package(self): + self.copy("file1.txt") + self.copy("file2.txt") +""" + +test1 = """[requires] +Hello/0.1@lasote/stable + +[imports] +., file* -> . 
+""" + +test2 = """ +from conans import ConanFile +from conans.util.files import save + +class HelloReuseConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def imports(self): + self.copy("*1.txt") +""" + +test3 = """ +from conans import ConanFile +from conans.util.files import save + +class HelloReuseConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def imports(self): + self.copy("*2.txt") +""" + + +class ImportsTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + self.client.save({"conanfile.py": conanfile}) + self.client.run("export lasote/stable") + + def imports_global_path_test(self): + """ Ensure that when importing files in a global path, outside the package build, + they are not deleted + """ + dst_global_folder = temp_folder().replace("\\", "/") + conanfile2 = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "Say" + version = "0.1" + requires = "Hello/0.1@lasote/stable" + + def imports(self): + self.copy("file*.txt", dst="%s") +''' % dst_global_folder + + self.client.save({"conanfile.py": conanfile2}, clean_first=True) + self.client.run("export lasote/stable") + + self.client.current_folder = temp_folder() + self.client.run("install Say/0.1@lasote/stable --build=missing") + for filename, content in [("file1.txt", "Hello"), ("file2.txt", "World")]: + filecontent = load(os.path.join(dst_global_folder, filename)) + self.assertTrue(content, filecontent) + + def imports_env_var_test(self): + conanfile2 = ''' +from conans import ConanFile +import os + +class ConanLib(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def imports(self): + self.copy("file*.txt", dst=os.environ["MY_IMPORT_PATH"]) +''' + for folder in ("folder1", "folder2"): + self.client.save({"conanfile.py": conanfile2}, clean_first=True) + self.client.run("install -e MY_IMPORT_PATH=%s" % folder) + self.assertEqual("Hello", + load(os.path.join(self.client.current_folder, folder, "file1.txt"))) + + def 
imports_error_test(self): + self.client.save({"conanfile.txt": test1}, clean_first=True) + self.client.run("install --no-imports") + self.assertNotIn("file1.txt", os.listdir(self.client.current_folder)) + self.assertNotIn("file2.txt", os.listdir(self.client.current_folder)) + + error = self.client.run("imports", ignore_error=True) + self.assertTrue(error) + self.assertIn("conanbuildinfo.txt file not found", self.client.user_io.out) + + def install_manifest_test(self): + self.client.save({"conanfile.txt": test1}, clean_first=True) + self.client.run("install -g txt") + self.assertIn("imports(): Copied 2 '.txt' files", self.client.user_io.out) + self.assertIn("file1.txt", os.listdir(self.client.current_folder)) + self.assertIn("file2.txt", os.listdir(self.client.current_folder)) + self._check_manifest() + + def undo_install_manifest_test(self): + self.client.save({"conanfile.txt": test1}, clean_first=True) + self.client.run("install -g txt") + self.client.run("imports --undo") + self.assertNotIn("file1.txt", os.listdir(self.client.current_folder)) + self.assertNotIn("file2.txt", os.listdir(self.client.current_folder)) + self.assertNotIn(IMPORTS_MANIFESTS, os.listdir(self.client.current_folder)) + self.assertIn("Removed 2 imported files", self.client.user_io.out) + self.assertIn("Removed imports manifest file", self.client.user_io.out) + + def _check_manifest(self): + manifest_content = load(os.path.join(self.client.current_folder, IMPORTS_MANIFESTS)) + manifest = FileTreeManifest.loads(manifest_content) + self.assertEqual(manifest.file_sums, + {os.path.join(self.client.current_folder, "file1.txt"): + "8b1a9953c4611296a827abf8c47804d7", + os.path.join(self.client.current_folder, "file2.txt"): + "f5a7924e621e84c9280a9a27e1bcb7f6"}) + + def imports_test(self): + self.client.save({"conanfile.txt": test1}, clean_first=True) + self.client.run("install --no-imports -g txt") + self.assertNotIn("file1.txt", os.listdir(self.client.current_folder)) + 
class InfoOptionsTest(unittest.TestCase):
    """Checks that "conan info" accepts -o options, both unscoped
    (shared=True) and package-scoped (My-Package:shared=True), and
    rejects unknown option names with an error message."""

    def info_options_test(self):
        """Options can be passed to "info"; invalid option names fail."""
        client = TestClient()
        client.run('new My-Package/1.3@myuser/testing -t')
        # assert they are correct at least
        client.run("export myuser/testing")
        client.run("info test_package")
        self.assertIn("My-Package/1.3@myuser/testing", client.user_io.out)

        # Check that I can pass options to info
        client.run("info -o shared=True")
        self.assertIn("My-Package/1.3@PROJECT", client.user_io.out)
        client.run("info -o My-Package:shared=True")
        self.assertIn("My-Package/1.3@PROJECT", client.user_io.out)
        client.run("info test_package -o My-Package:shared=True")
        self.assertIn("My-Package/1.3@myuser/testing", client.user_io.out)

        # errors: an undeclared option must be reported, in every form
        client.run("info -o shared2=True", ignore_error=True)
        self.assertIn("'options.shared2' doesn't exist", client.user_io.out)
        client.run("info -o My-Package:shared2=True", ignore_error=True)
        self.assertIn("'options.shared2' doesn't exist", client.user_io.out)
        client.run("info test_package -o My-Package:shared2=True", ignore_error=True)
        self.assertIn("'options.shared2' doesn't exist", client.user_io.out)
class InfoTest(unittest.TestCase):
    """Tests for "conan info": --only filtering, full dependency report,
    and --build_order / -bo computation (including dev_requires scopes).

    NOTE(review): bodies reconstructed from a line-collapsed patch; the
    internal indentation of dedented expected-output blocks and of the
    replacement snippets is a best-effort reconstruction — verify against
    the real `conan info` output format.
    """

    def _create(self, number, version, deps=None, deps_dev=None, export=True):
        # Build a hello-world package; optionally inject dev_requires and
        # url/license attributes (Hello2 gets a two-license tuple).
        files = cpp_hello_conan_files(number, version, deps, build=False)
        files[CONANFILE] = files[CONANFILE].replace("config(", "configure(")
        if deps_dev:
            files[CONANFILE] = files[CONANFILE].replace("exports = '*'", """exports = '*'
    dev_requires=%s
""" % ",".join('"%s"' % d for d in deps_dev))

        self.client.save(files, clean_first=True)
        if export:
            self.client.run("export lasote/stable")
            # First export lacks url/license/description -> warnings expected
            expected_output = textwrap.dedent(
                """\
                WARN: Conanfile doesn't have 'url'.
                It is recommended to add it as attribute
                WARN: Conanfile doesn't have 'license'.
                It is recommended to add it as attribute
                WARN: Conanfile doesn't have 'description'.
                It is recommended to add it as attribute""")
            self.assertIn(expected_output, self.client.user_io.out)

        if number != "Hello2":
            files[CONANFILE] = files[CONANFILE].replace('version = "0.1"',
                                                        'version = "0.1"\n'
                                                        '    url= "myurl"\n'
                                                        '    license = "MIT"')
        else:
            files[CONANFILE] = files[CONANFILE].replace('version = "0.1"',
                                                        'version = "0.1"\n'
                                                        '    url= "myurl"\n'
                                                        '    license = "MIT", "GPL"')

        self.client.save(files)
        if export:
            self.client.run("export lasote/stable")
            # After adding the attributes, the 'url' warning must disappear
            self.assertNotIn("WARN: Conanfile doesn't have 'url'", self.client.user_io.out)

    def only_names_test(self):
        """--only with no value prints just the references; --only=date adds
        one date line per exported (non-project) reference."""
        self.client = TestClient()
        self._create("Hello0", "0.1")
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)

        self.client.run("info --only")
        self.assertEqual(["Hello2/0.1@PROJECT", "Hello0/0.1@lasote/stable",
                          "Hello1/0.1@lasote/stable"], str(self.client.user_io.out).splitlines())
        self.client.run("info --only=date")
        # Normalize the variable timestamp lines to a fixed "Date" marker
        lines = [(line if "date" not in line else "Date")
                 for line in str(self.client.user_io.out).splitlines()]
        self.assertEqual(["Hello2/0.1@PROJECT", "Hello0/0.1@lasote/stable", "Date",
                          "Hello1/0.1@lasote/stable", "Date"], lines)

    def reuse_test(self):
        """Full "info -u" report: urls, licenses, requires/required-by and
        update status; then --only filtering of the same report."""
        self.client = TestClient()
        self._create("Hello0", "0.1")
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)

        self.client.run("info -u")

        self.assertIn("Creation date: ", self.client.user_io.out)

        expected_output = textwrap.dedent(
            """\
            Hello2/0.1@PROJECT
                URL: myurl
                Licenses: MIT, GPL
                Requires:
                    Hello1/0.1@lasote/stable
            Hello0/0.1@lasote/stable
                Remote: None
                URL: myurl
                License: MIT
                Updates: You have the latest version (None)
                Required by:
                    Hello1/0.1@lasote/stable
            Hello1/0.1@lasote/stable
                Remote: None
                URL: myurl
                License: MIT
                Updates: You have the latest version (None)
                Required by:
                    Hello2/0.1@PROJECT
                Requires:
                    Hello0/0.1@lasote/stable""")

        def clean_dates(output):
            # Strip the "Creation date" lines, which vary run to run
            return "\n".join([line for line in str(output).splitlines()
                              if not line.strip().startswith("Creation date")])

        # The timestamp is variable so we can't check the equality
        self.assertIn(expected_output, clean_dates(self.client.user_io.out))

        self.client.run("info -u --only=url")
        expected_output = textwrap.dedent(
            """\
            Hello2/0.1@PROJECT
                URL: myurl
            Hello0/0.1@lasote/stable
                URL: myurl
            Hello1/0.1@lasote/stable
                URL: myurl""")
        self.assertIn(expected_output, clean_dates(self.client.user_io.out))
        self.client.run("info -u --only=url,license")
        expected_output = textwrap.dedent(
            """\
            Hello2/0.1@PROJECT
                URL: myurl
                Licenses: MIT, GPL
            Hello0/0.1@lasote/stable
                URL: myurl
                License: MIT
            Hello1/0.1@lasote/stable
                URL: myurl
                License: MIT""")
        self.assertIn(expected_output, clean_dates(self.client.user_io.out))

    def build_order_test(self):
        """-bo lists, in build order, the groups affected by rebuilding the
        given reference(s); multiple -bo flags are merged."""
        self.client = TestClient()
        self._create("Hello0", "0.1")
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)

        self.client.run("info -bo=Hello0/0.1@lasote/stable")
        self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
                      self.client.user_io.out)

        self.client.run("info -bo=Hello1/0.1@lasote/stable")
        self.assertIn("[Hello1/0.1@lasote/stable]", self.client.user_io.out)

        self.client.run("info -bo=Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
        self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
                      self.client.user_io.out)

        self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
        self.assertEqual("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]\n",
                         self.client.user_io.out)

    def diamond_build_order_test(self):
        """Build order over a diamond graph; dev_requires only enter the
        order when their scope is enabled (--scope=Lib:dev=True)."""
        self.client = TestClient()
        self._create("LibA", "0.1")
        self._create("Dev1", "0.1")
        self._create("LibE", "0.1", deps_dev=["Dev1/0.1@lasote/stable"])
        self._create("LibF", "0.1")
        self._create("LibG", "0.1")
        self._create("Dev2", "0.1", deps=["LibG/0.1@lasote/stable"])

        self._create("LibB", "0.1", ["LibA/0.1@lasote/stable", "LibE/0.1@lasote/stable"])
        self._create("LibC", "0.1", ["LibA/0.1@lasote/stable", "LibF/0.1@lasote/stable"],
                     deps_dev=["Dev2/0.1@lasote/stable"])

        self._create("LibD", "0.1", ["LibB/0.1@lasote/stable", "LibC/0.1@lasote/stable"],
                     export=False)

        self.client.run("info -bo=LibA/0.1@lasote/stable")
        self.assertIn("[LibA/0.1@lasote/stable], "
                      "[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                      self.client.user_io.out)
        self.client.run("info -bo=LibB/0.1@lasote/stable")
        self.assertIn("[LibB/0.1@lasote/stable]", self.client.user_io.out)
        self.client.run("info -bo=LibE/0.1@lasote/stable")
        self.assertIn("[LibE/0.1@lasote/stable], [LibB/0.1@lasote/stable]",
                      self.client.user_io.out)
        self.client.run("info -bo=LibF/0.1@lasote/stable")
        self.assertIn("[LibF/0.1@lasote/stable], [LibC/0.1@lasote/stable]",
                      self.client.user_io.out)
        # dev-only dependencies produce an empty order unless scoped in
        self.client.run("info -bo=Dev1/0.1@lasote/stable")
        self.assertEqual("\n", self.client.user_io.out)
        self.client.run("info --scope=LibE:dev=True -bo=Dev1/0.1@lasote/stable")
        self.assertIn("[Dev1/0.1@lasote/stable], [LibE/0.1@lasote/stable], "
                      "[LibB/0.1@lasote/stable]", self.client.user_io.out)
        self.client.run("info -bo=LibG/0.1@lasote/stable")
        self.assertEqual("\n", self.client.user_io.out)
        self.client.run("info --scope=LibC:dev=True -bo=LibG/0.1@lasote/stable")
        self.assertIn("[LibG/0.1@lasote/stable], [Dev2/0.1@lasote/stable], "
                      "[LibC/0.1@lasote/stable]", self.client.user_io.out)

        self.client.run("info --build_order=ALL")
        self.assertIn("[LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, LibF/0.1@lasote/stable], "
                      "[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                      self.client.user_io.out)

        self.client.run("info --build_order=ALL --scope=ALL:dev=True")
        self.assertIn("[Dev1/0.1@lasote/stable, LibG/0.1@lasote/stable], "
                      "[Dev2/0.1@lasote/stable, LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, "
                      "LibF/0.1@lasote/stable], [LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
                      self.client.user_io.out)
class InstallSubfolderTest(unittest.TestCase):
    """Install/build from a sibling subfolder ("install .." / "build ..")
    must produce the same conaninfo/conanbuildinfo as an in-place install.

    The h00/h10/h01/h11 constants are precomputed package IDs for
    (Hello0, Hello1) x (language=0, language=1).
    """

    def setUp(self):
        self.client = TestClient()
        # Fixed settings so package IDs are deterministic across runs
        self.settings = ("-s os=Windows -s compiler='Visual Studio' -s compiler.version=12 "
                         "-s arch=x86 -s compiler.runtime=MD")

    def _create(self, number, version, deps=None, export=True):
        # Hello package whose build() echoes its settings/options, so the
        # later "build .." assertions can check what was configured.
        files = cpp_hello_conan_files(number, version, deps, build=False)

        files[CONANFILE] = files[CONANFILE] + """    def build(self):
        self.output.info("Settings %s" % self.settings.values.dumps())
        self.output.info("Options %s" % self.options.values.dumps())
    """
        self.client.save(files, clean_first=True)
        if export:
            self.client.run("export lasote/stable")

    def reuse_test(self):
        """Install Hello2 from langNbuild subfolders with each language
        option, then run "build .." from the same subfolders."""
        self._create("Hello0", "0.1")
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)

        current_folder = self.client.current_folder
        h00 = "2e38bbc2c3ef1425197c8e2ffa8532894c347d26"
        h10 = "44671ecdd9c606eb7166f2197ab50be8d36a3c3b"
        h01 = "8b964e421a5b7e48b7bc19b94782672be126be8b"
        h11 = "3eeab577a3134fa3afdcd82881751789ec48e08f"
        # id0/id1 are the IDs expected for this language; id2/id3 are the
        # other language's IDs and must NOT appear
        for lang, id0, id1, id2, id3 in [(0, h00, h10, h01, h11),
                                         (1, h01, h11, h00, h10)]:
            self.client.current_folder = os.path.join(current_folder, "lang%dbuild" % lang)
            mkdir(self.client.current_folder)
            self.client.run("install .. -o language=%d %s --build missing" % (lang, self.settings))
            info_path = os.path.join(self.client.current_folder, CONANINFO)
            conan_info = ConanInfo.load_file(info_path)
            self.assertEqual("arch=x86\n"
                             "compiler=Visual Studio\n"
                             "compiler.runtime=MD\n"
                             "compiler.version=12\n"
                             "os=Windows",
                             conan_info.settings.dumps())
            conan_info_text = load(info_path)
            self.assertIn(id0, conan_info_text)
            self.assertIn(id1, conan_info_text)
            self.assertNotIn(id2, conan_info_text)
            self.assertNotIn(id3, conan_info_text)
            self.assertEqual("language=%s\nstatic=True" % lang, conan_info.options.dumps())
            build_cmake = os.path.join(self.client.current_folder, BUILD_INFO_CMAKE)
            build_cmake_text = load(build_cmake)
            self.assertIn(id0, build_cmake_text)
            self.assertIn(id1, build_cmake_text)
            self.assertNotIn(id2, build_cmake_text)
            self.assertNotIn(id3, build_cmake_text)

        # Now test "build" command in subfolders
        for lang, id0, id1, id2, id3 in [(0, h00, h10, h01, h11),
                                         (1, h01, h11, h00, h10)]:
            self.client.current_folder = os.path.join(current_folder, "lang%dbuild" % lang)
            self.client.run("build ..")
            self.assertIn("compiler=Visual Studio", self.client.user_io.out)
            self.assertIn("language=%d" % lang, self.client.user_io.out)
            # (not lang) flips 0<->1: the other language must not leak in
            self.assertNotIn("language=%d" % (not lang), self.client.user_io.out)
class InstallTest(unittest.TestCase):
    """Tests for "conan install": option propagation (plain, scoped
    Pkg:opt, via conanfile.txt), resulting package IDs, conaninfo
    contents, and the wrong-OS warning.

    The long hex strings are precomputed package IDs for the hello
    packages under the fixed settings with language=0 or language=1.
    """

    def setUp(self):
        self.client = TestClient()
        # Fixed settings so package IDs are deterministic across runs
        self.settings = ("-s os=Windows -s compiler='Visual Studio' -s compiler.version=12 "
                         "-s arch=x86 -s compiler.runtime=MD")

    def _create(self, number, version, deps=None, export=True, no_config=False):
        # Helper: save (and optionally export) a generated hello package
        files = cpp_hello_conan_files(number, version, deps, build=False, config=not no_config)

        self.client.save(files, clean_first=True)
        if export:
            self.client.run("export lasote/stable")

    def reuse_test(self):
        """Plain -o language=N propagates to every package in the graph."""
        self._create("Hello0", "0.1")
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)

        for lang, id0, id1 in [(0, "2e38bbc2c3ef1425197c8e2ffa8532894c347d26",
                                "44671ecdd9c606eb7166f2197ab50be8d36a3c3b"),
                               (1, "8b964e421a5b7e48b7bc19b94782672be126be8b",
                                "3eeab577a3134fa3afdcd82881751789ec48e08f")]:

            self.client.run("install -o language=%d %s --build missing" % (lang, self.settings))
            info_path = os.path.join(self.client.current_folder, CONANINFO)
            conan_info = ConanInfo.load_file(info_path)
            self.assertEqual("arch=x86\n"
                             "compiler=Visual Studio\n"
                             "compiler.runtime=MD\n"
                             "compiler.version=12\n"
                             "os=Windows",
                             conan_info.settings.dumps())
            self.assertEqual("language=%s\nstatic=True" % lang, conan_info.options.dumps())
            conan_ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")

            # Both dependency packages must have been built with this lang
            hello0 = self.client.paths.package(PackageReference(conan_ref, id0))
            hello0_info = os.path.join(hello0, CONANINFO)
            hello0_conan_info = ConanInfo.load_file(hello0_info)
            self.assertEqual(lang, hello0_conan_info.options.language)

            package_ref1 = PackageReference(ConanFileReference.loads("Hello1/0.1@lasote/stable"),
                                            id1)
            hello1 = self.client.paths.package(package_ref1)
            hello1_info = os.path.join(hello1, CONANINFO)
            hello1_conan_info = ConanInfo.load_file(hello1_info)
            self.assertEqual(lang, hello1_conan_info.options.language)

    def upper_option_test(self):
        """Scoped Pkg:option values from the command line reach each
        package individually."""
        self._create("Hello0", "0.1", no_config=True)
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], no_config=True)
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False, no_config=True)

        self.client.run("install -o Hello2:language=1 -o Hello1:language=0 -o Hello0:language=1 %s"
                        " --build missing" % self.settings)
        info_path = os.path.join(self.client.current_folder, CONANINFO)
        conan_info = ConanInfo.load_file(info_path)
        self.assertEqual("language=1\nstatic=True", conan_info.options.dumps())
        conan_ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")

        hello0 = self.client.paths.package(PackageReference(conan_ref,
                                           "8b964e421a5b7e48b7bc19b94782672be126be8b"))
        hello0_info = os.path.join(hello0, CONANINFO)
        hello0_conan_info = ConanInfo.load_file(hello0_info)
        self.assertEqual(1, hello0_conan_info.options.language)

        package_ref1 = PackageReference(ConanFileReference.loads("Hello1/0.1@lasote/stable"),
                                        "44671ecdd9c606eb7166f2197ab50be8d36a3c3b")
        hello1 = self.client.paths.package(package_ref1)
        hello1_info = os.path.join(hello1, CONANINFO)
        hello1_conan_info = ConanInfo.load_file(hello1_info)
        self.assertEqual(0, hello1_conan_info.options.language)

    def inverse_upper_option_test(self):
        """Scoped options override the unscoped default (-o language=0)."""
        self._create("Hello0", "0.1", no_config=True)
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], no_config=True)
        self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False, no_config=True)

        self.client.run("install -o language=0 -o Hello1:language=1 -o Hello0:language=0 %s "
                        "--build missing" % self.settings)
        info_path = os.path.join(self.client.current_folder, CONANINFO)

        conan_info = ConanInfo.load_file(info_path)

        self.assertEqual("language=0\nstatic=True", conan_info.options.dumps())
        conan_ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")

        hello0 = self.client.paths.package(PackageReference(conan_ref,
                                           "2e38bbc2c3ef1425197c8e2ffa8532894c347d26"))
        hello0_info = os.path.join(hello0, CONANINFO)
        hello0_conan_info = ConanInfo.load_file(hello0_info)
        self.assertEqual("language=0\nstatic=True", hello0_conan_info.options.dumps())

        package_ref1 = PackageReference(ConanFileReference.loads("Hello1/0.1@lasote/stable"),
                                        "3eeab577a3134fa3afdcd82881751789ec48e08f")
        hello1 = self.client.paths.package(package_ref1)
        hello1_info = os.path.join(hello1, CONANINFO)
        hello1_conan_info = ConanInfo.load_file(hello1_info)
        self.assertEqual("language=1\nstatic=True", hello1_conan_info.options.dumps())

    def upper_option_txt_test(self):
        """Scoped options declared in a conanfile.txt [options] section."""
        self._create("Hello0", "0.1", no_config=True)
        self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], no_config=True)

        files = cpp_hello_conan_files("Hello2", "0.1", ["Hello1/0.1@lasote/stable"])
        files.pop(CONANFILE)
        files[CONANFILE_TXT] = """[requires]
        Hello1/0.1@lasote/stable

        [options]
        Hello0:language=1
        Hello1:language=0
        """
        self.client.save(files, clean_first=True)

        self.client.run("install %s --build missing" % self.settings)
        info_path = os.path.join(self.client.current_folder, CONANINFO)
        conan_info = ConanInfo.load_file(info_path)
        # The txt project itself has no options of its own
        self.assertEqual("", conan_info.options.dumps())
        conan_ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")

        hello0 = self.client.paths.package(PackageReference(conan_ref,
                                           "8b964e421a5b7e48b7bc19b94782672be126be8b"))
        hello0_info = os.path.join(hello0, CONANINFO)
        hello0_conan_info = ConanInfo.load_file(hello0_info)
        self.assertEqual(1, hello0_conan_info.options.language)

        package_ref1 = PackageReference(ConanFileReference.loads("Hello1/0.1@lasote/stable"),
                                        "44671ecdd9c606eb7166f2197ab50be8d36a3c3b")
        hello1 = self.client.paths.package(package_ref1)
        hello1_info = os.path.join(hello1, CONANINFO)
        hello1_conan_info = ConanInfo.load_file(hello1_info)
        self.assertEqual(0, hello1_conan_info.options.language)

    def change_option_txt_test(self):
        """Changing [options] in conanfile.txt: the cached conaninfo keeps
        the old full_options until the folder is cleaned."""
        self._create("Hello0", "0.1")

        client = TestClient(base_folder=self.client.base_folder)
        files = {CONANFILE_TXT: """[requires]
        Hello0/0.1@lasote/stable

        [options]
        Hello0:language=1
        """}
        client.save(files)

        client.run("install %s --build missing" % self.settings)
        info_path = os.path.join(client.current_folder, CONANINFO)
        conan_info = ConanInfo.load_file(info_path)
        self.assertEqual("", conan_info.options.dumps())
        self.assertIn("Hello0:language=1", conan_info.full_options.dumps())
        self.assertIn("Hello0/0.1@lasote/stable:8b964e421a5b7e48b7bc19b94782672be126be8b",
                      conan_info.full_requires.dumps())

        files = {CONANFILE_TXT: """[requires]
        Hello0/0.1@lasote/stable

        [options]
        Hello0:language=0
        """}
        client.save(files)
        client.run("install %s --build missing" % self.settings)

        info_path = os.path.join(client.current_folder, CONANINFO)
        conan_info = ConanInfo.load_file(info_path)
        self.assertEqual("", conan_info.options.dumps())
        # Stale value: previous conaninfo still cached in the folder
        self.assertIn("Hello0:language=1", conan_info.full_options.dumps())

        # it is necessary to clean the cached conaninfo
        client.save(files, clean_first=True)
        client.run("install %s --build missing" % self.settings)
        conan_info = ConanInfo.load_file(info_path)
        self.assertEqual("", conan_info.options.dumps())
        self.assertIn("Hello0:language=0", conan_info.full_options.dumps())
        self.assertIn("Hello0/0.1@lasote/stable:2e38bbc2c3ef1425197c8e2ffa8532894c347d26",
                      conan_info.full_requires.dumps())

    def warn_bad_os_test(self):
        """Installing with an os different from the detected one warns;
        with the detected os it does not."""
        bad_os = "Linux" if platform.system() != "Linux" else "Macos"
        message = "You are building this package with settings.os='%s" % bad_os
        self._create("Hello0", "0.1")
        self.client.run("install Hello0/0.1@lasote/stable -s os=%s" % bad_os, ignore_error=True)
        self.assertIn(message, self.client.user_io.out)

        self.client.run("install Hello0/0.1@lasote/stable -s os=%s" % detected_os(),
                        ignore_error=True)
        self.assertNotIn("You are building this package with settings.os", self.client.user_io.out)
class NewTest(unittest.TestCase):
    """Tests for the "conan new" scaffolding command."""

    def new_test(self):
        """'conan new ref -t' generates a conanfile.py with the given
        name/version plus a test_package folder, and the result can be
        exported and inspected with "conan info"."""
        client = TestClient()
        client.run('new MyPackage/1.3@myuser/testing -t')
        root = client.current_folder
        self.assertTrue(os.path.exists(os.path.join(root, "conanfile.py")))
        content = load(os.path.join(root, "conanfile.py"))
        self.assertIn('name = "MyPackage"', content)
        self.assertIn('version = "1.3"', content)
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/conanfile.py")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/CMakeLists.txt")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/example.cpp")))
        # assert they are correct at least
        client.run("export myuser/testing")
        client.run("info test_package")
        self.assertIn("MyPackage/1.3@myuser/testing", client.user_io.out)

    def new_dash_test(self):
        """ packages with dash in the name are scaffolded and exportable
        """
        client = TestClient()
        client.run('new My-Package/1.3@myuser/testing -t')
        root = client.current_folder
        self.assertTrue(os.path.exists(os.path.join(root, "conanfile.py")))
        content = load(os.path.join(root, "conanfile.py"))
        self.assertIn('name = "My-Package"', content)
        self.assertIn('version = "1.3"', content)
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/conanfile.py")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/CMakeLists.txt")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/example.cpp")))
        # assert they are correct at least
        client.run("export myuser/testing")
        client.run("info test_package")
        self.assertIn("My-Package/1.3@myuser/testing", client.user_io.out)

    def new_header_test(self):
        """'conan new -t -i' (header-only template) still produces the full
        scaffold with test_package."""
        client = TestClient()
        client.run('new MyPackage/1.3@myuser/testing -t -i')
        root = client.current_folder
        self.assertTrue(os.path.exists(os.path.join(root, "conanfile.py")))
        content = load(os.path.join(root, "conanfile.py"))
        self.assertIn('name = "MyPackage"', content)
        self.assertIn('version = "1.3"', content)
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/conanfile.py")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/CMakeLists.txt")))
        self.assertTrue(os.path.exists(os.path.join(root, "test_package/example.cpp")))
        # assert they are correct at least
        client.run("export myuser/testing")
        client.run("info test_package")
        self.assertIn("MyPackage/1.3@myuser/testing", client.user_io.out)

    def new_without_test(self):
        """Without -t no test_package folder is generated."""
        client = TestClient()
        client.run('new MyPackage/1.3@myuser/testing')
        root = client.current_folder
        self.assertTrue(os.path.exists(os.path.join(root, "conanfile.py")))
        self.assertFalse(os.path.exists(os.path.join(root, "test_package/conanfile.py")))
        self.assertFalse(os.path.exists(os.path.join(root, "test_package/CMakeLists.txt")))
        self.assertFalse(os.path.exists(os.path.join(root, "test_package/example.cpp")))
"test_package/conanfile.py"))) + self.assertFalse(os.path.exists(os.path.join(root, "test_package/CMakeLists.txt"))) + self.assertFalse(os.path.exists(os.path.join(root, "test_package/example.cpp"))) diff --git a/testbed/conan-io__conan/conans/test/command/profile_test.py b/testbed/conan-io__conan/conans/test/command/profile_test.py new file mode 100644 index 0000000000000000000000000000000000000000..4ff397279f17eb0b1608d0dc8bb67be73d673de3 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/profile_test.py @@ -0,0 +1,34 @@ +import unittest +from conans.test.tools import TestClient +from conans.test.utils.profiles import create_profile + + +class ProfileTest(unittest.TestCase): + + def empty_test(self): + client = TestClient() + client.run("profile list") + self.assertIn("No profiles defined", client.user_io.out) + + def list_test(self): + client = TestClient() + create_profile(client.client_cache.profiles_path, "profile1") + create_profile(client.client_cache.profiles_path, "profile2") + create_profile(client.client_cache.profiles_path, "profile3") + client.run("profile list") + self.assertEqual(set(["profile1", "profile2", "profile3"]), + set(str(client.user_io.out).splitlines())) + + def show_test(self): + client = TestClient() + create_profile(client.client_cache.profiles_path, "profile1", settings={"os": "Windows"}) + create_profile(client.client_cache.profiles_path, "profile2", scopes={"test": True}) + create_profile(client.client_cache.profiles_path, "profile3", + env=[("CXX", "/path/tomy/g++_build"), ("CC", "/path/tomy/gcc_build")]) + client.run("profile show profile1") + self.assertIn(" os: Windows", client.user_io.out) + client.run("profile show profile2") + self.assertIn(" test=True", client.user_io.out) + client.run("profile show profile3") + self.assertIn(" CC: /path/tomy/gcc_build", client.user_io.out) + self.assertIn(" CXX: /path/tomy/g++_build", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/remote_test.py 
class RemoteTest(unittest.TestCase):
    """Tests for the "conan remote" command family: add/update/remove,
    verify-ssl flag parsing, duplicate detection, and the per-reference
    remote bindings (add_ref/list_ref/remove_ref/update_ref)."""

    def setUp(self):
        # Three preconfigured remotes, remote0..remote2, kept in insertion
        # order so splitlines()[0] checks below are deterministic.
        self.servers = OrderedDict()
        self.users = {}
        for i in range(3):
            test_server = TestServer()
            self.servers["remote%d" % i] = test_server
            self.users["remote%d" % i] = [("lasote", "mypass")]

        self.client = TestClient(servers=self.servers, users=self.users)

    def basic_test(self):
        """add / update / remove a remote and check the listing order."""
        self.client.run("remote list")
        self.assertIn("remote0: http://", self.client.user_io.out)
        self.assertIn("remote1: http://", self.client.user_io.out)
        self.assertIn("remote2: http://", self.client.user_io.out)

        self.client.run("remote add origin https://myurl")
        self.client.run("remote list")
        self.assertIn("origin: https://myurl", self.client.user_io.out)

        self.client.run("remote update origin https://2myurl")
        self.client.run("remote list")
        self.assertIn("origin: https://2myurl", self.client.user_io.out)

        self.client.run("remote update remote0 https://remote0url")
        self.client.run("remote list")
        output = str(self.client.user_io.out)
        # update must keep remote0 in first position
        self.assertIn("remote0: https://remote0url", output.splitlines()[0])

        self.client.run("remote remove remote0")
        self.client.run("remote list")
        output = str(self.client.user_io.out)
        # after removal, remote1 becomes the first listed remote
        self.assertIn("remote1: http://", output.splitlines()[0])

    def verify_ssl_test(self):
        """The verify-ssl argument accepts several boolean spellings,
        case-insensitively, and is normalized to True/False in the registry."""
        client = TestClient()
        client.run("remote add my-remote http://someurl TRUE")
        client.run("remote add my-remote2 http://someurl2 yes")
        client.run("remote add my-remote3 http://someurl3 FALse")
        client.run("remote add my-remote4 http://someurl4 No")
        registry = load(client.client_cache.registry)
        self.assertIn("my-remote http://someurl True", registry)
        self.assertIn("my-remote2 http://someurl2 True", registry)
        self.assertIn("my-remote3 http://someurl3 False", registry)
        self.assertIn("my-remote4 http://someurl4 False", registry)

    def verify_ssl_error_test(self):
        """An unrecognized verify-ssl value fails and leaves the registry
        untouched."""
        client = TestClient()
        error = client.run("remote add my-remote http://someurl some_invalid_option=foo",
                           ignore_error=True)
        self.assertTrue(error)
        self.assertIn("ERROR: Unrecognized boolean value 'some_invalid_option=foo'",
                      client.user_io.out)
        self.assertEqual("", load(client.client_cache.registry))

    def errors_test(self):
        """update/remove on a non-existing remote must report an error."""
        self.client.run("remote update origin url", ignore_error=True)
        self.assertIn("ERROR: Remote 'origin' not found in remotes", self.client.user_io.out)

        self.client.run("remote remove origin", ignore_error=True)
        self.assertIn("ERROR: Remote 'origin' not found in remotes", self.client.user_io.out)

    def duplicated_error_tests(self):
        """ check remote name and URL are not duplicated
        """
        error = self.client.run("remote add remote1 http://otherurl", ignore_error=True)
        self.assertTrue(error)
        self.assertIn("ERROR: Remote 'remote1' already exists in remotes (use update to modify)",
                      self.client.user_io.out)

        self.client.run("remote list")
        # second whitespace-separated token of the listing is remote0's URL
        url = str(self.client.user_io.out).split()[1]
        error = self.client.run("remote add newname %s" % url, ignore_error=True)
        self.assertTrue(error)
        self.assertIn("Remote 'remote0' already exists with same URL",
                      self.client.user_io.out)

        error = self.client.run("remote update remote1 %s" % url, ignore_error=True)
        self.assertTrue(error)
        self.assertIn("Remote 'remote0' already exists with same URL",
                      self.client.user_io.out)

    def basic_refs_test(self):
        """add_ref/list_ref/remove_ref/update_ref manage the mapping from
        package references to remotes."""
        self.client.run("remote add_ref Hello/0.1@user/testing remote0")
        self.client.run("remote list_ref")
        self.assertIn("Hello/0.1@user/testing: remote0", self.client.user_io.out)

        self.client.run("remote add_ref Hello1/0.1@user/testing remote1")
        self.client.run("remote list_ref")
        self.assertIn("Hello/0.1@user/testing: remote0", self.client.user_io.out)
        self.assertIn("Hello1/0.1@user/testing: remote1", self.client.user_io.out)

        self.client.run("remote remove_ref Hello1/0.1@user/testing")
        self.client.run("remote list_ref")
        self.assertIn("Hello/0.1@user/testing: remote0", self.client.user_io.out)
        self.assertNotIn("Hello1/0.1@user/testing", self.client.user_io.out)

        self.client.run("remote add_ref Hello1/0.1@user/testing remote1")
        self.client.run("remote list_ref")
        self.assertIn("Hello/0.1@user/testing: remote0", self.client.user_io.out)
        self.assertIn("Hello1/0.1@user/testing: remote1", self.client.user_io.out)

        self.client.run("remote update_ref Hello1/0.1@user/testing remote2")
        self.client.run("remote list_ref")
        self.assertIn("Hello/0.1@user/testing: remote0", self.client.user_io.out)
        self.assertIn("Hello1/0.1@user/testing: remote2", self.client.user_io.out)
+ hello_files = cpp_hello_conan_files("Hello") + client = TestClient() + client.save(hello_files) + client.run("export lasote/stable") + path = os.path.join(client.storage_folder, "Hello/0.1/lasote/stable") + self.assertTrue(os.path.exists(path)) + client.run("export lasote2/stable") + path = os.path.join(client.storage_folder, "Hello/0.1/lasote2/stable") + self.assertTrue(os.path.exists(path)) + client.run("remove Hello/0.1@lasote/stable -f") + path = os.path.join(client.storage_folder, "Hello/0.1/lasote") + self.assertFalse(os.path.exists(path)) + path = os.path.join(client.storage_folder, "Hello/0.1") + self.assertTrue(os.path.exists(path)) diff --git a/testbed/conan-io__conan/conans/test/command/remove_test.py b/testbed/conan-io__conan/conans/test/command/remove_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8531afd06ba5419d65548d310cb731b0e45f10a7 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/remove_test.py @@ -0,0 +1,252 @@ +import unittest +from conans.test.tools import TestClient, TestBufferConanOutput, TestServer +from conans.paths import PACKAGES_FOLDER, EXPORT_FOLDER, BUILD_FOLDER, SRC_FOLDER, CONANFILE,\ + CONAN_MANIFEST, CONANINFO +import os +from mock import Mock +from conans.client.userio import UserIO +from conans.test.utils.test_files import temp_folder +import six +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.ref import PackageReference, ConanFileReference +from conans.model.manifest import FileTreeManifest + + +class RemoveTest(unittest.TestCase): + + def setUp(self): + hello_files = cpp_hello_conan_files("Hello") + test_conanfile_contents = hello_files[CONANFILE] + + self.server_folder = temp_folder() + test_server = TestServer(users={"fenix": "mypass"}, + base_path=self.server_folder) # exported users and passwords + self.server = test_server + servers = {"default": test_server} + client = TestClient(servers=servers, users={"default": [("fenix", 
"mypass")]}) + + # Conans with and without packages created + self.root_folder = {"H1": 'Hello/1.4.10/fenix/testing', + "H2": 'Hello/2.4.11/fenix/testing', + "B": 'Bye/0.14/fenix/testing', + "O": 'Other/1.2/fenix/testing'} + + files = {} + pack_refs = [] + for key, folder in self.root_folder.items(): + ref = ConanFileReference.loads(folder) + files["%s/%s/conanfile.py" % (folder, EXPORT_FOLDER)] = test_conanfile_contents + files["%s/%s/conanmanifest.txt" % (folder, EXPORT_FOLDER)] = "" + files["%s/%s/conans.txt" % (folder, SRC_FOLDER)] = "" + for pack_id in (1, 2): + pack_id = "%s_%s" % (pack_id, key) + pack_refs.append(PackageReference(ref, str(pack_id))) + files["%s/%s/%s/conans.txt" % (folder, BUILD_FOLDER, pack_id)] = "" + files["%s/%s/%s/conans.txt" % (folder, PACKAGES_FOLDER, pack_id)] = "" + files["%s/%s/%s/%s" % (folder, PACKAGES_FOLDER, pack_id, CONANINFO)] = "" + files["%s/%s/%s/%s" % (folder, PACKAGES_FOLDER, pack_id, CONAN_MANIFEST)] = "" + + client.save(files, client.client_cache.store) + + # Create the manifests to be able to upload + for pack_ref in pack_refs: + digest_path = client.client_cache.digestfile_package(pack_ref) + expected_manifest = FileTreeManifest.create(os.path.dirname(digest_path)) + files["%s/%s/%s/%s" % ("/".join(pack_ref.conan), + PACKAGES_FOLDER, + pack_ref.package_id, + CONAN_MANIFEST)] = str(expected_manifest) + + client.save(files, client.client_cache.store) + + self.client = client + + for folder in self.root_folder.values(): + client.run("upload %s --all" % folder.replace("/fenix", "@fenix")) + + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def assert_folders(self, local_folders, remote_folders, build_folders, src_folders): + for base_path, folders in [(self.client.paths, local_folders), + (self.server.paths, remote_folders)]: 
+ root_folder = base_path.store + for k, shas in folders.items(): + folder = os.path.join(root_folder, self.root_folder[k]) + if shas is None: + self.assertFalse(os.path.exists(folder)) + else: + for value in (1, 2): + sha = "%s_%s" % (value, k) + package_folder = os.path.join(folder, "package", sha) + if value in shas: + self.assertTrue(os.path.exists(package_folder)) + else: + self.assertFalse(os.path.exists(package_folder)) + + root_folder = self.client.paths.store + for k, shas in build_folders.items(): + folder = os.path.join(root_folder, self.root_folder[k]) + if shas is None: + self.assertFalse(os.path.exists(folder)) + else: + for value in (1, 2): + sha = "%s_%s" % (value, k) + build_folder = os.path.join(folder, "build", sha) + if value in shas: + self.assertTrue(os.path.exists(build_folder)) + else: + self.assertFalse(os.path.exists(build_folder)) + for k, value in src_folders.items(): + folder = os.path.join(root_folder, self.root_folder[k], "source") + if value: + self.assertTrue(os.path.exists(folder)) + else: + self.assertFalse(os.path.exists(folder)) + + def basic_test(self): + self.client.run("remove hello/* -f") + self.assert_folders({"H1": None, "H2": None, "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": None, "H2": None, "B": [1, 2], "O": [1, 2]}, + {"H1": False, "H2": False, "B": True, "O": True}) + folders = os.listdir(self.client.storage_folder) + six.assertCountEqual(self, ["Other", "Bye"], folders) + + def basic_mocked_test(self): + mocked_user_io = UserIO(out=TestBufferConanOutput()) + mocked_user_io.request_boolean = Mock(return_value=True) + self.client.run("remove hello/*", user_io=mocked_user_io) + self.assert_folders({"H1": None, "H2": None, "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": None, "H2": None, "B": [1, 2], "O": [1, 2]}, + {"H1": False, "H2": False, "B": True, "O": True}) + folders = os.listdir(self.client.storage_folder) + 
six.assertCountEqual(self, ["Other", "Bye"], folders) + + def basic_packages_test(self): + self.client.run("remove hello/* -p -f") + self.assert_folders({"H1": [], "H2": [], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + folders = os.listdir(self.client.storage_folder) + six.assertCountEqual(self, ["Hello", "Other", "Bye"], folders) + six.assertCountEqual(self, ["build", "source", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/1.4.10/fenix/testing"))) + six.assertCountEqual(self, ["build", "source", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/2.4.11/fenix/testing"))) + + def builds_test(self): + mocked_user_io = UserIO(out=TestBufferConanOutput()) + mocked_user_io.request_boolean = Mock(return_value=True) + self.client.run("remove hello/* -b", user_io=mocked_user_io) + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [], "H2": [], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + folders = os.listdir(self.client.storage_folder) + six.assertCountEqual(self, ["Hello", "Other", "Bye"], folders) + six.assertCountEqual(self, ["package", "source", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/1.4.10/fenix/testing"))) + six.assertCountEqual(self, ["package", "source", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/2.4.11/fenix/testing"))) + + def src_test(self): + mocked_user_io = UserIO(out=TestBufferConanOutput()) + mocked_user_io.request_boolean = Mock(return_value=True) + self.client.run("remove hello/* -s", user_io=mocked_user_io) + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 
2], "B": [1, 2], "O": [1, 2]}, + {"H1": False, "H2": False, "B": True, "O": True}) + folders = os.listdir(self.client.storage_folder) + six.assertCountEqual(self, ["Hello", "Other", "Bye"], folders) + six.assertCountEqual(self, ["package", "build", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/1.4.10/fenix/testing"))) + six.assertCountEqual(self, ["package", "build", "export"], + os.listdir(os.path.join(self.client.storage_folder, + "Hello/2.4.11/fenix/testing"))) + + def reject_removal_test(self): + mocked_user_io = UserIO(out=TestBufferConanOutput()) + mocked_user_io.request_boolean = Mock(return_value=False) + self.client.run("remove hello/* -s -b -p", user_io=mocked_user_io) + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remote_build_error_test(self): + self.client.run("remove hello/* -b -r=default", ignore_error=True) + self.assertIn("Remotes don't have 'build' or 'src' folder", self.client.user_io.out) + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remote_packages_test(self): + self.client.run("remove hello/* -p -r=default -f") + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [], "H2": [], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remote_conans_test(self): + self.client.run("remove hello/* -r=default -f") + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": None, "H2": None, "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + 
{"H1": True, "H2": True, "B": True, "O": True}) + remote_folder = os.path.join(self.server_folder, ".conan_server/data") + folders = os.listdir(remote_folder) + six.assertCountEqual(self, ["Other", "Bye"], folders) + + def remove_specific_package_test(self): + self.client.run("remove hello/1.4.10* -p=1_H1 -f") + self.assert_folders({"H1": [2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remove_specific_packages_test(self): + self.client.run("remove hello/1.4.10* -p=1_H1,2_H1 -f") + self.assert_folders({"H1": [], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remove_specific_build_test(self): + self.client.run("remove hello/1.4.10* -b=1_H1 -f") + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remove_specific_builds_test(self): + self.client.run("remove hello/1.4.10* -b=1_H1,2_H1 -f") + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remove_remote_specific_package_test(self): + self.client.run("remove hello/1.4.10* -p=1_H1 -f -r=default") + self.assert_folders({"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": [1, 2], "H2": [1, 2], "B": [1, 2], "O": [1, 2]}, + {"H1": True, "H2": True, "B": True, "O": True}) + + def remove_remote_specific_packages_test(self): + 
''' # The recipe_hash corresponds to the faked conanmanifests in export
+[options] + language=1 +[full_requires] + Hello2/0.1@lasote/stable:11111 + OpenSSL/2.10@lasote/testing:2222 + HelloInfo1/0.45@fenix/testing:33333 +""" + + +class SearchTest(unittest.TestCase): + + def setUp(self): + self.servers = {"local": TestServer(server_capabilities=[]), + "search_able": TestServer(server_capabilities=[COMPLEX_SEARCH_CAPABILITY])} + self.client = TestClient(servers=self.servers) + + # No conans created + self.client.run("search") + output = self.client.user_io.out + self.assertIn('There are no packages', output) + + # Conans with and without packages created + self.root_folder1 = 'Hello/1.4.10/fenix/testing' + root_folder2 = 'helloTest/1.4.10/fenix/stable' + root_folder3 = 'Bye/0.14/fenix/testing' + root_folder4 = 'NodeInfo/1.0.2/fenix/stable' + root_folder5 = 'MissFile/1.0.2/fenix/stable' + root_folder11 = 'Hello/1.4.11/fenix/testing' + root_folder12 = 'Hello/1.4.12/fenix/testing' + + self.client.save({"Empty/1.10/fake/test/reg/fake.txt": "//", + "%s/%s/WindowsPackageSHA/%s" % (self.root_folder1, + PACKAGES_FOLDER, + CONANINFO): conan_vars1, + "%s/%s/WindowsPackageSHA/%s" % (root_folder11, + PACKAGES_FOLDER, + CONANINFO): conan_vars1, + "%s/%s/WindowsPackageSHA/%s" % (root_folder12, + PACKAGES_FOLDER, + CONANINFO): conan_vars1, + "%s/%s/PlatformIndependantSHA/%s" % (self.root_folder1, + PACKAGES_FOLDER, + CONANINFO): conan_vars1b, + "%s/%s/LinuxPackageSHA/%s" % (self.root_folder1, + PACKAGES_FOLDER, + CONANINFO): conan_vars1c, + "%s/%s/a44f541cd44w57/%s" % (root_folder2, + PACKAGES_FOLDER, + CONANINFO): conan_vars2, + "%s/%s/e4f7vdwcv4w55d/%s" % (root_folder3, + PACKAGES_FOLDER, + CONANINFO): conan_vars3, + "%s/%s/e4f7vdwcv4w55d/%s" % (root_folder4, + PACKAGES_FOLDER, + CONANINFO): conan_vars4, + "%s/%s/e4f7vdwcv4w55d/%s" % (root_folder5, + PACKAGES_FOLDER, + "hello.txt"): "Hello"}, + self.client.paths.store) + + # Fake some manifests to be able to calculate recipe hash + fake_manifest = FileTreeManifest(1212, {}) + 
self.client.save({os.path.join(self.root_folder1, EXPORT_FOLDER, CONAN_MANIFEST): str(fake_manifest), + os.path.join(root_folder2, EXPORT_FOLDER, CONAN_MANIFEST): str(fake_manifest), + os.path.join(root_folder3, EXPORT_FOLDER, CONAN_MANIFEST): str(fake_manifest), + os.path.join(root_folder4, EXPORT_FOLDER, CONAN_MANIFEST): str(fake_manifest), + }, + self.client.paths.store) + + def recipe_search_test(self): + self.client.run("search Hello*") + self.assertEquals("Existing package recipes:\n\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + "Hello/1.4.12@fenix/testing\n" + "helloTest/1.4.10@fenix/stable\n", self.client.user_io.out) + + self.client.run("search Hello* --case-sensitive") + self.assertEquals("Existing package recipes:\n\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + "Hello/1.4.12@fenix/testing\n", + self.client.user_io.out) + + self.client.run("search *fenix* --case-sensitive") + self.assertEquals("Existing package recipes:\n\n" + "Bye/0.14@fenix/testing\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + "Hello/1.4.12@fenix/testing\n" + "MissFile/1.0.2@fenix/stable\n" + "NodeInfo/1.0.2@fenix/stable\n" + "helloTest/1.4.10@fenix/stable\n", self.client.user_io.out) + + def recipe_pattern_search_test(self): + self.client.run("search Hello*") + self.assertEquals("Existing package recipes:\n\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + "Hello/1.4.12@fenix/testing\n" + "helloTest/1.4.10@fenix/stable\n", self.client.user_io.out) + + self.client.run("search Hello* --case-sensitive") + self.assertEquals("Existing package recipes:\n\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + "Hello/1.4.12@fenix/testing\n", self.client.user_io.out) + + self.client.run("search *fenix* --case-sensitive") + self.assertEquals("Existing package recipes:\n\n" + "Bye/0.14@fenix/testing\n" + "Hello/1.4.10@fenix/testing\n" + "Hello/1.4.11@fenix/testing\n" + 
"Hello/1.4.12@fenix/testing\n" + "MissFile/1.0.2@fenix/stable\n" + "NodeInfo/1.0.2@fenix/stable\n" + "helloTest/1.4.10@fenix/stable\n", self.client.user_io.out) + + def package_search_with_invalid_reference_test(self): + self.client.run("search Hello -q 'a=1'", ignore_error=True) + self.assertIn("-q parameter only allowed with a valid recipe", str(self.client.user_io.out)) + + def package_search_with_empty_query_test(self): + self.client.run("search Hello/1.4.10/fenix/testing") + self.assertIn("WindowsPackageSHA", self.client.user_io.out) + self.assertIn("PlatformIndependantSHA", self.client.user_io.out) + self.assertIn("LinuxPackageSHA", self.client.user_io.out) + + def package_search_nonescaped_characters_test(self): + self.client.run('search Hello/1.4.10@fenix/testing -q "compiler=gcc AND compiler.libcxx=libstdc++11"') + self.assertIn("LinuxPackageSHA", self.client.user_io.out) + self.assertNotIn("PlatformIndependantSHA", self.client.user_io.out) + self.assertNotIn("WindowsPackageSHA", self.client.user_io.out) + + self.client.run('search Hello/1.4.10@fenix/testing -q "compiler=gcc AND compiler.libcxx=libstdc++"') + self.assertNotIn("LinuxPackageSHA", self.client.user_io.out) + self.assertIn("PlatformIndependantSHA", self.client.user_io.out) + self.assertNotIn("WindowsPackageSHA", self.client.user_io.out) + + # Now search with a remote + os.rmdir(self.servers["local"].paths.store) + shutil.copytree(self.client.paths.store, self.servers["local"].paths.store) + self.client.run("remove Hello* -f") + self.client.run('search Hello/1.4.10@fenix/testing -q "compiler=gcc AND compiler.libcxx=libstdc++11" -r local') + self.assertIn("LinuxPackageSHA", self.client.user_io.out) + self.assertNotIn("PlatformIndependantSHA", self.client.user_io.out) + self.assertNotIn("WindowsPackageSHA", self.client.user_io.out) + + self.client.run('search Hello/1.4.10@fenix/testing -q "compiler=gcc AND compiler.libcxx=libstdc++" -r local') + self.assertNotIn("LinuxPackageSHA", 
self.client.user_io.out) + self.assertIn("PlatformIndependantSHA", self.client.user_io.out) + self.assertNotIn("WindowsPackageSHA", self.client.user_io.out) + + def _assert_pkg_q(self, query, packages_found, remote): + + command = 'search Hello/1.4.10@fenix/testing -q \'%s\'' % query + if remote: + command += " -r %s" % remote + self.client.run(command) + + for pack_name in ["LinuxPackageSHA", "PlatformIndependantSHA", "WindowsPackageSHA"]: + self.assertEquals(pack_name in self.client.user_io.out, + pack_name in packages_found, "%s fail" % pack_name) + + def package_search_complex_queries_test(self): + + def test_cases(remote=None): + + if remote: # Simulate upload to remote + os.rmdir(self.servers[remote].paths.store) + shutil.copytree(self.client.paths.store, self.servers[remote].paths.store) + + q = '' + self._assert_pkg_q(q, ["LinuxPackageSHA", "PlatformIndependantSHA", + "WindowsPackageSHA"], remote) + q = 'compiler="gcc"' + self._assert_pkg_q(q, ["LinuxPackageSHA", "PlatformIndependantSHA"], remote) + + q = 'compiler=' # No packages found with empty value + self._assert_pkg_q(q, [], remote) + + q = 'compiler="gcc" OR compiler.libcxx=libstdc++11' + # Should find Visual because of the OR, visual doesn't care about libcxx + self._assert_pkg_q(q, ["LinuxPackageSHA", "PlatformIndependantSHA", + "WindowsPackageSHA"], remote) + + q = '(compiler="gcc" AND compiler.libcxx=libstdc++11) OR compiler.version=4.5' + self._assert_pkg_q(q, ["LinuxPackageSHA"], remote) + + q = '(compiler="gcc" AND compiler.libcxx=libstdc++11) OR '\ + '(compiler.version=4.5 OR compiler.version=8.1)' + self._assert_pkg_q(q, ["LinuxPackageSHA", "WindowsPackageSHA"], remote) + + q = '(compiler="gcc" AND compiler.libcxx=libstdc++) OR '\ + '(compiler.version=4.5 OR compiler.version=8.1)' + self._assert_pkg_q(q, ["LinuxPackageSHA", "PlatformIndependantSHA", + "WindowsPackageSHA"], remote) + + q = '(compiler="gcc" AND compiler.libcxx=libstdc++) OR '\ + '(compiler.version=4.3 OR compiler.version=8.1)' 
+ self._assert_pkg_q(q, ["PlatformIndependantSHA", "WindowsPackageSHA"], remote) + + q = '(os="Linux" OR os=Windows)' + self._assert_pkg_q(q, ["PlatformIndependantSHA", "LinuxPackageSHA", + "WindowsPackageSHA"], remote) + + q = '(os="Linux" OR os=Windows) AND use_Qt=True' + self._assert_pkg_q(q, ["PlatformIndependantSHA", "WindowsPackageSHA"], remote) + + q = '(os="Linux" OR os=Windows) AND use_Qt=True AND nonexistant_option=3' + self._assert_pkg_q(q, ["PlatformIndependantSHA", "WindowsPackageSHA"], remote) + + q = '(os="Linux" OR os=Windows) AND use_Qt=True OR nonexistant_option=3' + self._assert_pkg_q(q, ["PlatformIndependantSHA", + "WindowsPackageSHA", "LinuxPackageSHA"], remote) + + # test in local + test_cases() + + # test in remote + test_cases(remote="local") + + # test in remote with search capabilities + test_cases(remote="search_able") + + def package_search_with_invalid_query_test(self): + self.client.run("search Hello/1.4.10/fenix/testing -q 'invalid'", ignore_error=True) + self.assertIn("Invalid package query: invalid", self.client.user_io.out) + + self.client.run("search Hello/1.4.10/fenix/testing -q 'os= 3'", ignore_error=True) + self.assertIn("Invalid package query: os= 3", self.client.user_io.out) + + self.client.run("search Hello/1.4.10/fenix/testing -q 'os=3 FAKE '", ignore_error=True) + self.assertIn("Invalid package query: os=3 FAKE ", self.client.user_io.out) + + self.client.run("search Hello/1.4.10/fenix/testing -q 'os=3 os.compiler=4'", ignore_error=True) + self.assertIn("Invalid package query: os=3 os.compiler=4", self.client.user_io.out) + + self.client.run("search Hello/1.4.10/fenix/testing -q 'not os=3 AND os.compiler=4'", ignore_error=True) + self.assertIn("Invalid package query: not os=3 AND os.compiler=4. 'not' operator is not allowed", self.client.user_io.out) + + self.client.run("search Hello/1.4.10/fenix/testing -q 'os=3 AND !os.compiler=4'", ignore_error=True) + self.assertIn("Invalid package query: os=3 AND !os.compiler=4. '!' 
# One package will be outdated from the recipe and the other won't
use_Qt: False + [settings] + arch: x86 + compiler: gcc + compiler.libcxx: libstdc++11 + compiler.version: 4.5 + os: Linux + [requires] + Hello2/0.1@lasote/stable:11111 + HelloInfo1/0.45@fenix/testing:33333 + OpenSSL/2.10@lasote/testing:2222 + outdated from recipe: False + + Package_ID: PlatformIndependantSHA + [options] + use_Qt: True + [settings] + arch: x86 + compiler: gcc + compiler.libcxx: libstdc++ + compiler.version: 4.3 + outdated from recipe: True + +""", self.client.user_io.out) + + self.client.run('search helloTest/1.4.10@fenix/stable -q use_OpenGL=False') + self.assertIn("There are no packages for reference 'helloTest/1.4.10@fenix/stable' " + "matching the query 'use_OpenGL=False'", self.client.user_io.out) + + self.client.run('search helloTest/1.4.10@fenix/stable -q use_OpenGL=True') + self.assertIn("Existing packages for recipe helloTest/1.4.10@fenix/stable", self.client.user_io.out) + + self.client.run('search helloTest/1.4.10@fenix/stable -q "use_OpenGL=True AND arch=x64"') + self.assertIn("Existing packages for recipe helloTest/1.4.10@fenix/stable", self.client.user_io.out) + + self.client.run('search helloTest/1.4.10@fenix/stable -q "use_OpenGL=True AND arch=x86"') + self.assertIn("There are no packages for reference 'helloTest/1.4.10@fenix/stable' " + "matching the query 'use_OpenGL=True AND arch=x86'", self.client.user_io.out) + + def search_with_no_local_test(self): + client = TestClient() + client.run("search nonexist/1.0@lasote/stable") + self.assertIn("There are no packages", self.client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/source_test.py b/testbed/conan-io__conan/conans/test/command/source_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7b8d6be249edf2a5d22e48b4f97f749116bec9bd --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/source_test.py @@ -0,0 +1,65 @@ +import unittest +from conans.paths import CONANFILE +from conans.test.tools import TestClient +from 
conans.util.files import load +import os + + +class SourceTest(unittest.TestCase): + + def basic_source_test(self): + conanfile = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "Hello" + version = "0.1" + + def source(self): + self.output.info("Running source!") +''' + client = TestClient() + client.save({CONANFILE: conanfile}) + client.run("export lasote/stable") + client.run("source Hello/0.1@lasote/stable") + self.assertIn("Hello/0.1@lasote/stable: Configuring sources", client.user_io.out) + self.assertIn("Hello/0.1@lasote/stable: Running source!", client.user_io.out) + + # The second call shouldn't have effect + client.run("source Hello/0.1@lasote/stable") + self.assertNotIn("Hello/0.1@lasote/stable: Configuring sources", client.user_io.out) + self.assertNotIn("Hello/0.1@lasote/stable: Running source!", client.user_io.out) + + # Forced should have effect + client.run("source Hello/0.1@lasote/stable --force") + self.assertIn("WARN: Forced removal of source folder", client.user_io.out) + self.assertIn("Hello/0.1@lasote/stable: Configuring sources", client.user_io.out) + self.assertIn("Hello/0.1@lasote/stable: Running source!", client.user_io.out) + + def local_source_test(self): + conanfile = ''' +from conans import ConanFile +from conans.util.files import save + +class ConanLib(ConanFile): + + def source(self): + self.output.info("Running source!") + err + save("file1.txt", "Hello World") +''' + # First, failing source() + client = TestClient() + client.save({CONANFILE: conanfile}) + + client.run("source .", ignore_error=True) + self.assertIn("PROJECT: Running source!", client.user_io.out) + self.assertIn("ERROR: PROJECT: Error in source() method, line 9", client.user_io.out) + + # Fix the error and repeat + client.save({CONANFILE: conanfile.replace("err", "")}) + client.run("source .") + self.assertIn("PROJECT: Configuring sources in", client.user_io.out) + self.assertIn("PROJECT: WARN: Your previous source command failed", 
client.user_io.out) + self.assertIn("PROJECT: Running source!", client.user_io.out) + self.assertEqual("Hello World", load(os.path.join(client.current_folder, "file1.txt"))) diff --git a/testbed/conan-io__conan/conans/test/command/upload_test.py b/testbed/conan-io__conan/conans/test/command/upload_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a7a78c480a981177743b04d1dbcaca3440c2ed93 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/upload_test.py @@ -0,0 +1,68 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.test.utils.cpp_test_files import cpp_hello_conan_files + + +class UploadTest(unittest.TestCase): + + def not_existing_error_test(self): + """ Trying to upload with pattern not matched must raise an Error + """ + client = TestClient() + error = client.run("upload some_nonsense", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: No packages found matching pattern 'some_nonsense'", + client.user_io.out) + + def invalid_reference_error_test(self): + """ Trying to upload an invalid reference must raise an Error + """ + client = TestClient() + error = client.run("upload some_nonsense -p hash1", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: -p parameter only allowed with a valid recipe reference", + client.user_io.out) + + def non_existing_recipe_error_test(self): + """ Trying to upload a non-existing recipe must raise an Error + """ + client = TestClient() + error = client.run("upload Pkg/0.1@user/channel", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: There is no local conanfile exported as Pkg/0.1@user/channel", + client.user_io.out) + + def non_existing_package_error_test(self): + """ Trying to upload a non-existing package must raise an Error + """ + client = TestClient() + error = client.run("upload Pkg/0.1@user/channel -p hash1", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: There is no local 
conanfile exported as Pkg/0.1@user/channel", + client.user_io.out) + + def not_reupload_test(self): + """ Check that if the package has not been modified, it is not uploaded + again + """ + servers = {} + test_server = TestServer([("*/*@*/*", "*")], [("*/*@*/*", "*")], + users={"lasote": "mypass"}) + servers["default"] = test_server + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + + files = cpp_hello_conan_files("Hello0", "1.2.1", build=False) + client.save(files) + client.run("export frodo/stable") + client.run("install Hello0/1.2.1@frodo/stable --build=missing") + client.run("upload Hello0/1.2.1@frodo/stable -r default --all") + self.assertIn("Uploading conan_package.tgz", client.user_io.out) + client.run("remove Hello0/1.2.1@frodo/stable -f") + client.run("search") + self.assertNotIn("Hello0/1.2.1@frodo/stable", client.user_io.out) + client.run("install Hello0/1.2.1@frodo/stable") + self.assertIn("Downloading conan_package.tgz", client.user_io.out) + client.run("upload Hello0/1.2.1@frodo/stable -r default --all") + self.assertIn("Uploaded conan recipe", client.user_io.out) + self.assertNotIn("Uploading conan_package.tgz", client.user_io.out) + self.assertIn("Package is up to date", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/command/user_test.py b/testbed/conan-io__conan/conans/test/command/user_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a03e10696238433474a52bfdcb41c0cfad6e0a3a --- /dev/null +++ b/testbed/conan-io__conan/conans/test/command/user_test.py @@ -0,0 +1,95 @@ +import unittest +from conans.test.tools import TestClient, TestServer + + +class UserTest(unittest.TestCase): + + def test_command_user(self): + """ Test that the user can be shown and changed, and it is reflected in the + user cache localdb + """ + client = TestClient() + client.run('user') + self.assertIn("ERROR: No remotes defined", client.user_io.out) + + def test_with_remote_no_connect(self): + 
test_server = TestServer() + client = TestClient(servers={"default": test_server}) + client.run('user') + self.assertIn("Current 'default' user: None (anonymous)", client.user_io.out) + + client.run('user john') + self.assertIn("Change 'default' user from None (anonymous) to john", client.user_io.out) + self.assertEqual(('john', None), client.localdb.get_login(test_server.fake_url)) + + client.run('user will') + self.assertIn("Change 'default' user from john to will", client.user_io.out) + self.assertEqual(('will', None), client.localdb.get_login(test_server.fake_url)) + + client.run('user None') + self.assertIn("Change 'default' user from will to None (anonymous)", client.user_io.out) + self.assertEqual((None, None), client.localdb.get_login(test_server.fake_url)) + + client.run('user') + self.assertIn("Current 'default' user: None (anonymous)", client.user_io.out) + + def test_command_user_with_password(self): + """ Checks the -p option, that obtains a token from the password. + Useful for integrations as travis, that interactive password is not + possible + """ + test_server = TestServer() + servers = {"default": test_server} + conan = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + conan.run('user dummy -p ping_pong2', ignore_error=True) + self.assertIn("ERROR: Wrong user or password", conan.user_io.out) + conan.run('user lasote -p mypass') + self.assertNotIn("ERROR: Wrong user or password", conan.user_io.out) + self.assertIn("Change 'default' user from None (anonymous) to lasote", conan.user_io.out) + conan.run('user none') + self.assertIn("Change 'default' user from lasote to None (anonymous)", conan.user_io.out) + self.assertEqual((None, None), conan.localdb.get_login(test_server.fake_url)) + conan.run('user') + self.assertIn("Current 'default' user: None (anonymous)", conan.user_io.out) + + def test_command_user_with_password_spaces(self): + """ Checks the -p option, that obtains a token from the password. 
+ Useful for integrations as travis, that interactive password is not + possible + """ + test_server = TestServer(users={"lasote": 'my "password'}) + servers = {"default": test_server} + conan = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + conan.run(r'user lasote -p="my \"password"') + self.assertNotIn("ERROR: Wrong user or password", conan.user_io.out) + self.assertIn("Change 'default' user from None (anonymous) to lasote", conan.user_io.out) + conan.run('user none') + conan.run(r'user lasote -p "my \"password"') + self.assertNotIn("ERROR: Wrong user or password", conan.user_io.out) + self.assertIn("Change 'default' user from None (anonymous) to lasote", conan.user_io.out) + + def test_clean(self): + test_server = TestServer() + servers = {"default": test_server} + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + base = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "lib" + version = "0.1" +''' + + files = {"conanfile.py": base} + client.save(files) + client.run("export lasote/stable") + client.run("upload lib/0.1@lasote/stable") + client.run("user") + self.assertIn("Current 'default' user: lasote", client.user_io.out) + client.run("user --clean") + client.run("user") + self.assertNotIn("lasote", client.user_io.out) + self.assertEqual("Current 'default' user: None (anonymous)\n", client.user_io.out) + client.run("upload lib/0.1@lasote/stable") + client.run("user") + self.assertIn("Current 'default' user: lasote", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/compile_helpers_test.py b/testbed/conan-io__conan/conans/test/compile_helpers_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5ba0dd2f19830a3cec8d03399908fd07fc7bee51 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/compile_helpers_test.py @@ -0,0 +1,431 @@ + +import unittest +from conans.client.configure_environment import ConfigureEnvironment +from 
conans.model.settings import Settings +from conans.client.gcc import GCC +import platform +import os +from conans.client.runner import ConanRunner +from conans.test.tools import TestBufferConanOutput, TestClient +from conans.test.utils.test_files import temp_folder +from conans.model.profile import Profile +from conans.model.scope import Scopes +from conans.util.files import save +from conans.paths import CONANFILE + + +class MockCompiler(object): + + def __init__(self, name, libcxx, version): + self.name = name + self.libcxx = libcxx + self.version = version + + def __repr__(self, *args, **kwargs): + return self.name + + +class MockSettings(Settings): + + def __init__(self, build_type="Release", os=None, arch=None, + compiler_name=None, libcxx=None, version=None): + self._build_type = build_type + self._libcxx = libcxx or "libstdc++" + self._os = os or "Linux" + self._arch = arch or "x86" + self._compiler = MockCompiler(compiler_name or "gcc", self._libcxx, version or "4.8") + + @property + def build_type(self): + return self._build_type + + @property + def libcxx(self): + return self._libcxx + + @property + def os(self): + return self._os + + @property + def arch(self): + return self._arch + + @property + def compiler(self): + return self._compiler + + +class MockAndroidSettings(Settings): + + @property + def os(self): + return "Android" + + +class BuildInfoMock(object): + + @property + def lib_paths(self): + return ["path/to/lib1", "path/to/lib2"] + + @property + def exelinkflags(self): + return ["-framework thing"] + + @property + def sharedlinkflags(self): + return ["-framework thing2"] + + @property + def include_paths(self): + return ["path/to/includes/lib1", "path/to/includes/lib2"] + + @property + def defines(self): + return ["MYDEF1", "MYDEF2"] + + @property + def libs(self): + return ["lib1", "lib2"] + + @property + def cflags(self): + return ["cflag1"] + + @property + def cppflags(self): + return ["cppflag1"] + + +class MockConanfile(object): + pass + + 
+class CompileHelpersTest(unittest.TestCase): + + def setUp(self): + self.current = os.getcwd() + os.chdir(temp_folder()) + + def tearDown(self): + os.chdir(self.current) + + def compile_flag_test(self): + win_settings = MockSettings("Release", os="Windows", arch="x86", + compiler_name="Visual Studio", libcxx=None, version="14") + env = ConfigureEnvironment(BuildInfoMock(), win_settings) + self.assertEquals(env.compile_flags, "lib1.lib lib2.lib") + + linux_s = MockSettings("Release", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++", version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), linux_s) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m32 -framework thing -framework ' + 'thing2 -s -DNDEBUG -DMYDEF1 -DMYDEF2 ' + '-I"path/to/includes/lib1" -I"path/to/includes/lib2" ' + '-L"path/to/lib1" -L"path/to/lib2" cppflag1 ' + '-D_GLIBCXX_USE_CXX11_ABI=0') + + linux_s_11 = MockSettings("Debug", os="Linux", arch="x86_64", + compiler_name="gcc", libcxx="libstdc++11", version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), linux_s_11) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 ' + '-D_GLIBCXX_USE_CXX11_ABI=1') + + linux_s_clang_std = MockSettings("Debug", os="Linux", arch="x86_64", + compiler_name="clang", libcxx="libstdc", version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), linux_s_clang_std) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -stdlib=libstdc++') + + linux_s_clang = MockSettings("Debug", os="Linux", arch="x86_64", + compiler_name="clang", libcxx="libc++", version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), linux_s_clang) + 
self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -stdlib=libc++') + + freebsd_s_clang_32 = MockSettings("Debug", os="FreeBSD", arch="x86", + compiler_name="clang", libcxx="libc++", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), freebsd_s_clang_32) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m32 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -stdlib=libc++') + + freebsd_s_clang_64 = MockSettings("Debug", os="FreeBSD", arch="x86_64", + compiler_name="clang", libcxx="libc++", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), freebsd_s_clang_64) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -stdlib=libc++') + + solaris_s_sun_cc_32 = MockSettings("Debug", os="SunOS", arch="x86", + compiler_name="sun-cc", libcxx="libCstd", version="5.10") + env = ConfigureEnvironment(BuildInfoMock(), solaris_s_sun_cc_32) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m32 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -library=Cstd') + + solaris_s_sun_cc_64 = MockSettings("Debug", os="SunOS", arch="x86_64", + compiler_name="sun-cc", libcxx="libCstd", version="5.10") + env = ConfigureEnvironment(BuildInfoMock(), solaris_s_sun_cc_64) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 
-library=Cstd') + + solaris_s_sun_cc_stlport = MockSettings("Debug", os="SunOS", arch="x86_64", + compiler_name="sun-cc", libcxx="libstlport", + version="5.10") + env = ConfigureEnvironment(BuildInfoMock(), solaris_s_sun_cc_stlport) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -library=stlport4') + + solaris_s_sun_cc_stdcxx = MockSettings("Debug", os="SunOS", arch="x86_64", + compiler_name="sun-cc", libcxx="libstdcxx", + version="5.10") + env = ConfigureEnvironment(BuildInfoMock(), solaris_s_sun_cc_stdcxx) + self.assertEquals(env.compile_flags, '-llib1 -llib2 -m64 -framework thing -framework thing2' + ' -g -DMYDEF1 -DMYDEF2 -I"path/to/includes/lib1" ' + '-I"path/to/includes/lib2" -L"path/to/lib1" ' + '-L"path/to/lib2" cppflag1 -library=stdcxx4') + + + def configure_environment_test(self): + win_settings = MockSettings("Release", os="Windows", arch="x86", + compiler_name="Visual Studio", libcxx=None, version="14") + + env = ConfigureEnvironment(BuildInfoMock(), win_settings) + + expected = 'call "%vs140comntools%../../VC/vcvarsall.bat" x86 && call _conan_env.bat' + self.assertEquals(env.command_line, expected) + + linux_s = MockSettings("Release", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++", version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), linux_s) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m32 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m32 cflag1 -s -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m32 cppflag1 -D_GLIBCXX_USE_CXX11_ABI=0 -s -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 
'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + c11settings = MockSettings("Release", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++11", version="6.2") + env = ConfigureEnvironment(BuildInfoMock(), c11settings) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m32 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m32 cflag1 -s -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m32 cppflag1 -D_GLIBCXX_USE_CXX11_ABI=1 -s -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + clang_settings_64 = MockSettings("Release", os="Macos", arch="x86_64", + compiler_name="clang", libcxx="libc++", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), clang_settings_64) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m64 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m64 cflag1 -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m64 cppflag1 -stdlib=libc++ -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + clang_settings_std = MockSettings("Release", os="Macos", arch="x86_64", + compiler_name="clang", libcxx="libstdc", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), clang_settings_std) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m64 
-framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m64 cflag1 -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m64 cppflag1 -stdlib=libstdc++ -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + clang_settings_std_debug = MockSettings("Debug", os="Macos", arch="x86", + compiler_name="clang", libcxx="libstdc", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), clang_settings_std_debug) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m32 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m32 cflag1 -g ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m32 cppflag1 -stdlib=libstdc++ -g ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + freebsd_settings = MockSettings("Release", os="FreeBSD", arch="x86", + compiler_name="clang", libcxx="libc++", version="3.8") + env = ConfigureEnvironment(BuildInfoMock(), freebsd_settings) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m32 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m32 cflag1 -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m32 cppflag1 -stdlib=libc++ -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 
'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + solaris_settings = MockSettings("Release", os="SunOS", arch="x86_64", + compiler_name="sun-cc", libcxx="libstlport", version="5.10") + env = ConfigureEnvironment(BuildInfoMock(), solaris_settings) + self.assertEquals(env.command_line, 'env LIBS="-llib1 -llib2" LDFLAGS="-Lpath/to/lib1 ' + '-Lpath/to/lib2 -m64 -framework thing -framework thing2 $LDFLAGS" ' + 'CFLAGS="$CFLAGS -m64 cflag1 -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'CPPFLAGS="$CPPFLAGS -m64 cppflag1 -library=stlport4 -DNDEBUG ' + '-Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2" ' + 'C_INCLUDE_PATH=$C_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2" ' + 'CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:"path/to/includes/lib1":' + '"path/to/includes/lib2"') + + + # Not supported yet + win_gcc = MockSettings("Release", os="Windows", arch="x86", + compiler_name="gcc", libcxx=None, version="4.9") + env = ConfigureEnvironment(BuildInfoMock(), win_gcc) + self.assertEquals(env.command_line_env, 'call _conan_env.bat') + + def gcc_test(self): + c11settings_release = MockSettings("Release", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++11", + version="6.2") + gcc = GCC(c11settings_release) + self.assertEquals(gcc.command_line, "-s -DNDEBUG -m32 ") + + c11settings_debug = MockSettings("Debug", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++", + version="6.2") + gcc = GCC(c11settings_debug) + self.assertEquals(gcc.command_line, "-g -m32 ") + + def append_variables_test(self): + output = TestBufferConanOutput() + runner = ConanRunner() + if platform.system() != "Windows": + os.environ["LDFLAGS"] = "ldflag=23 otherldflag=33" + os.environ["CPPFLAGS"] = "-cppflag -othercppflag" + os.environ["CFLAGS"] = "-cflag" + os.environ["C_INCLUDE_PATH"] = "/path/to/c_include_path:/anotherpath" + os.environ["CPLUS_INCLUDE_PATH"] = 
"/path/to/cpp_include_path:/anotherpathpp" + c11settings_release = MockSettings("Release", os="Linux", arch="x86", + compiler_name="gcc", libcxx="libstdc++11", + version="6.2") + env = ConfigureEnvironment(BuildInfoMock(), c11settings_release) + runner(env.command_line, output=output) + self.assertIn("LDFLAGS=-Lpath/to/lib1 -Lpath/to/lib2 -m32 -framework thing -framework thing2 ldflag=23 otherldflag=33\n", output) + self.assertIn("CPPFLAGS=-cppflag -othercppflag -m32 cppflag1 -D_GLIBCXX_USE_CXX11_ABI=1 -s -DNDEBUG -Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2\n", output) + self.assertIn("CFLAGS=-cflag -m32 cflag1 -s -DNDEBUG -Ipath/to/includes/lib1 -Ipath/to/includes/lib2 -DMYDEF1 -DMYDEF2\n", output) + self.assertIn("C_INCLUDE_PATH=/path/to/c_include_path:/anotherpath:path/to/includes/lib1:path/to/includes/lib2\n", output) + self.assertIn("CPLUS_INCLUDE_PATH=/path/to/cpp_include_path:/anotherpathpp:path/to/includes/lib1:path/to/includes/lib2\n", output) + + # Reset env vars to not mess with other tests + os.environ["LDFLAGS"] = "" + os.environ["CPPFLAGS"] = "" + os.environ["CFLAGS"] = "" + os.environ["C_INCLUDE_PATH"] = "" + os.environ["CPLUS_INCLUDE_PATH"] = "" + else: + os.environ["LIB"] = '/path/to/lib.a' + os.environ["CL"] = '/I"path/to/cl1" /I"path/to/cl2"' + + win_settings = MockSettings("Release", os="Windows", arch="x86", + compiler_name="Visual Studio", libcxx=None, + version="12") + env = ConfigureEnvironment(BuildInfoMock(), win_settings) + command = "%s && SET" % env.command_line + runner(command, output=output) + + self.assertIn('/path/to/lib.a;path/to/lib1;path/to/lib2', output) + self.assertIn('CL=/I"path/to/cl1" /I"path/to/cl2" ' + '/I"path/to/includes/lib1" /I"path/to/includes/lib2"', output) + + os.environ["LIB"] = "" + os.environ["CL"] = "" + + +conanfile_scope_env = """ +from conans import ConanFile, ConfigureEnvironment + +class AConan(ConanFile): + settings = "os" + requires = "Hello/0.1@lasote/testing" + generators = 
"env" + + def build(self): + env = ConfigureEnvironment(self) + self.run(env.command_line + (" && SET" if self.settings.os=="Windows" else " && env")) +""" + +conanfile_dep = """ +from conans import ConanFile + +class AConan(ConanFile): + name = "Hello" + version = "0.1" + + def package_info(self): + self.env_info.PATH=["/path/to/my/folder"] +""" + + +class ConfigureEnvironmentTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def build_with_profile_test(self): + self._create_profile("scopes_env", {}, + {}, # undefined scope do not apply to my packages + {"CXX": "/path/tomy/g++_build", "CC": "/path/tomy/gcc_build"}) + + self.client.save({CONANFILE: conanfile_dep}) + self.client.run("export lasote/testing") + + self.client.save({CONANFILE: conanfile_scope_env}, clean_first=True) + self.client.run("install --build=missing") + self.client.run("build -pr scopes_env") + self.assertRegexpMatches(str(self.client.user_io.out), "PATH=['\"]*/path/to/my/folder") + self._assert_env_variable_printed("CC", "/path/tomy/gcc_build") + self._assert_env_variable_printed("CXX", "/path/tomy/g++_build") + + def _assert_env_variable_printed(self, name, value): + self.assertIn("%s=%s" % (name, value), self.client.user_io.out) + + def _create_profile(self, name, settings, scopes=None, env=None): + profile = Profile() + profile._settings = settings or {} + if scopes: + profile.scopes = Scopes.from_list(["%s=%s" % (key, value) for key, value in scopes.items()]) + profile._env = env or {} + save(self.client.client_cache.profile_path(name), profile.dumps()) diff --git a/testbed/conan-io__conan/conans/test/conan_server_config_parser_test.py b/testbed/conan-io__conan/conans/test/conan_server_config_parser_test.py new file mode 100644 index 0000000000000000000000000000000000000000..679ec915dddf1ca7196fc48bb36f940c527aad3c --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conan_server_config_parser_test.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +import unittest 
+from conans.test.utils.test_files import temp_folder +import os +from conans.util.files import save, mkdir +from conans.server.conf import ConanServerConfigParser +from conans.errors import ConanException + + +class Test(unittest.TestCase): + + def testNotAllowedEncodingPassword(self): + tmp_dir = temp_folder() + server_conf = """ +[server] +jwt_secret: 534534534 +jwt_expire_minutes: 120 +ssl_enabled: False +port: 9300 +public_port: +host_name: localhost +store_adapter: disk +authorize_timeout: 1800 +disk_storage_path: ~/.conan_server/data +disk_authorize_timeout: 1800 +updown_secret: tbsiGzeEygYSCcNrSYcuzmZr + + +[write_permissions] + +[users] +demo: %s + """ + server_dir = os.path.join(tmp_dir, ".conan_server") + mkdir(server_dir) + conf_path = os.path.join(server_dir, "server.conf") + + save(conf_path, server_conf % "cönan") + + server_config = ConanServerConfigParser(tmp_dir) + with self.assertRaisesRegexp(ConanException, "Password contains invalid characters. Only ASCII encoding is supported"): + server_config.users + + save(conf_path, server_conf % "manol ito!@") + server_config = ConanServerConfigParser(tmp_dir) + self.assertEquals(server_config.users, {"demo": "manol ito!@"}) + + # Now test from ENV + server_config = ConanServerConfigParser(tmp_dir, environment={"CONAN_SERVER_USERS": "demo: cönan"}) + with self.assertRaisesRegexp(ConanException, "Password contains invalid characters. 
Only ASCII encoding is supported"): + server_config.users + + server_config = ConanServerConfigParser(tmp_dir, environment={"CONAN_SERVER_USERS": "demo:manolito!@"}) + self.assertEquals(server_config.users, {"demo": "manolito!@"}) diff --git a/testbed/conan-io__conan/conans/test/conan_trace_file_test.py b/testbed/conan-io__conan/conans/test/conan_trace_file_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e1776a7c5ed63ded597a18b5f67f1a2920a881d2 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conan_trace_file_test.py @@ -0,0 +1,113 @@ +import unittest +from conans import tools +from conans.test.utils.test_files import temp_folder +import os +from conans.model.ref import ConanFileReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.test.tools import TestServer, TestClient +from conans.util.files import load +import json +from conans.paths import CONANFILE, RUN_LOG_NAME +from conans.client.command import get_conan_runner +from conans.client.runner import ConanRunner + + +class ConanTraceTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + + def test_run_log_file_package_test(self): + '''Check if the log file is generated and packaged''' + + base = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello0" + version = "0.1" + + def build(self): + self.run('echo "Simulating cmake..."') + + def package(self): + self.copy(pattern="%s", dst="", keep_path=False) + ''' % RUN_LOG_NAME + + def _install_a_package(print_commands_to_output, generate_run_log_file): + + runner = ConanRunner(print_commands_to_output, generate_run_log_file, + log_run_to_output=True) + + client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, + runner=runner) + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = {} + files[CONANFILE] = base + client.save(files) + client.run("user 
lasote -p mypass -r default") + client.run("export lasote/stable") + client.run("install %s --build missing" % str(conan_reference)) + package_dir = client.client_cache.packages(ConanFileReference.loads("Hello0/0.1@lasote/stable")) + package_dir = os.path.join(package_dir, os.listdir(package_dir)[0]) + log_file_packaged = os.path.join(package_dir, RUN_LOG_NAME) + return log_file_packaged, client.user_io.out + + log_file_packaged, output = _install_a_package(False, True) + self.assertIn("Copied 1 '.log' files: conan_run.log", output) + self.assertTrue(os.path.exists(log_file_packaged)) + contents = load(log_file_packaged) + self.assertIn("Simulating cmake...", contents) + self.assertNotIn("----Running------%s> echo" % os.linesep, contents) + + log_file_packaged, output = _install_a_package(True, True) + self.assertIn("Copied 1 '.log' files: conan_run.log", output) + self.assertTrue(os.path.exists(log_file_packaged)) + contents = load(log_file_packaged) + self.assertIn("Simulating cmake...", contents) + self.assertIn("----Running------%s> echo" % os.linesep, contents) + + log_file_packaged, output = _install_a_package(False, False) + self.assertNotIn("Copied 1 '.log' files: conan_run.log", output) + self.assertFalse(os.path.exists(log_file_packaged)) + + def test_trace_actions(self): + client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}) + trace_file = os.path.join(temp_folder(), "conan_trace.log") + with tools.environment_append({"CONAN_TRACE_FILE": trace_file}): + # UPLOAD A PACKAGE + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", need_patch=True, build=False) + client.save(files) + client.run("user lasote -p mypass -r default") + client.run("export lasote/stable") + client.run("install %s --build missing" % str(conan_reference)) + client.run("upload %s --all" % str(conan_reference)) + + traces = load(trace_file) + self.assertNotIn("mypass", traces) + 
self.assertIn('"password": "**********"', traces) + self.assertIn('"Authorization": "**********"', traces) + self.assertIn('"X-Client-Anonymous-Id": "**********"', traces) + actions = traces.splitlines() + self.assertEquals(len(actions), 17) + for trace in actions: + doc = json.loads(trace) + self.assertIn("_action", doc) # Valid jsons + + self.assertEquals(json.loads(actions[0])["_action"], "COMMAND") + self.assertEquals(json.loads(actions[0])["name"], "user") + + self.assertEquals(json.loads(actions[2])["_action"], "COMMAND") + self.assertEquals(json.loads(actions[2])["name"], "export") + + self.assertEquals(json.loads(actions[3])["_action"], "COMMAND") + self.assertEquals(json.loads(actions[3])["name"], "install") + + self.assertEquals(json.loads(actions[4])["_action"], "GOT_RECIPE_FROM_LOCAL_CACHE") + self.assertEquals(json.loads(actions[4])["_id"], "Hello0/0.1@lasote/stable") + + self.assertEquals(json.loads(actions[-1])["_action"], "UPLOADED_PACKAGE") diff --git a/testbed/conan-io__conan/conans/test/conanfile_exception_test.py b/testbed/conan-io__conan/conans/test/conanfile_exception_test.py new file mode 100644 index 0000000000000000000000000000000000000000..23c305fd81e51a4529a0fd6e57e140d792a61af2 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conanfile_exception_test.py @@ -0,0 +1,28 @@ +import unittest +from conans.test.tools import TestClient + + +class ConanfileExceptionsTest(unittest.TestCase): + + def test_base(self): + + client = TestClient() + base = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "lib" + version = "0.1" + + def config(self): + raise Exception("Something went wrong!") +''' + + files = {"conanfile.py": base} + client.save(files) + client.run("export user/channel") + client.run("install lib/0.1@user/channel", ignore_error=True) + self.assertIn("ERROR: lib/0.1@user/channel: Error in config, config_options " + "or configure() method, line 9", + client.user_io.out) + self.assertIn('raise 
Exception("Something went wrong!")', client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/conanfile_extend_test.py b/testbed/conan-io__conan/conans/test/conanfile_extend_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7eb6b799d2a2d4240a8d608b06de6bbcd41958b4 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conanfile_extend_test.py @@ -0,0 +1,124 @@ +import unittest +from conans.test.tools import TestClient +from conans.util.files import load +import os + + +class ConanfileExtendTest(unittest.TestCase): + + def setUp(self): + client = TestClient() + base = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "lib" + version = "0.1" +''' + + files = {"conanfile.py": base} + client.save(files) + client.run("export user/channel") + base = ''' +from conans import ConanFile + +class ConanOtherLib(ConanFile): + name = "otherlib" + version = "0.2" + options = {"otherlib_option": [1, 2, 3]} + default_options="otherlib_option=3" +''' + + files = {"conanfile.py": base} + client.save(files) + client.run("export user/channel") + + self.base_folder = client.base_folder + + def test_base(self): + + base = ''' +from conans import ConanFile + +class HelloConan2(ConanFile): + name = "test" + version = "1.9" + requires = "lib/0.1@user/channel" + options = {"test_option": [1, 2, 3]} + default_options="test_option=2" + my_flag = False + + def build(self): + self.output.info("MyFlag %s" % self.my_flag) + ''' + extension = ''' +from conans import ConanFile, CMake +from conanfile import HelloConan2 + +class DevConanFile(HelloConan2): + my_flag = True + + def config(self): + self.options["otherlib"].otherlib_option = 1 + + def requirements(self): + self.requires("otherlib/0.2@user/channel") + + ''' + files = {"conanfile.py": base, + "conanfile_dev.py": extension} + + client = TestClient(self.base_folder) + client.save(files) + client.run("install --build") + conaninfo = load(os.path.join(client.current_folder, 
"conaninfo.txt")) + self.assertIn("lib/0.1@user/channel", conaninfo) + self.assertIn("test_option=2", conaninfo) + self.assertNotIn("otherlib/0.2@user/channel", conaninfo) + self.assertNotIn("otherlib:otherlib_option=1", conaninfo) + client.run("build") + self.assertIn("MyFlag False", client.user_io.out) + client.run("info") + self.assertIn("lib/0.1@user/channel", client.user_io.out) + self.assertNotIn("otherlib/0.2@user/channel", client.user_io.out) + + client.run("install --build --file=conanfile_dev.py") + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("lib/0.1@user/channel", conaninfo) + self.assertIn("test_option=2", conaninfo) + self.assertIn("otherlib/0.2@user/channel", conaninfo) + self.assertIn("otherlib:otherlib_option=1", conaninfo) + client.run("build -f=conanfile_dev.py") + self.assertIn("MyFlag True", client.user_io.out) + client.run("info -f=conanfile_dev.py") + self.assertIn("lib/0.1@user/channel", client.user_io.out) + self.assertIn("otherlib/0.2@user/channel", client.user_io.out) + + def test_txt(self): + + base = '''[requires] +lib/0.1@user/channel +''' + extension = '''[requires] +lib/0.1@user/channel +otherlib/0.2@user/channel + +[options] +otherlib:otherlib_option = 1 +''' + files = {"conanfile.txt": base, + "conanfile_dev.txt": extension} + + client = TestClient(self.base_folder) + client.save(files) + client.run("install --build") + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("lib/0.1@user/channel", conaninfo) + self.assertNotIn("otherlib/0.2@user/channel", conaninfo) + self.assertNotIn("otherlib:otherlib_option=1", conaninfo) + + client.run("install --build --file=conanfile_dev.txt") + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("lib/0.1@user/channel", conaninfo) + self.assertIn("otherlib/0.2@user/channel", conaninfo) + self.assertIn("otherlib:otherlib_option=1", conaninfo) diff --git 
a/testbed/conan-io__conan/conans/test/conanfile_helpers_test.py b/testbed/conan-io__conan/conans/test/conanfile_helpers_test.py new file mode 100644 index 0000000000000000000000000000000000000000..988af000ae09f801a1f242d2b9685e93837ee9dc --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conanfile_helpers_test.py @@ -0,0 +1,80 @@ +import unittest +from conans.test.tools import TestClient +import random +import string + + +class ConanfileHelpersTest(unittest.TestCase): + + def test_replace_in_file(self): + for libname in [''.join(random.choice(string.ascii_lowercase) for _ in range(5)) + for _ in range(5)]: + helpers = ''' +def build_helper(output): + output.info("Building %d!") + ''' + other_helper = ''' +def source_helper(output): + output.info("Source %d!") + ''' + file_content = ''' +from conans import ConanFile +from {libname} import build_helper +from {libname}s.other import source_helper + +class ConanFileToolsTest(ConanFile): + name = "test" + version = "1.9" + exports = "*" + + def source(self): + source_helper(self.output) + + def build(self): + build_helper(self.output) + ''' + file_content2 = ''' +from a{libname} import build_helper +from a{libname}s.other import source_helper +from conans import ConanFile + + +class ConanFileToolsTest(ConanFile): + name = "test2" + version = "2.3" + exports = "*" + + def source(self): + source_helper(self.output) + + def build(self): + build_helper(self.output) + ''' + files = {"%s.py" % libname: helpers % 1, + "%ss/__init__.py" % libname: "", + "%ss/other.py" % libname: other_helper % 1, + "conanfile.py": file_content.format(libname=libname)} + + client = TestClient() + client.save(files) + client.run("export lasote/testing") + + client2 = TestClient(client.base_folder) + files = {"a%s.py" % libname: helpers % 2, + "a%ss/__init__.py" % libname: "", + "a%ss/other.py" % libname: other_helper % 2, + "conanfile.py": file_content2.format(libname=libname)} + client2.save(files) + client2.run("export lasote/testing") + + 
client3 = TestClient(client.base_folder) + files = {"conanfile.txt": """[requires] + test/1.9@lasote/testing\n + test2/2.3@lasote/testing"""} + client3.save(files) + client3.run("install --build") + # print client3.user_io.out + self.assertIn("Building 1!", client3.user_io.out) + self.assertIn("Source 1!", client3.user_io.out) + self.assertIn("Building 2!", client3.user_io.out) + self.assertIn("Source 2!", client3.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/conanfile_loader_test.py b/testbed/conan-io__conan/conans/test/conanfile_loader_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f20fed9f4b4d5536112a992ae6ae457dd16cca2e --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conanfile_loader_test.py @@ -0,0 +1,170 @@ + +import unittest +from conans.client.loader import ConanFileTextLoader, ConanFileLoader +from conans.errors import ConanException +from conans.util.files import save +import os +from conans.model.requires import Requirements +from conans.model.options import OptionsValues +from mock import Mock +from conans.model.settings import Settings +from conans.test.utils.test_files import temp_folder +from conans.model.scope import Scopes + + +class ConanLoaderTest(unittest.TestCase): + + def requires_init_test(self): + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), Scopes(), + None, None) + tmp_dir = temp_folder() + conanfile_path = os.path.join(tmp_dir, "conanfile.py") + conanfile = """from conans import ConanFile +class MyTest(ConanFile): + requires = {} + def requirements(self): + self.requires("MyPkg/0.1@user/channel") +""" + for requires in ("''", "[]", "()", "None"): + save(conanfile_path, conanfile.format(requires)) + result = loader.load_conan(conanfile_path, output=None, consumer=True) + result.requirements() + self.assertEqual("MyPkg/0.1@user/channel", str(result.requires)) + + def conanfile_txt_errors_test(self): + # Valid content + file_content = '''[requires} 
+OpenCV/2.4.10@phil/stable # My requirement for CV +''' + with self.assertRaisesRegexp(ConanException, "Bad syntax"): + ConanFileTextLoader(file_content) + + file_content = '{hello}' + with self.assertRaisesRegexp(ConanException, "Unexpected line"): + ConanFileTextLoader(file_content) + + def plain_text_parser_test(self): + # Valid content + file_content = '''[requires] +OpenCV/2.4.10@phil/stable # My requirement for CV +OpenCV2/2.4.10@phil/stable # +OpenCV3/2.4.10@phil/stable +[generators] +one # My generator for this +two +[options] +OpenCV:use_python=True # Some option +OpenCV:other_option=False +OpenCV2:use_python2=1 +OpenCV2:other_option=Cosa # +''' + parser = ConanFileTextLoader(file_content) + exp = ['OpenCV/2.4.10@phil/stable', + 'OpenCV2/2.4.10@phil/stable', + 'OpenCV3/2.4.10@phil/stable'] + self.assertEquals(parser.requirements, exp) + + def load_conan_txt_test(self): + file_content = '''[requires] +OpenCV/2.4.10@phil/stable +OpenCV2/2.4.10@phil/stable +[generators] +one +two +[imports] +OpenCV/bin, * -> ./bin # I need this binaries +OpenCV/lib, * -> ./lib +[options] +OpenCV:use_python=True +OpenCV:other_option=False +OpenCV2:use_python2=1 +OpenCV2:other_option=Cosa +''' + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "file.txt") + save(file_path, file_content) + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), Scopes(), + None, None) + ret = loader.load_conan_txt(file_path, None) + options1 = OptionsValues.loads("""OpenCV:use_python=True +OpenCV:other_option=False +OpenCV2:use_python2=1 +OpenCV2:other_option=Cosa""") + requirements = Requirements() + requirements.add("OpenCV/2.4.10@phil/stable") + requirements.add("OpenCV2/2.4.10@phil/stable") + + self.assertEquals(ret.requires, requirements) + self.assertEquals(ret.generators, ["one", "two"]) + self.assertEquals(ret.options.values.dumps(), options1.dumps()) + + ret.copy = Mock() + ret.imports() + + self.assertTrue(ret.copy.call_args_list, [('*', './bin', 
'OpenCV/bin'), + ('*', './lib', 'OpenCV/lib')]) + + # Now something that fails + file_content = '''[requires] +OpenCV/2.4.104phil/stable <- use_python:True, other_option:False +''' + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "file.txt") + save(file_path, file_content) + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), + Scopes(), None, None) + with self.assertRaisesRegexp(ConanException, "Wrong package recipe reference(.*)"): + loader.load_conan_txt(file_path, None) + + file_content = '''[requires] +OpenCV/2.4.10@phil/stable <- use_python:True, other_option:False +[imports] +OpenCV/bin/* - ./bin +''' + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "file.txt") + save(file_path, file_content) + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), + Scopes(), None, None) + with self.assertRaisesRegexp(ConanException, "is too long. Valid names must contain"): + loader.load_conan_txt(file_path, None) + + def test_package_settings(self): + # CREATE A CONANFILE TO LOAD + tmp_dir = temp_folder() + conanfile_path = os.path.join(tmp_dir, "conanfile.py") + conanfile = """from conans import ConanFile +class MyTest(ConanFile): + requires = {} + name = "MyPackage" + version = "1.0" + settings = "os" +""" + save(conanfile_path, conanfile) + + # Apply windows for MyPackage + package_settings = {"MyPackage": [("os", "Windows")]} + loader = ConanFileLoader(None, Settings({"os": ["Windows", "Linux"]}), + package_settings, OptionsValues.loads(""), Scopes(), + None, None) + + recipe = loader.load_conan(conanfile_path, None) + self.assertEquals(recipe.settings.os, "Windows") + + # Apply Linux for MyPackage + package_settings = {"MyPackage": [("os", "Linux")]} + loader = ConanFileLoader(None, Settings({"os": ["Windows", "Linux"]}), + package_settings, OptionsValues.loads(""), Scopes(), + None, None) + + recipe = loader.load_conan(conanfile_path, None) + self.assertEquals(recipe.settings.os, "Linux") + + # If 
the package name is different from the conanfile one, it wont apply + package_settings = {"OtherPACKAGE": [("os", "Linux")]} + loader = ConanFileLoader(None, Settings({"os": ["Windows", "Linux"]}), + package_settings, OptionsValues.loads(""), Scopes(), + None, None) + + recipe = loader.load_conan(conanfile_path, None) + self.assertIsNone(recipe.settings.os.value) diff --git a/testbed/conan-io__conan/conans/test/conanfile_tools_test.py b/testbed/conan-io__conan/conans/test/conanfile_tools_test.py new file mode 100644 index 0000000000000000000000000000000000000000..437855b37d9ba544c531e7bca50acbed9453dd08 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conanfile_tools_test.py @@ -0,0 +1,115 @@ +import unittest +import os +from conans.util.files import save, load +from conans.client.loader import ConanFileLoader +from conans.model.settings import Settings +from conans.model.options import OptionsValues +from conans.test.utils.test_files import temp_folder +from conans.model.scope import Scopes +from conans import tools +from nose_parameterized.parameterized import parameterized + + +base_conanfile = ''' +from conans import ConanFile +from conans.tools import patch, replace_in_file +import os + +class ConanFileToolsTest(ConanFile): + name = "test" + version = "1.9.10" +''' + + +class ConanfileToolsTest(unittest.TestCase): + + def test_untar(self): + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "example.txt") + save(file_path, "Hello world!") + tar_path = os.path.join(tmp_dir, "sample.tar") + try: + old_path = os.getcwd() + os.chdir(tmp_dir) + import tarfile + tar = tarfile.open(tar_path, "w") + tar.add("example.txt") + tar.close() + finally: + os.chdir(old_path) + output_dir = os.path.join(tmp_dir, "output_dir") + tools.unzip(tar_path, output_dir) + content = load(os.path.join(output_dir, "example.txt")) + self.assertEqual(content, "Hello world!") + + def test_replace_in_file(self): + file_content = base_conanfile + ''' + def build(self): + 
replace_in_file("text.txt", "ONE TWO THREE", "FOUR FIVE SIX") +''' + tmp_dir, file_path, text_file = self._save_files(file_content) + self._build_and_check(tmp_dir, file_path, text_file, "FOUR FIVE SIX") + + @parameterized.expand([(0, ), (1, )]) + def test_patch_from_file(self, strip): + if strip: + file_content = base_conanfile + ''' + def build(self): + patch(patch_file="file.patch", strip=%s) +''' % strip + patch_content = '''--- %s/text.txt\t2016-01-25 17:57:11.452848309 +0100 ++++ %s/text_new.txt\t2016-01-25 17:57:28.839869950 +0100 +@@ -1 +1 @@ +-ONE TWO THREE ++ONE TWO FOUR''' % ("old_path", "new_path") + else: + file_content = base_conanfile + ''' + def build(self): + patch(patch_file="file.patch") +''' + patch_content = '''--- text.txt\t2016-01-25 17:57:11.452848309 +0100 ++++ text_new.txt\t2016-01-25 17:57:28.839869950 +0100 +@@ -1 +1 @@ +-ONE TWO THREE ++ONE TWO FOUR''' + + tmp_dir, file_path, text_file = self._save_files(file_content) + patch_file = os.path.join(tmp_dir, "file.patch") + save(patch_file, patch_content) + self._build_and_check(tmp_dir, file_path, text_file, "ONE TWO FOUR") + + def test_patch_from_str(self): + file_content = base_conanfile + ''' + def build(self): + patch_content = \'''--- text.txt\t2016-01-25 17:57:11.452848309 +0100 ++++ text_new.txt\t2016-01-25 17:57:28.839869950 +0100 +@@ -1 +1 @@ +-ONE TWO THREE ++ONE TWO DOH!\''' + patch(patch_string=patch_content) + +''' + tmp_dir, file_path, text_file = self._save_files(file_content) + self._build_and_check(tmp_dir, file_path, text_file, "ONE TWO DOH!") + + def _save_files(self, file_content): + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "conanfile.py") + text_file = os.path.join(tmp_dir, "text.txt") + save(file_path, file_content) + save(text_file, "ONE TWO THREE") + return tmp_dir, file_path, text_file + + def _build_and_check(self, tmp_dir, file_path, text_file, msg): + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), Scopes(), + None, 
None) + ret = loader.load_conan(file_path, None) + curdir = os.path.abspath(os.curdir) + os.chdir(tmp_dir) + try: + ret.build() + finally: + os.chdir(curdir) + + content = load(text_file) + self.assertEquals(content, msg) diff --git a/testbed/conan-io__conan/conans/test/conf_default_settings_test.py b/testbed/conan-io__conan/conans/test/conf_default_settings_test.py new file mode 100644 index 0000000000000000000000000000000000000000..9162b83edba6162b514698f7cb3014bb62a2b9be --- /dev/null +++ b/testbed/conan-io__conan/conans/test/conf_default_settings_test.py @@ -0,0 +1,33 @@ +import unittest +from conans.test.tools import TestClient +from conans.util.files import save +import os +from conans import tools + + +class ConfDefaultSettingsTest(unittest.TestCase): + + def test_update_settings(self): + default_conf = """[storage] +path: ~/.conan/data +[settings_defaults] +compiler=Visual Studio +compiler.version=42 +""" + client = TestClient() + save(client.client_cache.conan_conf_path, default_conf) + error = client.run("install Any/0.2@user/channel", ignore_error=True) + self.assertTrue(error) + self.assertIn("'42' is not a valid 'settings.compiler.version' value", client.user_io.out) + error = client.run('install -s compiler="Visual Studio" -s compiler.version=14', + ignore_error=True) + self.assertTrue(error) + self.assertIn("'42' is not a valid 'settings.compiler.version' value", client.user_io.out) + + with tools.environment_append({"CONAN_ENV_COMPILER_VERSION": "14"}): + self.assertEqual(os.environ.get("CONAN_ENV_COMPILER_VERSION"), "14") + error = client.run('install', ignore_error=True) + self.assertTrue(error) + self.assertIn("'42' is not a valid 'settings.compiler.version' value", + client.user_io.out) + self.assertIsNone(os.environ.get("CONAN_ENV_COMPILER_VERSION")) diff --git a/testbed/conan-io__conan/conans/test/create_package_test.py b/testbed/conan-io__conan/conans/test/create_package_test.py new file mode 100644 index 
0000000000000000000000000000000000000000..c4e70cb9d40954d6874082402c04a360e1d57392 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/create_package_test.py @@ -0,0 +1,124 @@ +import unittest +import os +from conans.test.tools import TestClient, TestBufferConanOutput +from conans.test.utils.test_files import hello_source_files +from conans.client.manager import CONANFILE +from conans.model.ref import ConanFileReference, PackageReference +import shutil +from conans.paths import CONANINFO +from conans.client.packager import create_package +from conans.client.loader import ConanFileLoader +from conans.model.options import OptionsValues +from conans.model.settings import Settings +from conans.client.output import ScopedOutput +from conans.model.scope import Scopes + + +myconan1 = """ +from conans import ConanFile +import platform + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2.1" + files = '*' + + def package(self): + self.copy("*", "include", "include/math") + self.copy("include/physics/*.hpp") + self.copy("contrib/*", "include") + self.copy("include/opencv/*") + self.copy("include/opencv2/*") + self.copy("*", "include", "modules/simu/include") + self.copy("*", "include", "modules/3D/include") + self.copy("*", "include", "modules/dev/include") + self.copy("*.a", "lib/my_lib", "my_lib/debug") + self.copy("*.txt", "res/shares", "my_data") + +""" + + +class ExporterTest(unittest.TestCase): + + def complete_test(self): + """ basic installation of a new conans + """ + client = TestClient() + client.init_dynamic_vars() + files = hello_source_files() + + conan_ref = ConanFileReference.loads("Hello/1.2.1/frodo/stable") + reg_folder = client.paths.export(conan_ref) + + client.save(files, path=reg_folder) + client.save({CONANFILE: myconan1, + CONANINFO: "//empty", + "include/no_copy/lib0.h": "NO copy", + "include/math/lib1.h": "copy", + "include/math/lib2.h": "copy", + "include/physics/lib.hpp": "copy", + "my_lib/debug/libd.a": "copy", + 
"my_data/readme.txt": "copy", + "my_data/readme.md": "NO copy", + "contrib/math/math.h": "copy", + "contrib/physics/gravity.h": "copy", + "contrib/contrib.h": "copy", + "include/opencv/opencv.hpp": "copy", + "include/opencv2/opencv2.hpp": "copy", + "modules/simu/src/simu.cpp": "NO copy", + "modules/simu/include/opencv2/simu/simu.hpp": "copy", + "modules/3D/doc/readme.md": "NO copy", + "modules/3D/include/opencv2/3D/3D.hpp": "copy", + "modules/dev/src/dev.cpp": "NO copy", + "modules/dev/include/opencv2/dev/dev.hpp": "copy", + "modules/opencv_mod.hpp": "copy"}, path=reg_folder) + + conanfile_path = os.path.join(reg_folder, CONANFILE) + package_ref = PackageReference(conan_ref, "myfakeid") + build_folder = client.paths.build(package_ref) + package_folder = client.paths.package(package_ref) + + shutil.copytree(reg_folder, build_folder) + + loader = ConanFileLoader(None, Settings(), None, OptionsValues.loads(""), Scopes(), None, + None) + conanfile = loader.load_conan(conanfile_path, None) + output = ScopedOutput("", TestBufferConanOutput()) + create_package(conanfile, build_folder, package_folder, output) + + # test build folder + self.assertTrue(os.path.exists(build_folder)) + self.assertTrue(os.path.exists(os.path.join(package_folder, CONANINFO))) + + # test pack folder + self.assertTrue(os.path.exists(package_folder)) + + def exist(rel_path): + return os.path.exists(os.path.join(package_folder, rel_path)) + + # Expected files + self.assertTrue(exist("include/lib1.h")) + self.assertTrue(exist("include/lib2.h")) + self.assertTrue(exist("include/physics/lib.hpp")) + self.assertTrue(exist("include/contrib/math/math.h")) + self.assertTrue(exist("include/contrib/physics/gravity.h")) + self.assertTrue(exist("include/contrib/contrib.h")) + self.assertTrue(exist("include/opencv/opencv.hpp")) + self.assertTrue(exist("include/opencv2/opencv2.hpp")) + self.assertTrue(exist("include/opencv2/simu/simu.hpp")) + self.assertTrue(exist("include/opencv2/3D/3D.hpp")) + 
self.assertTrue(exist("include/opencv2/dev/dev.hpp")) + self.assertTrue(exist("lib/my_lib/libd.a")) + self.assertTrue(exist("res/shares/readme.txt")) + + # Not expected files + self.assertFalse(exist("include/opencv2/opencv_mod.hpp")) + self.assertFalse(exist("include/opencv2/simu.hpp")) + self.assertFalse(exist("include/opencv2/3D.hpp")) + self.assertFalse(exist("include/opencv2/dev.hpp")) + self.assertFalse(exist("include/modules/simu/src/simu.cpp")) + self.assertFalse(exist("include/modules/3D/doc/readme.md")) + self.assertFalse(exist("include/modules/dev/src/dev.cpp")) + self.assertFalse(exist("include/opencv2/opencv_mod.hpp")) + self.assertFalse(exist("include/include/no_copy/lib0.h")) + self.assertFalse(exist("res/my_data/readme.md")) diff --git a/testbed/conan-io__conan/conans/test/deps_graph_test.py b/testbed/conan-io__conan/conans/test/deps_graph_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f85c484c4b8df2bd5c0834db3be46d59bd94c572 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/deps_graph_test.py @@ -0,0 +1,70 @@ +import unittest +from conans.client.deps_builder import DepsGraph, Node +from conans.model.ref import ConanFileReference +from conans.model.conan_file import ConanFile +from conans.model.settings import Settings + + +class DepsGraphTest(unittest.TestCase): + + def test_node(self): + """ nodes are different even if contain same values, + so they can be repeated if necessary in the graph (common + static libraries) + """ + conan_ref1 = ConanFileReference.loads("Hello/0.1@user/stable") + conan_ref2 = ConanFileReference.loads("Hello/0.1@user/stable") + + conanfile1 = ConanFile(None, None, Settings({}), ".") + conanfile2 = ConanFile(None, None, Settings({}), ".") + n1 = Node(conan_ref1, conanfile1) + n2 = Node(conan_ref2, conanfile2) + + self.assertNotEqual(n1, n2) + + def basic_levels_test(self): + deps = DepsGraph() + deps.add_node(1) + deps.add_node(2) + deps.add_node(3) + deps.add_edge(1, 2) + 
deps.add_edge(2, 3) + self.assertEqual([[3], [2], [1]], deps.by_levels()) + + def multi_levels_test(self): + deps = DepsGraph() + deps.add_node(1) + deps.add_node(2) + deps.add_node(32) + deps.add_node(31) + deps.add_edge(1, 2) + deps.add_edge(2, 31) + deps.add_edge(2, 32) + self.assertEqual([[31, 32], [2], [1]], deps.by_levels()) + + def multi_levels_test2(self): + deps = DepsGraph() + deps.add_node(1) + deps.add_node(5) + deps.add_node(2) + deps.add_node(32) + deps.add_node(31) + deps.add_edge(1, 2) + deps.add_edge(1, 5) + deps.add_edge(2, 31) + deps.add_edge(2, 32) + self.assertEqual([[5, 31, 32], [2], [1]], deps.by_levels()) + + def multi_levels_test3(self): + deps = DepsGraph() + deps.add_node(1) + deps.add_node(5) + deps.add_node(2) + deps.add_node(32) + deps.add_node(31) + deps.add_edge(1, 2) + deps.add_edge(1, 5) + deps.add_edge(2, 31) + deps.add_edge(2, 32) + deps.add_edge(32, 5) + self.assertEqual([[5, 31], [32], [2], [1]], deps.by_levels()) diff --git a/testbed/conan-io__conan/conans/test/detect_test.py b/testbed/conan-io__conan/conans/test/detect_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3bd4833b434b4ba7aa276d983d8134274b688830 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/detect_test.py @@ -0,0 +1,14 @@ +import unittest +from conans.test.tools import TestBufferConanOutput +from conans.client.detect import detect_defaults_settings +import platform + + +class DetectTest(unittest.TestCase): + + def detect_test(self): + output = TestBufferConanOutput() + detect_defaults_settings(output) + self.assertIn("It seems to be the first time you run conan", output) + if platform.system() == "Linux": + self.assertIn("Found gcc", output) diff --git a/testbed/conan-io__conan/conans/test/disk_search_test.py b/testbed/conan-io__conan/conans/test/disk_search_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3689071a763625d41930835f330e9efe01680c8c --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/disk_search_test.py @@ -0,0 +1,80 @@ +import os +import unittest +from conans.paths import (BUILD_FOLDER, PACKAGES_FOLDER, EXPORT_FOLDER, SimplePaths, CONANINFO) +from conans.model.ref import ConanFileReference +from conans.test.utils.test_files import temp_folder +from conans.search.search import DiskSearchManager, DiskSearchAdapter +from conans.util.files import save +from conans.model.info import ConanInfo + + +class SearchTest(unittest.TestCase): + + def setUp(self): + folder = temp_folder() + paths = SimplePaths(folder) + search_adapter = DiskSearchAdapter() + self.search_manager = DiskSearchManager(paths, search_adapter) + os.chdir(paths.store) + self.paths = paths + + def basic_test2(self): + conan_ref1 = ConanFileReference.loads("opencv/2.4.10@lasote/testing") + root_folder = str(conan_ref1).replace("@", "/") + artifacts = ["a", "b", "c"] + reg1 = "%s/%s" % (root_folder, EXPORT_FOLDER) + os.makedirs(reg1) + for artif_id in artifacts: + build1 = "%s/%s/%s" % (root_folder, BUILD_FOLDER, artif_id) + artif1 = "%s/%s/%s" % (root_folder, PACKAGES_FOLDER, artif_id) + os.makedirs(build1) + info = ConanInfo().loads("[settings]\n[options]") + save(os.path.join(artif1, CONANINFO), info.dumps()) + + packages = self.search_manager.search_packages(conan_ref1, "") + all_artif = [_artif for _artif in sorted(packages)] + self.assertEqual(all_artif, artifacts) + + def pattern_test(self): + refs = ["opencv/2.4.%s@lasote/testing" % ref for ref in ("1", "2", "3")] + refs = [ConanFileReference.loads(ref) for ref in refs] + for ref in refs: + root_folder = str(ref).replace("@", "/") + reg1 = "%s/%s" % (root_folder, EXPORT_FOLDER) + os.makedirs(reg1) + + recipes = self.search_manager.search("opencv/*@lasote/testing") + self.assertEqual(recipes, refs) + + def case_insensitive_test(self): + root_folder2 = "sdl/1.5/lasote/stable" + conan_ref2 = ConanFileReference.loads("sdl/1.5@lasote/stable") + os.makedirs("%s/%s" % (root_folder2, 
EXPORT_FOLDER)) + + root_folder3 = "assimp/0.14/phil/testing" + conan_ref3 = ConanFileReference.loads("assimp/0.14@phil/testing") + os.makedirs("%s/%s" % (root_folder3, EXPORT_FOLDER)) + + root_folder4 = "sdl/2.10/lasote/stable" + conan_ref4 = ConanFileReference.loads("sdl/2.10@lasote/stable") + os.makedirs("%s/%s" % (root_folder4, EXPORT_FOLDER)) + + root_folder5 = "SDL_fake/1.10/lasote/testing" + conan_ref5 = ConanFileReference.loads("SDL_fake/1.10@lasote/testing") + os.makedirs("%s/%s" % (root_folder5, EXPORT_FOLDER)) + # Case insensitive searches + search_adapter = DiskSearchAdapter() + search_manager = DiskSearchManager(self.paths, search_adapter) + + reg_conans = sorted([str(_reg) for _reg in search_manager.search("*")]) + self.assertEqual(reg_conans, [str(conan_ref5), + str(conan_ref3), + str(conan_ref2), + str(conan_ref4)]) + + reg_conans = sorted([str(_reg) for _reg in search_manager.search(pattern="sdl*")]) + self.assertEqual(reg_conans, [str(conan_ref5), str(conan_ref2), str(conan_ref4)]) + + # Case sensitive search + self.assertEqual(str(search_manager.search(pattern="SDL*", ignorecase=False)[0]), + str(conan_ref5)) diff --git a/testbed/conan-io__conan/conans/test/download_test.py b/testbed/conan-io__conan/conans/test/download_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c6c6670c80d830e94d262646c8f51442ebc63a2e --- /dev/null +++ b/testbed/conan-io__conan/conans/test/download_test.py @@ -0,0 +1,107 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.test.utils.test_files import hello_source_files +from conans.client.manager import CONANFILE +import os +from conans.model.ref import ConanFileReference, PackageReference +from conans.paths import CONAN_MANIFEST, CONANINFO +from conans.util.files import save +from conans.model.manifest import FileTreeManifest +from conans.client.proxy import ConanProxy + + +myconan1 = """ +from conans import ConanFile +import platform + +class 
HelloConan(ConanFile): + name = "Hello" + version = "1.2.1" +""" + + +class DownloadTest(unittest.TestCase): + + def complete_test(self): + """ basic installation of a new conans + """ + servers = {} + # All can write (for avoid authentication until we mock user_io) + test_server = TestServer([("*/*@*/*", "*")], [("*/*@*/*", "*")]) + servers["default"] = test_server + + conan_digest = FileTreeManifest(123123123, {}) + + client = TestClient(servers=servers) + client.init_dynamic_vars() + conan_ref = ConanFileReference.loads("Hello/1.2.1@frodo/stable") + reg_folder = client.paths.export(conan_ref) + + files = hello_source_files() + client.save(files, path=reg_folder) + client.save({CONANFILE: myconan1, + CONAN_MANIFEST: str(conan_digest), + "include/math/lib1.h": "//copy", + "my_lib/debug/libd.a": "//copy", + "my_data/readme.txt": "//copy"}, path=reg_folder) + + package_ref = PackageReference(conan_ref, "fakeid") + package_folder = client.paths.package(package_ref) + save(os.path.join(package_folder, CONANINFO), "info") + save(os.path.join(package_folder, CONAN_MANIFEST), "manifest") + save(os.path.join(package_folder, "include", "lib1.h"), "//header") + save(os.path.join(package_folder, "lib", "my_lib", "libd.a"), "//lib") + save(os.path.join(package_folder, "res", "shares", "readme.txt"), + "//res") + + digest_path = client.client_cache.digestfile_package(package_ref) + expected_manifest = FileTreeManifest.create(os.path.dirname(digest_path)) + save(os.path.join(package_folder, CONAN_MANIFEST), str(expected_manifest)) + + client.run("upload %s" % str(conan_ref)) + client.run("upload %s -p %s" % (str(conan_ref), package_ref.package_id)) + + client2 = TestClient(servers=servers) + client2.init_dynamic_vars() + + installer = ConanProxy(client2.paths, client2.user_io, client2.remote_manager, "default") + + installer.get_recipe(conan_ref) + installer.get_package(package_ref, short_paths=False) + # Check that the output is done in order + lines = [line.strip() for line 
in str(client2.user_io.out).splitlines() + if line.startswith("Downloading")] + self.assertEqual(lines, ["Downloading conanmanifest.txt", + "Downloading conanfile.py", + "Downloading conan_export.tgz", + "Downloading conanmanifest.txt", + "Downloading conaninfo.txt", + "Downloading conan_package.tgz" + ]) + + reg_path = client2.paths.export(ConanFileReference.loads("Hello/1.2.1/frodo/stable")) + pack_folder = client2.paths.package(package_ref) + + # Test the file in the downloaded conans + files = ['CMakeLists.txt', + 'my_lib/debug/libd.a', + 'hello.cpp', + 'hello0.h', + CONANFILE, + CONAN_MANIFEST, + 'main.cpp', + 'include/math/lib1.h', + 'my_data/readme.txt'] + + for _file in files: + self.assertTrue(os.path.exists(os.path.join(reg_path, _file))) + self.assertTrue(os.path.exists(pack_folder)) + + # Test the file in the downloaded package + self.assertTrue(os.path.exists(pack_folder)) + self.assertTrue(os.path.exists(os.path.join(pack_folder, "include", + "lib1.h"))) + self.assertTrue(os.path.exists(os.path.join(pack_folder, "lib", + "my_lib/libd.a"))) + self.assertTrue(os.path.exists(os.path.join(pack_folder, "res", + "shares/readme.txt"))) diff --git a/testbed/conan-io__conan/conans/test/file_hashes_test.py b/testbed/conan-io__conan/conans/test/file_hashes_test.py new file mode 100644 index 0000000000000000000000000000000000000000..a25d280aed8f24170fdd4057e356415d9bbbcd68 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/file_hashes_test.py @@ -0,0 +1,29 @@ +import unittest + +from conans.tools import check_md5, check_sha256, check_sha1 +from conans.test.utils.test_files import temp_folder +from conans.util.files import save +import os +from conans.errors import ConanException + + +class HashesTest(unittest.TestCase): + + def md5_test(self): + folder = temp_folder() + filepath = os.path.join(folder, "file.txt") + file_content = "a file" + save(filepath, file_content) + + check_md5(filepath, "d6d0c756fb8abfb33e652a20e85b70bc") + check_sha1(filepath, 
"eb599ec83d383f0f25691c184f656d40384f9435") + check_sha256(filepath, "7365d029861e32c521f8089b00a6fb32daf0615025b69b599d1ce53501b845c2") + + with self.assertRaisesRegexp(ConanException, "md5 signature failed for 'file.txt' file. Computed signature:"): + check_md5(filepath, "invalid") + + with self.assertRaisesRegexp(ConanException, "sha1 signature failed for 'file.txt' file. Computed signature:"): + check_sha1(filepath, "invalid") + + with self.assertRaisesRegexp(ConanException, "sha256 signature failed for 'file.txt' file. Computed signature:"): + check_sha256(filepath, "invalid") diff --git a/testbed/conan-io__conan/conans/test/files_test.py b/testbed/conan-io__conan/conans/test/files_test.py new file mode 100644 index 0000000000000000000000000000000000000000..94d0faa8bd2786e3f697d5e72203a09ad9d9f026 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/files_test.py @@ -0,0 +1,61 @@ +import unittest +import os +from conans.util.files import save, load +from conans.test.utils.test_files import temp_folder +import platform +from conans.client.file_copier import FileCopier + + +class FileCopierTest(unittest.TestCase): + + def basic_test(self): + folder1 = temp_folder() + sub1 = os.path.join(folder1, "subdir1") + sub2 = os.path.join(folder1, "subdir2") + save(os.path.join(sub1, "file1.txt"), "Hello1") + save(os.path.join(sub1, "file2.c"), "Hello2") + save(os.path.join(sub1, "sub1/file1.txt"), "Hello1 sub") + save(os.path.join(sub1, "sub1/file2.c"), "Hello2 sub") + save(os.path.join(sub2, "file1.txt"), "2 Hello1") + save(os.path.join(sub2, "file2.c"), "2 Hello2") + + folder2 = temp_folder() + copier = FileCopier(folder1, folder2) + copier("*.txt", "texts") + self.assertEqual("Hello1", load(os.path.join(folder2, "texts/subdir1/file1.txt"))) + self.assertEqual("Hello1 sub", load(os.path.join(folder2, "texts/subdir1/sub1/file1.txt"))) + self.assertEqual("2 Hello1", load(os.path.join(folder2, "texts/subdir2/file1.txt"))) + self.assertEqual(['file1.txt'], 
os.listdir(os.path.join(folder2, "texts/subdir2"))) + + folder2 = temp_folder() + copier = FileCopier(folder1, folder2) + copier("*.txt", "texts", "subdir1") + self.assertEqual("Hello1", load(os.path.join(folder2, "texts/file1.txt"))) + self.assertEqual("Hello1 sub", load(os.path.join(folder2, "texts/sub1/file1.txt"))) + self.assertNotIn("subdir2", os.listdir(os.path.join(folder2, "texts"))) + + def basic_with_linked_dir_test(self): + if platform.system() == "Linux" or platform.system() == "Darwin": + folder1 = temp_folder() + sub1 = os.path.join(folder1, "subdir1") + sub2 = os.path.join(folder1, "subdir2") + os.makedirs(sub1) + os.symlink("subdir1", sub2) + save(os.path.join(sub1, "file1.txt"), "Hello1") + save(os.path.join(sub1, "file2.c"), "Hello2") + save(os.path.join(sub1, "sub1/file1.txt"), "Hello1 sub") + + folder2 = temp_folder() + copier = FileCopier(folder1, folder2) + copier("*.txt", "texts") + self.assertEqual("Hello1", load(os.path.join(folder2, "texts/subdir1/file1.txt"))) + self.assertEqual("Hello1 sub", load(os.path.join(folder2, "texts/subdir1/sub1/file1.txt"))) + self.assertEqual("Hello1", load(os.path.join(folder2, "texts/subdir2/file1.txt"))) + self.assertEqual(['file1.txt', 'sub1'].sort(), os.listdir(os.path.join(folder2, "texts/subdir2")).sort()) + + folder2 = temp_folder() + copier = FileCopier(folder1, folder2) + copier("*.txt", "texts", "subdir1") + self.assertEqual("Hello1", load(os.path.join(folder2, "texts/file1.txt"))) + self.assertEqual("Hello1 sub", load(os.path.join(folder2, "texts/sub1/file1.txt"))) + self.assertNotIn("subdir2", os.listdir(os.path.join(folder2, "texts"))) diff --git a/testbed/conan-io__conan/conans/test/generators/__init__.py b/testbed/conan-io__conan/conans/test/generators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/generators/cmake_test.py 
b/testbed/conan-io__conan/conans/test/generators/cmake_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c8db331f8d604fa6beb1aee674e9860be17b822d --- /dev/null +++ b/testbed/conan-io__conan/conans/test/generators/cmake_test.py @@ -0,0 +1,66 @@ +import re +import unittest +from conans.model.settings import Settings +from conans.model.conan_file import ConanFile +from conans.client.generators.cmake import CMakeGenerator +from conans.model.build_info import CppInfo +from conans.model.ref import ConanFileReference + + +class CMakeGeneratorTest(unittest.TestCase): + + def _extract_macro(self, name, text): + pattern = ".*(macro\(%s\).*?endmacro\(\)).*" % name + return re.sub(pattern, r"\1", text, flags=re.DOTALL) + + def variables_setup_test(self): + conanfile = ConanFile(None, None, Settings({}), None) + ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables") + cpp_info = CppInfo("dummy_root_folder1") + cpp_info.defines = ["MYDEFINE1"] + conanfile.deps_cpp_info.update(cpp_info, ref) + ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables") + cpp_info = CppInfo("dummy_root_folder2") + cpp_info.defines = ["MYDEFINE2"] + conanfile.deps_cpp_info.update(cpp_info, ref) + generator = CMakeGenerator(conanfile) + content = generator.content + cmake_lines = content.splitlines() + self.assertIn("set(CONAN_DEFINES_MYPKG -DMYDEFINE1)", cmake_lines) + self.assertIn("set(CONAN_DEFINES_MYPKG2 -DMYDEFINE2)", cmake_lines) + self.assertIn("set(CONAN_COMPILE_DEFINITIONS_MYPKG MYDEFINE1)", cmake_lines) + self.assertIn("set(CONAN_COMPILE_DEFINITIONS_MYPKG2 MYDEFINE2)", cmake_lines) + + def aux_cmake_test_setup_test(self): + conanfile = ConanFile(None, None, Settings({}), None) + generator = CMakeGenerator(conanfile) + aux_cmake_test_setup = generator.content + + # extract the conan_basic_setup macro + macro = self._extract_macro("conan_basic_setup", aux_cmake_test_setup) + self.assertEqual("""macro(conan_basic_setup) + conan_check_compiler() + 
conan_output_dirs_setup() + conan_set_find_library_paths() + if(NOT "${ARGV0}" STREQUAL "TARGETS") + message(STATUS "Conan: Using cmake global configuration") + conan_global_flags() + else() + message(STATUS "Conan: Using cmake targets configuration") + conan_define_targets() + endif() + conan_set_rpath() + conan_set_vs_runtime() + conan_set_libcxx() + conan_set_find_paths() +endmacro()""", macro) + + # extract the conan_set_find_paths macro + macro = self._extract_macro("conan_set_find_paths", aux_cmake_test_setup) + self.assertEqual("""macro(conan_set_find_paths) + # CMake can find findXXX.cmake files in the root of packages + set(CMAKE_MODULE_PATH ${CONAN_CMAKE_MODULE_PATH} ${CMAKE_MODULE_PATH}) + + # Make find_package() to work + set(CMAKE_PREFIX_PATH ${CONAN_CMAKE_MODULE_PATH} ${CMAKE_PREFIX_PATH}) +endmacro()""", macro) diff --git a/testbed/conan-io__conan/conans/test/generators/scons_test.py b/testbed/conan-io__conan/conans/test/generators/scons_test.py new file mode 100644 index 0000000000000000000000000000000000000000..fd2e577439a3f4bd029126c396ff066124794734 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/generators/scons_test.py @@ -0,0 +1,27 @@ +import re +import unittest +from conans.model.settings import Settings +from conans.model.conan_file import ConanFile +from conans.client.generators.scons import SConsGenerator +from conans.model.build_info import DepsCppInfo +from conans.model.ref import ConanFileReference + + +class SConsGeneratorTest(unittest.TestCase): + def variables_setup_test(self): + conanfile = ConanFile(None, None, Settings({}), None) + ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables") + cpp_info = DepsCppInfo() + cpp_info.defines = ["MYDEFINE1"] + conanfile.deps_cpp_info.update(cpp_info, ref) + ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables") + cpp_info = DepsCppInfo() + cpp_info.defines = ["MYDEFINE2"] + conanfile.deps_cpp_info.update(cpp_info, ref) + generator = SConsGenerator(conanfile) + content = 
generator.content + scons_lines = content.splitlines() + self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE2\', \'MYDEFINE1\'],", scons_lines) + self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE1\'],", scons_lines) + self.assertIn(" \"CPPDEFINES\" : [\'MYDEFINE2\'],", scons_lines) + diff --git a/testbed/conan-io__conan/conans/test/generators_test.py b/testbed/conan-io__conan/conans/test/generators_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e22f10d2894d1e44081eae4b02bdc7ed29c7d3fc --- /dev/null +++ b/testbed/conan-io__conan/conans/test/generators_test.py @@ -0,0 +1,29 @@ +import unittest +from conans.test.tools import TestClient +import os + + +class GeneratorsTest(unittest.TestCase): + + def test_base(self): + base = ''' +[generators] +cmake +gcc +qbs +qmake +scons +txt +visual_studio +xcode +ycm + ''' + files = {"conanfile.txt": base} + client = TestClient() + client.save(files) + client.run("install --build") + self.assertEqual(sorted(['conanfile.txt', 'conaninfo.txt', 'conanbuildinfo.cmake', + 'conanbuildinfo.gcc', 'conanbuildinfo.qbs', 'conanbuildinfo.pri', + 'SConscript_conan', 'conanbuildinfo.txt', 'conanbuildinfo.props', + 'conanbuildinfo.xcconfig', '.ycm_extra_conf.py']), + sorted(os.listdir(client.current_folder))) diff --git a/testbed/conan-io__conan/conans/test/integration/__init__.py b/testbed/conan-io__conan/conans/test/integration/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/integration/basic_build_test.py b/testbed/conan-io__conan/conans/test/integration/basic_build_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba749c4b5ed450807695231ea7d84979a828f23 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/basic_build_test.py @@ -0,0 +1,69 @@ +import unittest +from conans.test.tools import TestClient +import os +from conans.paths import CONANINFO +from 
conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +from conans.util.files import load +from conans.model.info import ConanInfo +import platform +from conans.util.log import logger + + +@attr("slow") +class BasicBuildTest(unittest.TestCase): + + def _build(self, cmd, static, pure_c, use_cmake, lang): + client = TestClient() + dll_export = client.default_compiler_visual_studio and not static + files = cpp_hello_conan_files("Hello0", "0.1", dll_export=dll_export, + pure_c=pure_c, use_cmake=use_cmake) + + client.save(files) + client.run(cmd) + client.run('build') + ld_path = ("LD_LIBRARY_PATH=`pwd`" + if not static and not platform.system() == "Windows" else "") + command = os.sep.join([".", "bin", "say_hello"]) + client.runner("%s %s" % (ld_path, command), cwd=client.current_folder) + msg = "Hello" if lang == 0 else "Hola" + self.assertIn("%s Hello0" % msg, client.user_io.out) + conan_info_path = os.path.join(client.current_folder, CONANINFO) + conan_info = ConanInfo.loads(load(conan_info_path)) + self.assertTrue(conan_info.full_options.language == lang) + if static: + self.assertTrue(conan_info.full_options.static) + else: + self.assertFalse(conan_info.full_options.static) + + def build_cmake_test(self): + for pure_c in (False, True): + for cmd, lang, static in [("install", 0, True), + ("install -o language=1", 1, True), + ("install -o language=1 -o static=False", 1, False), + ("install -o static=False", 0, False)]: + self._build(cmd, static, pure_c, use_cmake=True, lang=lang) + + def build_default_test(self): + "build default (gcc in nix, VS in win)" + for pure_c in (False, True): + for cmd, lang, static in [("install", 0, True), + ("install -o language=1", 1, True), + ("install -o language=1 -o static=False", 1, False), + ("install -o static=False", 0, False)]: + self._build(cmd, static, pure_c, use_cmake=False, lang=lang) + + def build_mingw_test(self): + if platform.system() != "Windows": + return + not_env = 
conanfile = '''
from conans import ConanFile

class ConanLib(ConanFile):
    name = "Hello0"
    version = "0.1"

    def source(self):
        self.output.info("Running source!")
'''


class CaseSensitiveTest(unittest.TestCase):
    """Using a wrongly-cased reference ('hello0' vs 'Hello0') must error, and
    on case-insensitive filesystems the error must mention the clash."""

    def _check(self, error, client):
        # Every command with the badly-cased reference must fail
        self.assertTrue(error)
        clash_msg = "case incompatible 'Hello0'"
        if is_case_insensitive_os():
            self.assertIn(clash_msg, client.user_io.out)
        else:
            self.assertNotIn(clash_msg, client.user_io.out)

    def _exported_client(self):
        # Fresh client with the Hello0 recipe already exported locally
        client = TestClient()
        client.save({CONANFILE: conanfile})
        client.run("export lasote/stable")
        return client

    def install_test(self):
        servers = {"default": TestServer()}
        client = TestClient(servers=servers,
                            users={"default": [("lasote", "mypass")]})

        client.save(cpp_hello_conan_files("Hello0", "0.1", build=False))
        client.run("export lasote/stable")
        client.run("install Hello0/0.1@lasote/stable --build missing")
        client.run("upload Hello0/0.1@lasote/stable --all")

        # Depend on the package using the wrong case: install must fail
        files = cpp_hello_conan_files("Hello1", "0.1",
                                      deps=["hello0/0.1@lasote/stable"],
                                      build=False)
        client.save(files)
        error = client.run("install", ignore_error=True)
        self._check(error, client)

    def install_same_test(self):
        client = self._exported_client()
        error = client.run("install hello0/0.1@lasote/stable --build=missing",
                           ignore_error=True)
        self._check(error, client)

    def imports_test(self):
        client = self._exported_client()
        client.run("install Hello0/0.1@lasote/stable --build=missing")
        error = client.run("imports hello0/0.1@lasote/stable", ignore_error=True)
        self._check(error, client)

    def package_test(self):
        client = self._exported_client()
        client.run("install Hello0/0.1@lasote/stable --build=missing")
        error = client.run("package hello0/0.1@lasote/stable", ignore_error=True)
        self._check(error, client)

    def copy_test(self):
        client = self._exported_client()
        client.run("install Hello0/0.1@lasote/stable --build=missing")
        error = client.run("copy hello0/0.1@lasote/stable otheruser/testing",
                           ignore_error=True)
        self._check(error, client)

    def source_test(self):
        client = self._exported_client()
        error = client.run("source hello0/0.1@lasote/stable", ignore_error=True)
        self._check(error, client)
+import unittest +from conans.test.tools import TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +from six import StringIO +from conans.test.utils.runner import TestRunner +import platform +import os + + +conanfile = """[requires] +Hello1/0.1@lasote/testing +[generators] +cmake_multi +""" + +cmake = """ +project(MyHello) +cmake_minimum_required(VERSION 2.8.12) + +# Some cross-building toolchains will define this +set(CMAKE_FIND_ROOT_PATH "/some/path") +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +include(${CMAKE_BINARY_DIR}/conanbuildinfo_multi.cmake) +conan_basic_setup() + +add_executable(say_hello main.cpp) +foreach(_LIB ${CONAN_LIBS_RELEASE}) + target_link_libraries(say_hello optimized ${_LIB}) +endforeach() +foreach(_LIB ${CONAN_LIBS_DEBUG}) + target_link_libraries(say_hello debug ${_LIB}) +endforeach() +""" + +cmake_targets = """ +project(MyHello) +cmake_minimum_required(VERSION 2.8.12) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo_multi.cmake) +conan_basic_setup(TARGETS) + +add_executable(say_hello main.cpp) +target_link_libraries(say_hello CONAN_PKG::Hello1) +""" + +main = """ +#include "helloHello1.h" +#include + +int main(){{ + std::cout<<"Hello0:"< +void hello(){std::cout<<"Hello World!";} +""" + +conanfile = """[requires] +Hello/0.1@lasote/testing +""" + +cmake = """ +project(MyHello) +cmake_minimum_required(VERSION 2.8.12) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup(TARGETS) + +add_executable(say_hello main.cpp) +target_link_libraries(say_hello CONAN_PKG::Hello) +""" + +main = """ +#include "hello.h" +int main(){ + hello(); +} +""" + + +@attr("slow") +class CMakeTargetsTest(unittest.TestCase): + + def header_only_test(self): + client = TestClient() + client.save({"conanfile.py": conanfile_py, + "hello.h": hello}) + client.run("export lasote/testing") + client.save({"conanfile.txt": conanfile, + "CMakeLists.txt": cmake, + "main.cpp": main}, clean_first=True) + 
+ client.run('install -g cmake') + client.runner("cmake .", cwd=client.current_folder) + self.assertNotIn("WARN: Unknown compiler '", client.user_io.out) + self.assertNotIn("', skipping the version check...", client.user_io.out) + self.assertIn("Configuring done", client.user_io.out) + self.assertIn("Generating done", client.user_io.out) + self.assertIn("Build files have been written", client.user_io.out) + client.save({"conanfile.txt": conanfile, + "CMakeLists.txt": cmake.replace("conanbuildinfo.cmake", + "conanbuildinfo_multi.cmake"), + "main.cpp": main}, clean_first=True) + + if platform.system() == "Windows": + debug_install = '-s compiler="Visual Studio" -s compiler.version=14 -s compiler.runtime=MDd' + release_install = '-s compiler="Visual Studio" -s compiler.version=14 -s compiler.runtime=MD' + + client.run('install %s -s build_type=Debug -g cmake_multi' % debug_install) + client.run('install %s -s build_type=Release -g cmake_multi' % release_install) + client.runner("cmake .", cwd=client.current_folder) + self.assertNotIn("WARN: Unknown compiler '", client.user_io.out) + self.assertNotIn("', skipping the version check...", client.user_io.out) + self.assertIn("Configuring done", client.user_io.out) + self.assertIn("Generating done", client.user_io.out) + self.assertIn("Build files have been written", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/complete_test.py b/testbed/conan-io__conan/conans/test/integration/complete_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e0671828215bb7e5bda6ab607ce9e951fe0fc384 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/complete_test.py @@ -0,0 +1,113 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference, PackageReference +import os +import time +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +from 
@attr("slow")
class CompleteFlowTest(unittest.TestCase):
    """End to end: export -> build -> upload -> re-install from a server,
    checking tgz compression only happens when something actually changed."""

    def setUp(self):
        self.servers = {"default": TestServer()}
        self.client = TestClient(servers=self.servers,
                                 users={"default": [("lasote", "mypass")]})

    def reuse_test(self):
        ref = ConanFileReference.loads("Hello0/0.1@lasote/stable")
        self.client.save(cpp_hello_conan_files("Hello0", "0.1", need_patch=True))
        self.client.run("export lasote/stable")
        self.client.run("install %s --build missing" % str(ref))

        self.assertIn("Hello0/0.1@lasote/stable package(): Copied 1 '.h' files: helloHello0.h",
                      self.client.user_io.out)
        # Exactly one binary package must have been built
        pkg_ids = self.client.paths.conan_packages(ref)
        self.assertEqual(len(pkg_ids), 1)
        pkg_ref = PackageReference(ref, pkg_ids[0])
        self._assert_library_exists(pkg_ref, self.client.paths)

        # First upload compresses the export; the second reuses the tgz
        self.client.run("upload %s" % str(ref))
        self.assertIn("Compressing exported", str(self.client.user_io.out))
        self.client.run("upload %s" % str(ref))
        self.assertNotIn("Compressing exported", str(self.client.user_io.out))

        # The recipe must now exist server-side
        server_paths = self.servers["default"].paths
        self.assertTrue(os.path.exists(server_paths.export(ref)))

        # Same compress-once pattern for the binary package upload
        self.client.run("upload %s -p %s" % (str(ref), str(pkg_ids[0])))
        self.assertIn("Compressing package", str(self.client.user_io.out))
        self.client.run("upload %s -p %s" % (str(ref), str(pkg_ids[0])))
        self.assertNotIn("Compressing package", str(self.client.user_io.out))

        # Re-installing locally must not force a re-compression on upload
        self.client.run("install %s --build missing" % str(ref))
        self.client.run("upload %s -p %s" % (str(ref), str(pkg_ids[0])))
        self.assertNotIn("Compressing package", str(self.client.user_io.out))

        self._assert_library_exists_in_server(pkg_ref, server_paths)

        # A second machine installs the uploaded binary: nothing is rebuilt
        other_conan = TestClient(servers=self.servers,
                                 users={"default": [("lasote", "mypass")]})
        other_conan.run("install %s --build missing" % str(ref))
        self.assertFalse(os.path.exists(other_conan.paths.build(pkg_ref)))
        self._assert_library_exists(pkg_ref, other_conan.paths)

        # Installing with different options builds a second binary package
        other_conan.run('install %s -o language=1 --build missing' % (str(ref)))
        pkg_ids = other_conan.paths.conan_packages(ref)
        self.assertEqual(len(pkg_ids), 2)
        for pkg_id in pkg_ids:
            self._assert_library_exists(PackageReference(ref, pkg_id),
                                        other_conan.paths)

        # A consumer project builds and runs against the uploaded packages
        client3 = TestClient(servers=self.servers,
                             users={"default": [("lasote", "mypass")]})
        # NOTE(review): this reference is never used afterwards (and its
        # version 0.2 disagrees with the 0.1 files below); kept verbatim.
        ref = ConanFileReference.loads("Hello1/0.2@lasote/stable")
        client3.save(cpp_hello_conan_files("Hello1", "0.1",
                                           ["Hello0/0.1@lasote/stable"]))
        client3.run('install')
        client3.run('build')
        command = os.sep.join([".", "bin", "say_hello"])
        client3.runner(command, cwd=client3.current_folder)
        self.assertIn("Hello Hello1", client3.user_io.out)
        self.assertIn("Hello Hello0", client3.user_io.out)

        # Switching the 'language' option rebuilds and changes the greeting
        client3.run('install -o language=1 --build missing')
        time.sleep(1)
        client3.run('build')
        client3.runner(command, cwd=client3.current_folder)
        self.assertIn("Hola Hello1", client3.user_io.out)
        self.assertIn("Hola Hello0", client3.user_io.out)

    def _assert_library_exists(self, package_ref, paths):
        # The local package folder must contain a lib/ dir with one artifact
        package_path = paths.package(package_ref)
        self.assertTrue(os.path.exists(os.path.join(package_path, "lib")))
        self._assert_library_files(package_path)

    def _assert_library_files(self, path):
        # Exactly one library file is expected in lib/
        self.assertEqual(len(os.listdir(os.path.join(path, "lib"))), 1)

    def _assert_library_exists_in_server(self, package_ref, paths):
        # Unpack the uploaded tgz and verify the same single-library layout
        folder = uncompress_packaged_files(paths, package_ref)
        self._assert_library_files(folder)
+ self.output.warn("CC: %s=>%s" % (self.name, os.environ["CC"])) +''' + + client = TestClient() + files = cpp_hello_conan_files("Hello0", "1.0", deps=[], build=False) + files[CONANFILE] = patch_conanfile(files[CONANFILE]) + client.save(files) + client.run("export lasote/stable") + + files = cpp_hello_conan_files("Hello1", "1.0", + deps=["Hello0/1.0@lasote/stable"], build=False) + files[CONANFILE] = patch_conanfile(files[CONANFILE]) + client.save(files) + client.run("export lasote/stable") + + # Both with same settings + client.run("install Hello1/1.0@lasote/stable --build -s compiler=gcc" + " -s compiler.version=4.6 -s compiler.libcxx=libstdc++11" + " -e CXX=/mycompilercxx -e CC=/mycompilercc") + + self.assertIn("COMPILER: Hello0=>gcc", client.user_io.out) + self.assertIn("CXX: Hello0=>/mycompilercxx", client.user_io.out) + self.assertIn("CC: Hello0=>/mycompilercc", client.user_io.out) + + self.assertIn("COMPILER: Hello1=>gcc", client.user_io.out) + self.assertIn("CXX: Hello1=>/mycompilercxx", client.user_io.out) + self.assertIn("CC: Hello1=>/mycompilercc", client.user_io.out) + + # Different for Hello0 + client.run("install Hello1/1.0@lasote/stable --build -s compiler=gcc" + " -s compiler.version=4.6 -s compiler.libcxx=libstdc++11" + " -e CXX=/mycompilercxx -e CC=/mycompilercc" + " -s Hello0:compiler=clang -s Hello0:compiler.version=3.7" + " -s Hello0:compiler.libcxx=libstdc++" + " -e Hello0:CXX=/othercompilercxx -e Hello0:CC=/othercompilercc") + + self.assertIn("COMPILER: Hello0=>clang", client.user_io.out) + self.assertIn("CXX: Hello0=>/othercompilercxx", client.user_io.out) + self.assertIn("CC: Hello0=>/othercompilercc", client.user_io.out) + + self.assertIn("COMPILER: Hello1=>gcc", client.user_io.out) + self.assertIn("CXX: Hello1=>/mycompilercxx", client.user_io.out) + self.assertIn("CC: Hello1=>/mycompilercc", client.user_io.out) + + def conan_env_deps_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class 
HelloConan(ConanFile): + name = "Hello" + version = "0.1" + def package_info(self): + self.env_info.var1="bad value" + self.env_info.var2.append("value2") + self.env_info.var3="Another value" + self.env_info.path = "/dir" +''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello2" + version = "0.1" + def config(self): + self.requires("Hello/0.1@lasote/stable") + + def package_info(self): + self.env_info.var1="good value" + self.env_info.var2.append("value3") + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + client.run("export lasote/stable") + client.run("install Hello2/0.1@lasote/stable --build -g virtualenv") + ext = "bat" if platform.system() == "Windows" else "sh" + self.assertTrue(os.path.exists(os.path.join(client.current_folder, "activate.%s" % ext))) + self.assertTrue(os.path.exists(os.path.join(client.current_folder, "deactivate.%s" % ext))) + activate_contents = load(os.path.join(client.current_folder, "activate.%s" % ext)) + deactivate_contents = load(os.path.join(client.current_folder, "deactivate.%s" % ext)) + self.assertNotIn("bad value", activate_contents) + self.assertIn("var1=good value", activate_contents) + if platform.system() == "Windows": + self.assertIn("var2=value3;value2;%var2%", activate_contents) + else: + self.assertIn("var2=value3:value2:$var2", activate_contents) + self.assertIn("Another value", activate_contents) + self.assertIn("PATH=/dir", activate_contents) + + self.assertIn('var1=', deactivate_contents) + self.assertIn('var2=', deactivate_contents) diff --git a/testbed/conan-io__conan/conans/test/integration/conan_scopes_test.py b/testbed/conan-io__conan/conans/test/integration/conan_scopes_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0626abdbdfaaa0578542f435dca874ce56b2fae9 --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/integration/conan_scopes_test.py @@ -0,0 +1,272 @@ + +import unittest +from conans.test.tools import TestClient +from conans.util.files import load +import os + + +class ConanScopeTest(unittest.TestCase): + + def conan_scopes_deps_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + def build(self): + if self.scope.dev: + self.output.warn("DEP DEV") + if self.scope.other: + self.output.warn("DEP OTHER") + ''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello2" + version = "0.1" + def config(self): + if self.scope.other: + self.requires("Hello/0.1@lasote/stable", dev=True) + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + client.run("install --build") + + self.assertNotIn("Hello/0.1@lasote/stable", client.user_io.out) + + client.run("install -sc=other=True --build") + + self.assertIn("Hello/0.1@lasote/stable", client.user_io.out) + client.run("export lasote/stable") + + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + requires = "Hello2/0.1@lasote/stable" + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + client.run("install --build") + client.run("install -sc=Hello2:other=True --build") + + def conan_scopes_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + def build(self): + if self.scope.dev: + self.output.warn("DEP DEV") + if self.scope.other: + self.output.warn("DEP OTHER") + ''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + requires = 
"Hello/0.1@lasote/stable" + def config(self): + self.output.info(self.scope) + if self.scope.dev: + self.output.warn("CONFIG_CONSUMER DEV") + if self.scope.other: + self.output.warn("CONFIG_CONSUMER OTHER") + def build(self): + if self.scope.dev: + self.output.warn("BUILD_CONSUMER DEV") + if self.scope.other: + self.output.warn("BUILD_CONSUMER OTHER") + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + + client.run("install --build") + + self.assertNotIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("install --build -sc other=True", "install --build"): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True other=True", "".join(conaninfo.splitlines())) + self.assertIn("dev=True, other=True", client.user_io.out) + self.assertNotIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("install --build -sc Hello:dev=True", "install --build"): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True other=True Hello:dev=True", + "".join(conaninfo.splitlines())) + self.assertIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + 
self.assertIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("install --build -sc Hello:other=True", "install --build"): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True other=True" + " Hello:dev=True Hello:other=True", + "".join(conaninfo.splitlines())) + self.assertIn("WARN: DEP DEV", client.user_io.out) + self.assertIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("install --build -sc Hello:other=False", "install --build"): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True other=True" + " Hello:dev=True Hello:other=False", + "".join(conaninfo.splitlines())) + self.assertIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("build", ): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True other=True" + " Hello:dev=True Hello:other=False", + "".join(conaninfo.splitlines())) + self.assertNotIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertNotIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + 
self.assertNotIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + def conan_scopes_pattern_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + def build(self): + if self.scope.dev: + self.output.warn("DEP DEV") + if self.scope.other: + self.output.warn("DEP OTHER") + ''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + def config(self): + if self.scope.dev: + self.output.warn("CONFIG_CONSUMER DEV") + if self.scope.other: + self.output.warn("CONFIG_CONSUMER OTHER") + def build(self): + if self.scope.dev: + self.output.warn("BUILD_CONSUMER DEV") + if self.scope.other: + self.output.warn("BUILD_CONSUMER OTHER") + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + + client.run("install --build") + + self.assertNotIn("WARN: DEP DEV", client.user_io.out) + self.assertNotIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + for command in ("install --build -sc ALL:other=True", "install --build"): + client.run(command) + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("[scope] dev=True ALL:other=True", + "".join(conaninfo.splitlines())) + self.assertNotIn("WARN: DEP DEV", client.user_io.out) + self.assertIn("WARN: DEP OTHER", client.user_io.out) + self.assertIn("WARN: CONFIG_CONSUMER DEV", client.user_io.out) + self.assertIn("WARN: 
CONFIG_CONSUMER OTHER", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER DEV", client.user_io.out) + self.assertNotIn("WARN: BUILD_CONSUMER OTHER", client.user_io.out) + + def conan_dev_requires_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Base" + version = "0.1" +''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + dev_requires = "Base/0.1@lasote/stable" + name = "Hello" + version = "0.1" +''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("export lasote/stable") + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + dev_requires = "Hello/0.1@lasote/stable" + ''' + files["conanfile.py"] = conanfile + client.save(files, clean_first=True) + + client.run("install --build") + self.assertIn("Hello/0.1@lasote/stable:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", + client.user_io.out) + self.assertNotIn("Base/0.1@lasote/stable", client.user_io.out) + client.run("install --build -sc dev=False") + self.assertNotIn("Hello/0.1@lasote/stable", client.user_io.out) + self.assertNotIn("Base/0.1@lasote/stable", client.user_io.out) + client.run("install --build -sc dev=True -sc Hello:dev=True") + self.assertIn("Hello/0.1@lasote/stable:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", + client.user_io.out) + self.assertIn("Base/0.1@lasote/stable", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/conan_test_test.py b/testbed/conan-io__conan/conans/test/integration/conan_test_test.py new file mode 100644 index 0000000000000000000000000000000000000000..aa262e9b09f0996c738220e4fdca8c37bfbb5d50 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/conan_test_test.py @@ -0,0 +1,162 @@ +import unittest +from conans.test.tools import TestClient +from 
conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +from conans.util.files import load +from conans.model.ref import PackageReference +import os +from conans.paths import CONANFILE + + +@attr("slow") +class ConanTestTest(unittest.TestCase): + + def scopes_test_package_test(self): + client = TestClient() + conanfile = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + + def build(self): + self.output.info("Scope: %s" % self.scope) +""" + test_conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def test(self): + self.conanfile_directory +""" + client.save({"conanfile.py": conanfile, + "test/conanfile.py": test_conanfile}) + client.run("test_package --scope Hello:dev=True --build=missing") + self.assertIn("Hello/0.1@lasote/stable: Scope: dev=True", client.user_io.out) + + def fail_test_package_test(self): + client = TestClient() + conanfile = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + + def package(self): + self.copy("*") +""" + test_conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def test(self): + self.conanfile_directory +""" + client.save({"conanfile.py": conanfile, + "FindXXX.cmake": "Hello FindCmake", + "test/conanfile.py": test_conanfile}) + client.run("test_package") + ref = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertEqual("Hello FindCmake", + load(os.path.join(client.paths.package(ref), "FindXXX.cmake"))) + client.save({"FindXXX.cmake": "Bye FindCmake"}) + client.run("test_package") + self.assertEqual("Bye FindCmake", + load(os.path.join(client.paths.package(ref), "FindXXX.cmake"))) + + def _create(self, client, number, version, 
deps=None, export=True): + files = cpp_hello_conan_files(number, version, deps) + client.save(files) + if export: + client.run("export lasote/stable") + + def conan_test_test(self): + + # With classic requires + conanfile = ''' +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + settings = "os", "compiler", "build_type", "arch" + requires = "Hello0/0.1@ lasote/stable" + generators = "cmake" + + def build(self): + cmake = CMake(self.settings) + self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line)) + self.run("cmake --build . %s" % cmake.build_config) + + def test(self): + # equal to ./bin/greet, but portable win: .\bin\greet + self.run(os.sep.join([".","bin", "greet"])) + ''' + self._test_with_conanfile(conanfile) + + # With requirements + conanfile = ''' +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + settings = "os", "compiler", "build_type", "arch" + generators = "cmake" + + def requirements(self): + self.requires("Hello0/0.1@ lasote/stable") + + def build(self): + cmake = CMake(self.settings) + self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line)) + self.run("cmake --build . 
%s" % cmake.build_config) + + def test(self): + # equal to ./bin/greet, but portable win: .\bin\greet + self.run(os.sep.join([".","bin", "greet"])) + ''' + self._test_with_conanfile(conanfile) + + def _test_with_conanfile(self, test_conanfile): + client = TestClient() + files = cpp_hello_conan_files("Hello0", "0.1") + print_build = 'self.output.warn("BUILD_TYPE=>%s" % self.settings.build_type)' + files[CONANFILE] = files[CONANFILE].replace("def build(self):", + 'def build(self):\n %s' % print_build) + + # Add build_type setting + files[CONANFILE] = files[CONANFILE].replace(', "arch"', + ', "arch", "build_type"') + + cmakelist = """PROJECT(MyHello) +cmake_minimum_required(VERSION 2.8) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup() + +ADD_EXECUTABLE(greet main.cpp) +TARGET_LINK_LIBRARIES(greet ${CONAN_LIBS}) +""" + files["test_package/CMakeLists.txt"] = cmakelist + files["test_package/conanfile.py"] = test_conanfile + files["test_package/main.cpp"] = files["main.cpp"] + client.save(files) + client.run("export lasote/stable") + error = client.run("test -s build_type=Release") + self.assertFalse(error) + self.assertNotIn("Project: WARN: conanbuildinfo.txt file not found", client.user_io.out) + self.assertNotIn("Project: WARN: conanenv.txt file not found", client.user_io.out) + self.assertIn('Hello Hello0', client.user_io.out) + error = client.run("test -s Hello0:build_type=Debug -o Hello0:language=1") + self.assertFalse(error) + self.assertIn('Hola Hello0', client.user_io.out) + self.assertIn('BUILD_TYPE=>Debug', client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/conanfile_errors_test.py b/testbed/conan-io__conan/conans/test/integration/conanfile_errors_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b9853c01291a1dd7c268c7cdbd3e038f5e5fe043 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/conanfile_errors_test.py @@ -0,0 +1,151 @@ +import unittest +from 
conans.test.tools import TestClient + + +class ConanfileErrorsTest(unittest.TestCase): + + def copy_error_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + def package(self): + self.copy2("*.h", dst="include", src=["include","platform"]) +''' + files = {"conanfile.py": conanfile, "test.txt": "Hello world"} + client.save(files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable: Error in package() method, line 9", + client.user_io.out) + self.assertIn('self.copy2("*.h", dst="include", src=["include","platform"]', + client.user_io.out) + self.assertIn("'HelloConan' object has no attribute 'copy2'", + client.user_io.out) + + def copy_error2_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + def package(self): + self.copy("*.h", dst="include", src=["include","platform"]) +''' + files = {"conanfile.py": conanfile, "test.txt": "Hello world"} + client.save(files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable: Error in package() method, line 9", + client.user_io.out) + self.assertIn('self.copy("*.h", dst="include", src=["include","platform"]', + client.user_io.out) + # It results that the error is different in different Python2/3 and OSs + # self.assertIn("'list' object has no attribute 'replace'", client.user_io.out) + + def package_info_error_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + def package_info(self): + self.copy2() +''' + files = {"conanfile.py": conanfile, "test.txt": "Hello world"} + 
client.save(files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable: Error in package_info() method, line 9", + client.user_io.out) + self.assertIn('self.copy2()', + client.user_io.out) + self.assertIn("'HelloConan' object has no attribute 'copy2'", + client.user_io.out) + + def config_error_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + def configure(self): + self.copy2() +''' + files = {"conanfile.py": conanfile, "test.txt": "Hello world"} + client.save(files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable: Error in config, config_options or configure()" + " method, line 9", + client.user_io.out) + self.assertIn('self.copy2()', + client.user_io.out) + self.assertIn("'HelloConan' object has no attribute 'copy2'", + client.user_io.out) + + def source_error_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" + def source(self): + self.copy2() +''' + files = {"conanfile.py": conanfile, "test.txt": "Hello world"} + client.save(files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable: Error in source() method, line 9", + client.user_io.out) + self.assertIn('self.copy2()', + client.user_io.out) + self.assertIn("'HelloConan' object has no attribute 'copy2'", + client.user_io.out) + + def duplicate_requires_test(self): + client = TestClient() + conanfile = ''' +[requires] +foo/0.1@user/testing +foo/0.2@user/testing +''' + files = {"conanfile.txt": conanfile} + client.save(files) + error = client.run("install . 
--build", ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: Duplicated requirement", client.user_io.out) + + def duplicate_requires_py_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + requires = "foo/0.1@user/testing", "foo/0.2@user/testing" +''' + files = {"conanfile.py": conanfile} + client.save(files) + error = client.run("install . --build", ignore_error=True) + self.assertTrue(error) + self.assertIn("Error while initializing requirements. Duplicated requirement", + client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/conflict_diamond_test.py b/testbed/conan-io__conan/conans/test/integration/conflict_diamond_test.py new file mode 100644 index 0000000000000000000000000000000000000000..97e7dbad7077e37f044b43671d40a16173a28e12 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/conflict_diamond_test.py @@ -0,0 +1,41 @@ +import unittest +from conans.test.tools import TestClient +from conans.paths import CONANFILE + + +class ConflictDiamondTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def _export(self, name, version, deps=None, export=True): + deps = ", ".join(['"%s"' % d for d in deps or []]) or '""' + conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + name = "%s" + version = "%s" + requires = %s +""" % (name, version, deps) + files = {CONANFILE: conanfile} + self.client.save(files, clean_first=True) + if export: + self.client.run("export lasote/stable") + + def reuse_test(self): + self._export("Hello0", "0.1") + self._export("Hello0", "0.2") + self._export("Hello1", "0.1", ["Hello0/0.1@lasote/stable"]) + self._export("Hello2", "0.1", ["Hello0/0.2@lasote/stable"]) + self._export("Hello3", "0.1", ["Hello1/0.1@lasote/stable", "Hello2/0.1@lasote/stable"], + export=False) + + self.client.run("install . 
--build missing") + self.assertIn("WARN: Conflict in Hello2/0.1@lasote/stable", self.client.user_io.out) + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + self.client.run("install . --build missing --werror", ignore_error=True) + self.assertIn("ERROR: Conflict in Hello2/0.1@lasote/stable", self.client.user_io.out) + self.assertNotIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/copy_packages_test.py b/testbed/conan-io__conan/conans/test/integration/copy_packages_test.py new file mode 100644 index 0000000000000000000000000000000000000000..78320e0e9c155a6533b5e4ab0c3207a5af0135c4 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/copy_packages_test.py @@ -0,0 +1,38 @@ +import unittest +from conans.test.tools import TestClient +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANFILE +from conans.model.ref import ConanFileReference +from conans.util.files import rmdir + + +class CopyPackagesTest(unittest.TestCase): + + def test_copy_command(self): + client = TestClient() + self._export_some_packages(client) + # Copy all packages + new_reference = ConanFileReference.loads("Hello0/0.1@pepe/testing") + client.run("copy Hello0/0.1@lasote/stable pepe/testing --all --force") + p1 = client.paths.packages(new_reference) + packages = os.listdir(p1) + self.assertEquals(len(packages), 3) + + # Copy just one + rmdir(p1) + client.run("copy Hello0/0.1@lasote/stable pepe/testing -p %s --force" % packages[0]) + packages = os.listdir(p1) + self.assertEquals(len(packages), 1) + + def _export_some_packages(self, client): + files = cpp_hello_conan_files("Hello0", "0.1") + # No build. 
+ files[CONANFILE] = files[CONANFILE].replace("def build(self):", + "def build(self):\n return\n") + client.save(files) + client.run("export lasote/stable") + client.run("install Hello0/0.1@lasote/stable -s os=Windows --build missing") + client.run("install Hello0/0.1@lasote/stable -s os=Linux --build missing") + client.run("install Hello0/0.1@lasote/stable -s os=Linux -s compiler=gcc " + "-s compiler.version=4.6 -s compiler.libcxx=libstdc++ --build missing") diff --git a/testbed/conan-io__conan/conans/test/integration/custom_generator_test.py b/testbed/conan-io__conan/conans/test/integration/custom_generator_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0ec05ecd410d87561f2134fc49a5afa22c7f6848 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/custom_generator_test.py @@ -0,0 +1,124 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANFILE, CONANFILE_TXT +from conans.util.files import load + + +generator = """ +from conans.model import Generator +from conans.paths import BUILD_INFO +from conans import ConanFile, CMake + +class MyCustomGenerator(Generator): + @property + def filename(self): + return "customfile.gen" + + @property + def content(self): + return "My custom generator content" + + +class MyCustomGeneratorPackage(ConanFile): + name = "MyCustomGen" + version = "0.2" +""" + +consumer = """ +[requires] +Hello0/0.1@lasote/stable +MyCustomGen/0.2@lasote/stable + +[generators] +MyCustomGenerator +""" + +generator_multi = """ +from conans.model import Generator +from conans.paths import BUILD_INFO +from conans import ConanFile, CMake + +class MyCustomMultiGenerator(Generator): + @property + def filename(self): + return "customfile.gen" + + @property + def content(self): + return {"file1.gen": "CustomContent1", + "file2.gen": 
"CustomContent2"} + + +class NoMatterTheName(ConanFile): + name = "MyCustomGen" + version = "0.2" +""" + +consumer_multi = """ +[requires] +MyCustomGen/0.2@lasote/stable + +[generators] +MyCustomMultiGenerator +""" + + +class CustomGeneratorTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + + def reuse_test(self): + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + client.save(files) + client.run("export lasote/stable") + client.run("upload %s" % str(conan_reference)) + + gen_reference = ConanFileReference.loads("MyCustomGen/0.2@lasote/stable") + files = {CONANFILE: generator} + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + client.save(files) + client.run("export lasote/stable") + client.run("upload %s" % str(gen_reference)) + + # Test local, no retrieval + files = {CONANFILE_TXT: consumer} + client.save(files, clean_first=True) + client.run("install --build") + generated = load(os.path.join(client.current_folder, "customfile.gen")) + self.assertEqual(generated, "My custom generator content") + + # Test retrieval from remote + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files = {CONANFILE_TXT: consumer} + client.save(files) + client.run("install --build") + + generated = load(os.path.join(client.current_folder, "customfile.gen")) + self.assertEqual(generated, "My custom generator content") + + def multifile_test(self): + gen_reference = ConanFileReference.loads("MyCustomGen/0.2@lasote/stable") + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files = {CONANFILE: generator_multi} + client.save(files) + client.run("export lasote/stable") + client.run("upload %s" % str(gen_reference)) + + # Test local, no 
retrieval + files = {CONANFILE_TXT: consumer_multi} + client.save(files, clean_first=True) + client.run("install --build") + self.assertIn("Generator MyCustomMultiGenerator is multifile. " + "Property 'filename' not used", + client.user_io.out) + for i in (1, 2): + generated = load(os.path.join(client.current_folder, "file%d.gen" % i)) + self.assertEqual(generated, "CustomContent%d" % i) diff --git a/testbed/conan-io__conan/conans/test/integration/diamond_test.py b/testbed/conan-io__conan/conans/test/integration/diamond_test.py new file mode 100644 index 0000000000000000000000000000000000000000..ebc392e9d6c536ecda4a83488ce0d8ef6b7e3837 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/diamond_test.py @@ -0,0 +1,170 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.ref import ConanFileReference +from nose.plugins.attrib import attr +from conans.model.build_info import DepsCppInfo +from conans.util.files import load +import os +from conans.paths import BUILD_INFO, CONANFILE, BUILD_INFO_CMAKE +import platform +from conans.util.log import logger +from conans.test.utils.test_files import wait_until_removed + + +@attr("slow") +class DiamondTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer( + [], # write permissions + users={"lasote": "mypass"}) # exported users and passwords + self.servers = {"default": test_server} + self.conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def _export_upload(self, name, version=None, deps=None, use_cmake=True, cmake_targets=False): + files = cpp_hello_conan_files(name, version, deps, need_patch=True, use_cmake=use_cmake, + cmake_targets=cmake_targets) + conan_ref = ConanFileReference(name, version, "lasote", "stable") + self.conan.save(files, clean_first=True) + self.conan.run("export lasote/stable") + self.conan.run("upload %s" % str(conan_ref)) + + 
def _check_individual_deps(self, client): + self.assertIn("INCLUDE [", client.user_io.out) + self.assertIn(".conan/data/Hello0/0.1/lasote/stable", client.user_io.out) + build_file = os.path.join(client.current_folder, BUILD_INFO) + content = load(build_file) + cmakebuildinfo = load(os.path.join(client.current_folder, BUILD_INFO_CMAKE)) + self.assertIn("set(CONAN_LIBS helloHello3 helloHello1 helloHello2 helloHello0", + cmakebuildinfo) + self.assertIn("set(CONAN_DEPENDENCIES Hello3 Hello1 Hello2 Hello0)", cmakebuildinfo) + deps_cpp_info = DepsCppInfo.loads(content) + self.assertEqual(len(deps_cpp_info.include_paths), 4) + for dep in ("Hello3", "Hello2", "Hello1", "Hello0"): + self.assertEqual(len(deps_cpp_info[dep].include_paths), 1) + self.assertEqual(len(deps_cpp_info[dep].lib_paths), 1) + self.assertEqual(deps_cpp_info[dep].libs, ["hello%s" % dep]) + build_file = os.path.join(client.current_folder, BUILD_INFO_CMAKE) + content = load(build_file) + for dep in ("Hello3", "Hello2", "Hello1", "Hello0"): + self.assertEqual(len(deps_cpp_info[dep].include_paths), 1) + self.assertIn("set(CONAN_INCLUDE_DIRS_%s " % dep.upper(), content) + self.assertIn("set(CONAN_LIBS_%s hello%s)" % (dep.upper(), dep), content) + + def diamond_cmake_test(self): + self._diamond_test(use_cmake=True) + + def diamond_cmake_targets_test(self): + self._diamond_test(use_cmake=True, cmake_targets=True) + + def diamond_default_test(self): + self._diamond_test(use_cmake=False) + + def diamond_mingw_test(self): + if platform.system() != "Windows": + return + not_env = os.system("g++ --version > nul") + if not_env != 0: + logger.error("This platform does not support G++ command") + return + install = "install -s compiler=gcc -s compiler.libcxx=libstdc++ -s compiler.version=4.9" + self._diamond_test(install=install, use_cmake=False) + + def _diamond_test(self, install="install", use_cmake=True, cmake_targets=False): + self._export_upload("Hello0", "0.1", use_cmake=use_cmake, cmake_targets=cmake_targets) 
+ self._export_upload("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], use_cmake=use_cmake, + cmake_targets=cmake_targets) + self._export_upload("Hello2", "0.1", ["Hello0/0.1@lasote/stable"], use_cmake=use_cmake, + cmake_targets=cmake_targets) + self._export_upload("Hello3", "0.1", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"], use_cmake=use_cmake, + cmake_targets=cmake_targets) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files3 = cpp_hello_conan_files("Hello4", "0.1", ["Hello3/0.1@lasote/stable"], + use_cmake=use_cmake, cmake_targets=cmake_targets) + + # Add some stuff to base project conanfile to test further the individual + # flags in build_info (txt, cmake) files + content = files3[CONANFILE] + content = content.replace("generators =", 'generators = "txt",') + content = content.replace("def build(self):", + "def build(self):\n" + " self.output.info('INCLUDE %s' " + "% self.deps_cpp_info['Hello0'].include_paths)") + files3[CONANFILE] = content + client.save(files3) + + client.run("%s . 
--build missing" % install) + if use_cmake: + if cmake_targets: + self.assertIn("Conan: Using cmake targets configuration", client.user_io.out) + self.assertNotIn("Conan: Using cmake global configuration", client.user_io.out) + else: + self.assertIn("Conan: Using cmake global configuration", client.user_io.out) + self.assertNotIn("Conan: Using cmake targets configuration", client.user_io.out) + client.run("build .") + self._check_individual_deps(client) + + command = os.sep.join([".", "bin", "say_hello"]) + client.runner(command, cwd=client.current_folder) + self.assertEqual(['Hello Hello4', 'Hello Hello3', 'Hello Hello1', 'Hello Hello0', + 'Hello Hello2', 'Hello Hello0'], + str(client.user_io.out).splitlines()[-6:]) + + files3 = cpp_hello_conan_files("Hello4", "0.1", ["Hello3/0.1@lasote/stable"], language=1, + use_cmake=use_cmake, cmake_targets=cmake_targets) + files3[CONANFILE] = files3[CONANFILE].replace("generators =", 'generators = "txt",') + wait_until_removed(client.current_folder) + client.save(files3) + client.run("%s . 
--build missing" % install) + client.run("build .") + + client.runner(command, cwd=client.current_folder) + self.assertEqual(['Hola Hello4', 'Hola Hello3', 'Hola Hello1', 'Hola Hello0', + 'Hola Hello2', 'Hola Hello0'], + str(client.user_io.out).splitlines()[-6:]) + + # Try to upload and reuse the binaries + client.run("upload Hello3/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 2) + client.run("upload Hello1/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 2) + client.run("upload Hello2/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 2) + client.run("upload Hello0/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 2) + + client2 = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files3 = cpp_hello_conan_files("Hello4", "0.1", ["Hello3/0.1@lasote/stable"], + use_cmake=use_cmake, cmake_targets=cmake_targets) + files3[CONANFILE] = files3[CONANFILE].replace("generators =", 'generators = "txt",') + client2.save(files3) + client2.run("%s . --build missing" % install) + client2.run("build .") + + self.assertNotIn("libhello0.a", client2.user_io.out) + self.assertNotIn("libhello1.a", client2.user_io.out) + self.assertNotIn("libhello2.a", client2.user_io.out) + self.assertNotIn("libhello3.a", client2.user_io.out) + client2.runner(command, cwd=client2.current_folder) + self.assertEqual(['Hello Hello4', 'Hello Hello3', 'Hello Hello1', 'Hello Hello0', + 'Hello Hello2', 'Hello Hello0'], + str(client2.user_io.out).splitlines()[-6:]) + + files3 = cpp_hello_conan_files("Hello4", "0.1", ["Hello3/0.1@lasote/stable"], language=1, + use_cmake=use_cmake, cmake_targets=cmake_targets) + files3[CONANFILE] = files3[CONANFILE].replace("generators =", 'generators = "txt",') + wait_until_removed(client2.current_folder) + client2.save(files3) + client2.run("%s . 
--build missing" % install) + client2.run("build .") + self.assertNotIn("libhello0.a", client2.user_io.out) + self.assertNotIn("libhello1.a", client2.user_io.out) + self.assertNotIn("libhello2.a", client2.user_io.out) + self.assertNotIn("libhello3.a", client2.user_io.out) + client2.runner(command, cwd=client2.current_folder) + self.assertEqual(['Hola Hello4', 'Hola Hello3', 'Hola Hello1', 'Hola Hello0', + 'Hola Hello2', 'Hola Hello0'], + str(client2.user_io.out).splitlines()[-6:]) diff --git a/testbed/conan-io__conan/conans/test/integration/flat_requirements_test.py b/testbed/conan-io__conan/conans/test/integration/flat_requirements_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c157cbc5bb9293e75a655c72df3287429b26809a --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/flat_requirements_test.py @@ -0,0 +1,91 @@ +import unittest +from conans.model.ref import ConanFileReference, PackageReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import (CONANFILE_TXT, BUILD_INFO_CMAKE, BUILD_INFO_GCC, CONANINFO, + BUILD_INFO_VISUAL_STUDIO, BUILD_INFO_XCODE) +from conans.util.files import save, load +import os +from conans.test.tools import TestClient +from conans.test.utils.test_files import temp_folder + + +class FlatRequirementsTest(unittest.TestCase): + + def setUp(self): + self.conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + self.files = cpp_hello_conan_files("Hello0", "0.1", build=False) + self.conan = TestClient() + self.conan.save(self.files) + self.conan.run("export lasote/stable") + + def consumer_with_flat_requirement_test(self): + # We want to reuse exported Hello0/0.1@lasote/stable + tmp_dir = temp_folder() + req_file = '''[requires] +Hello0/0.1@lasote/stable # My req comment +[generators] +gcc # I need this generator for.. 
+cmake +visual_studio +xcode +''' + save(os.path.join(tmp_dir, CONANFILE_TXT), req_file) + + self.conan.current_folder = tmp_dir + # Install requirements + self.conan.run('install --build missing') + self.assertEqual(sorted([CONANFILE_TXT, BUILD_INFO_GCC, BUILD_INFO_CMAKE, + BUILD_INFO_VISUAL_STUDIO, BUILD_INFO_XCODE, CONANINFO]), + sorted(os.listdir(tmp_dir))) + + cmake = load(os.path.join(tmp_dir, BUILD_INFO_CMAKE)) + gcc = load(os.path.join(tmp_dir, BUILD_INFO_GCC)) + + self.assertIn("CONAN_INCLUDE_DIRS", cmake) + self.assertIn("CONAN_LIB_DIRS", cmake) + self.assertIn("CONAN_LIBS", cmake) + + self.assertIn("CONAN_INCLUDE_DIRS", cmake) + self.assertIn("CONAN_LIB_DIRS", cmake) + self.assertIn(".conan/data/Hello0/0.1/lasote/stable/package", cmake) + + self.assertIn("-L", gcc) + self.assertIn("-l", gcc) + self.assertIn("-I", gcc) + + self.assertIn(".conan/data/Hello0/0.1/lasote/stable/package", gcc) + + # CHECK VISUAL STUDIO GENERATOR + + from xml.dom import minidom + xmldoc = minidom.parse(os.path.join(tmp_dir, BUILD_INFO_VISUAL_STUDIO)) + definition_group = xmldoc.getElementsByTagName('ItemDefinitionGroup')[0] + compiler = definition_group.getElementsByTagName("ClCompile")[0] + + include_dirs = compiler.getElementsByTagName("AdditionalIncludeDirectories")[0].firstChild.data + definitions = compiler.getElementsByTagName("PreprocessorDefinitions")[0].firstChild.data + + linker = definition_group.getElementsByTagName("Link")[0] + lib_dirs = linker.getElementsByTagName("AdditionalLibraryDirectories")[0].firstChild.data + libs = linker.getElementsByTagName("AdditionalDependencies")[0].firstChild.data + + package_id = os.listdir(self.conan.paths.packages(self.conan_reference))[0] + package_ref = PackageReference(self.conan_reference, package_id) + package_paths = self.conan.paths.package(package_ref).replace("\\", "/") + + expected_lib_dirs = os.path.join(package_paths, "lib").replace("\\", "/") + expected_include_dirs = os.path.join(package_paths, 
"include").replace("\\", "/") + + self.assertIn(expected_lib_dirs, lib_dirs) + self.assertEquals("helloHello0.lib;%(AdditionalDependencies)", libs) + self.assertEquals("%(PreprocessorDefinitions)", definitions) + self.assertIn(expected_include_dirs, include_dirs) + + # CHECK XCODE GENERATOR + xcode = load(os.path.join(tmp_dir, BUILD_INFO_XCODE)) + + self.assertIn('LIBRARY_SEARCH_PATHS = $(inherited) "%s"' % expected_lib_dirs, xcode) + self.assertIn('HEADER_SEARCH_PATHS = $(inherited) "%s"' % expected_include_dirs, xcode) + self.assertIn("GCC_PREPROCESSOR_DEFINITIONS = $(inherited)", xcode) + self.assertIn("OTHER_CFLAGS = $(inherited)", xcode) + self.assertIn("OTHER_CPLUSPLUSFLAGS = $(inherited)", xcode) diff --git a/testbed/conan-io__conan/conans/test/integration/go_complete_test.py b/testbed/conan-io__conan/conans/test/integration/go_complete_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e873c568546b5e4530691a8695e0d43b9a938f3b --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/go_complete_test.py @@ -0,0 +1,155 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference, PackageReference +import os +from conans.test.utils.context_manager import CustomEnvPath +import platform +from conans.test.utils.test_files import scan_folder +from conans.test.utils.test_files import uncompress_packaged_files +from nose.plugins.attrib import attr + +stringutil_conanfile = ''' +from conans import ConanFile + +class Stringutil(ConanFile): + name = "stringutil" + version = "0.1" + exports = '*' + def package(self): + self.copy("*") +''' + +reverse = '''// Package stringutil contains utility functions for working with strings. +package stringutil + +// Reverse returns its argument string reversed rune-wise left to right. 
+func Reverse(s string) string { + r := []rune(s) + for i, j := 0, len(r)-1; i < len(r)/2; i, j = i+1, j-1 { + r[i], r[j] = r[j], r[i] + } + return string(r) +} +''' + +reverse_test = '''package stringutil + +import "testing" + +func TestReverse(t *testing.T) { + cases := []struct { + in, want string + }{ + {"Hello, world", "dlrow ,olleH"}, + {"", ""}, + } + for _, c := range cases { + got := Reverse(c.in) + if got != c.want { + t.Errorf("Reverse(%q) == %q, want %q", c.in, got, c.want) + } + } +} +''' + +reuse_conanfile = ''' +from conans import ConanFile + +class Hello(ConanFile): + name = "default" + version = "0.1" + exports = '*' + requires = "stringutil/0.1@lasote/stable" + def imports(self): + self.copy("*.go", "./src/stringutil", "", "stringutil") +''' + +main = '''package main + +import ( + "fmt" + "stringutil" +) + +func main() { + fmt.Printf(stringutil.Reverse("!oG ,olleH")) +} +''' + + +@attr('golang') +class GoCompleteTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def reuse_test(self): + conan_reference = ConanFileReference.loads("stringutil/0.1@lasote/stable") + files = {'conanfile.py': stringutil_conanfile, + 'reverse.go': reverse, + 'reverse_test.go': reverse_test, + 'reverse.txt': reverse, + 'hello/helloreverse.txt': reverse} + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install %s --build missing" % str(conan_reference)) + # Check compilation ok + package_ids = self.client.paths.conan_packages(conan_reference) + self.assertEquals(len(package_ids), 1) + package_ref = PackageReference(conan_reference, package_ids[0]) + self._assert_package_exists(package_ref, self.client.paths, list(files.keys())) + + # Upload conans + self.client.run("upload %s" % str(conan_reference)) + + # Check that conans exists on server + server_paths = 
self.servers["default"].paths + conan_path = server_paths.export(conan_reference) + self.assertTrue(os.path.exists(conan_path)) + + # Upload package + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + + # Check library on server + self._assert_package_exists_in_server(package_ref, server_paths, list(files.keys())) + + # Now from other "computer" install the uploaded conans with same options (nothing) + other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_conan.run("install %s --build missing" % str(conan_reference)) + # Build should be empty + build_path = other_conan.paths.build(package_ref) + self.assertFalse(os.path.exists(build_path)) + # Lib should exist + self._assert_package_exists(package_ref, other_conan.paths, list(files.keys())) + + reuse_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files = {'conanfile.py': reuse_conanfile, + 'src/hello/main.go': main} + reuse_conan.save(files) + reuse_conan.run("install --build missing") + + with CustomEnvPath(paths_to_add=['$GOPATH/bin'], + var_to_add=[('GOPATH', reuse_conan.current_folder), ]): + + if platform.system() == "Windows": + command = "hello" + else: + command = './hello' + reuse_conan.runner('go install hello', cwd=reuse_conan.current_folder) + reuse_conan.runner(command, cwd=os.path.join(reuse_conan.current_folder, 'bin')) + self.assertIn("Hello, Go!", reuse_conan.user_io.out) + + def _assert_package_exists(self, package_ref, paths, files): + package_path = paths.package(package_ref) + self.assertTrue(os.path.exists(os.path.join(package_path))) + real_files = scan_folder(package_path) + for f in files: + self.assertIn(f, real_files) + + def _assert_package_exists_in_server(self, package_ref, paths, files): + folder = uncompress_packaged_files(paths, package_ref) + real_files = scan_folder(folder) + for f in files: + self.assertIn(f, real_files) diff --git 
a/testbed/conan-io__conan/conans/test/integration/go_diamond_test.py b/testbed/conan-io__conan/conans/test/integration/go_diamond_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d84553cfed0e620e37b320917db9212113279159 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/go_diamond_test.py @@ -0,0 +1,82 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference +import platform +import os +from conans.test.utils.context_manager import CustomEnvPath +from conans.test.utils.test_files import hello_conan_files +from nose.plugins.attrib import attr + + +@attr('golang') +class GoDiamondTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def _export_upload(self, ref_str, number=0, deps=None): + conan_reference = ConanFileReference.loads(ref_str) + files = hello_conan_files(conan_reference=conan_reference, number=number, deps=deps, + lang='go') + self.conan.save(files, clean_first=True) + self.conan.run("export lasote/stable") + self.conan.run("upload %s" % str(conan_reference)) + + def reuse_test(self): + self._export_upload("hello0/0.1@lasote/stable") + self._export_upload("hello1/0.1@lasote/stable", 1, [0]) + self._export_upload("hello2/0.1@lasote/stable", 2, [0]) + self._export_upload("hello3/0.1@lasote/stable", 3, [1, 2]) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + conan_reference = ConanFileReference.loads("hello4/0.2@lasote/stable") + files3 = hello_conan_files(conan_reference=conan_reference, number=4, deps=[3], lang='go') + client.save(files3) + client.run("install --build missing") + client.run("build") + command = os.sep.join([".", "bin", "say_hello"]) + with CustomEnvPath(paths_to_add=['$GOPATH/bin'], + var_to_add=[('GOPATH', client.current_folder), 
]): + + client.runner('go install hello4_main', cwd=os.path.join(client.current_folder, 'src')) + if platform.system() == "Windows": + command = "hello4_main" + else: + command = './hello4_main' + client.runner(command, cwd=os.path.join(client.current_folder, 'bin')) + + self.assertEqual(['Hello 4', 'Hello 3', 'Hello 1', 'Hello 0', 'Hello 2', 'Hello 0'], + str(client.user_io.out).splitlines()[-6:]) + + # Try to upload and reuse the binaries + client.run("upload hello3/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) + client.run("upload hello1/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) + client.run("upload hello2/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) + client.run("upload hello0/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) +# + client2 = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + conan_reference = ConanFileReference.loads("hello4/0.2@lasote/stable") + + files3 = hello_conan_files(conan_reference=conan_reference, number=4, deps=[3], lang='go') + client2.save(files3) + + client2.run("install --build missing") + command = os.sep.join([".", "bin", "say_hello"]) + with CustomEnvPath(paths_to_add=['$GOPATH/bin'], + var_to_add=[('GOPATH', client2.current_folder), ]): + client2.runner('go install hello4_main', + cwd=os.path.join(client2.current_folder, 'src')) + if platform.system() == "Windows": + command = "hello4_main" + else: + command = './hello4_main' + client2.runner(command, cwd=os.path.join(client2.current_folder, 'bin')) + + self.assertEqual(['Hello 4', 'Hello 3', 'Hello 1', 'Hello 0', 'Hello 2', 'Hello 0'], + str(client2.user_io.out).splitlines()[-6:]) diff --git a/testbed/conan-io__conan/conans/test/integration/half_diamond_test.py b/testbed/conan-io__conan/conans/test/integration/half_diamond_test.py new 
file mode 100644 index 0000000000000000000000000000000000000000..faee116dda9274f8546393606050ecf82b465788 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/half_diamond_test.py @@ -0,0 +1,52 @@ +import unittest +from conans.test.tools import TestClient +from conans.paths import CONANFILE +from conans.util.files import load +import os + + +class HalfDiamondTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def _export(self, name, deps=None, export=True): + deps = ", ".join(['"%s"' % d for d in deps or []]) or '""' + conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + name = "%s" + version = "0.1" + requires = %s + options = {"potato": [True, False]} + default_options = "potato=True" + + def config_options(self): + del self.options.potato +""" % (name, deps) + files = {CONANFILE: conanfile} + self.client.save(files, clean_first=True) + if export: + self.client.run("export lasote/stable") + + def reuse_test(self): + self._export("Hello0") + self._export("Hello1", ["Hello0/0.1@lasote/stable"]) + self._export("Hello2", ["Hello1/0.1@lasote/stable", "Hello0/0.1@lasote/stable"]) + self._export("Hello3", ["Hello2/0.1@lasote/stable"], export=False) + + self.client.run("install . --build missing") + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + def check_duplicated_full_requires_test(self): + self._export("Hello0") + self._export("Hello1", ["Hello0/0.1@lasote/stable"]) + self._export("Hello2", ["Hello1/0.1@lasote/stable", "Hello0/0.1@lasote/stable"], + export=False) + + self.client.run("install . 
--build missing") + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + conaninfo = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertEqual(1, conaninfo.count("Hello0/0.1@lasote/stable")) diff --git a/testbed/conan-io__conan/conans/test/integration/info_test.py b/testbed/conan-io__conan/conans/test/integration/info_test.py new file mode 100644 index 0000000000000000000000000000000000000000..15c6b5bd30cb4500b25381046bc4eab098471cb3 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/info_test.py @@ -0,0 +1,114 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANFILE +import os +from conans.util.files import load, save + + +class InfoTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer(users={"lu": "mypass"}) + self.servers = {"default": test_server} + self.clients = {} + + def _export(self, name=0, version=None, deps=None): + client = TestClient(servers=self.servers, users={"default": [("lu", "mypass")]}) + self.clients[name] = client + # Not necessary to actually build binaries + files = cpp_hello_conan_files(name, version, deps, build=False) + client.save(files, clean_first=True) + client.run("export lu/st") + client.run("upload %s/%s@lu/st" % (name, version)) + + def assert_last_line(self, client, line): + lastline = str(client.user_io.out).splitlines()[-1] + self.assertEquals(lastline, line) + + def info_build_test(self): + """Test that the output of 'conan info --build' is correct + + +-----------+ + +------> | H0 | <--------+ + | +------+----+ | + private | ^ |private + | | | + +----+-----+ +----+------+ +----+------+ + | H1a | | H1b | | H1c | + +----+-----+ +-----------+ +----+------+ + ^ ^ + | | + | | ++----------+-+ +-------+------+ +| H2a | <------+ +-----> | H2c | ++------------+ | | +--------------+ + | | + +---+----+---+ + | H3 | + +------------+ + 
+ """ + + self._export("H0", "0.1") + + self._export("H1a", "0.1", deps=[("H0/0.1@lu/st", "private")]) + self._export("H1b", "0.1", deps=["H0/0.1@lu/st"]) + self._export("H1c", "0.1", deps=[("H0/0.1@lu/st", "private")]) + + self._export("H2a", "0.1", deps=["H1a/0.1@lu/st"]) + self._export("H2c", "0.1", deps=["H1c/0.1@lu/st"]) + + self._export("H3", "0.1", deps=["H2a/0.1@lu/st", + "H2c/0.1@lu/st"]) + + # If we install H3 we need to build all except H1b + self.clients["H3"].run("info --build missing") + self.assert_last_line(self.clients["H3"], + "H0/0.1@lu/st, H1a/0.1@lu/st, H1c/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st") + + # If we install H0 we need to build nothing (current project) + self.clients["H0"].run("info --build missing") + self.assert_last_line(self.clients["H0"], "") + + # If we install H0 we need to build H0 + self.clients["H1a"].run("info --build missing") + self.assert_last_line(self.clients["H1a"], "H0/0.1@lu/st") + + # If we build and upload H1a and H1c, no more H0 (private) is required + self.clients["H3"].run("install H1a/0.1@lu/st --build ") + self.clients["H3"].run("install H1c/0.1@lu/st --build ") + self.clients["H3"].run("upload H1a/0.1@lu/st --all") + self.clients["H3"].run("upload H1c/0.1@lu/st --all") + + self.clients["H3"].run("remove '*' -f") + self.clients["H3"].run("info --build missing") + self.assert_last_line(self.clients["H3"], + "H2a/0.1@lu/st, H2c/0.1@lu/st") + + # But if we force to build all, all nodes have to be built + self.clients["H3"].run("remove '*' -f") + self.clients["H3"].run("info --build") + self.assert_last_line(self.clients["H3"], + "H0/0.1@lu/st, H1a/0.1@lu/st, H1c/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st") + + # Now upgrade the recipe H1a and upload it (but not the package) + # so the package become outdated + conanfile_path = os.path.join(self.clients["H1a"].current_folder, CONANFILE) + conanfile = load(conanfile_path) + conanfile += "\n# MODIFIED" + save(conanfile_path, conanfile) + 
self.clients["H1a"].run("export lu/st") + self.clients["H1a"].run("upload H1a/0.1@lu/st") # NOW IS OUTDATED! + + # Without build outdated the built packages are the same + self.clients["H3"].run("remove '*' -f") + self.clients["H3"].run("info --build missing") + self.assert_last_line(self.clients["H3"], + "H2a/0.1@lu/st, H2c/0.1@lu/st") + + # But with build outdated we have to build the private H0 (but only once) and H1a + self.clients["H3"].run("remove '*' -f") + self.clients["H3"].run("info --build outdated") + self.assert_last_line(self.clients["H3"], + "H0/0.1@lu/st, H1a/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st") diff --git a/testbed/conan-io__conan/conans/test/integration/install_outdated_test.py b/testbed/conan-io__conan/conans/test/integration/install_outdated_test.py new file mode 100644 index 0000000000000000000000000000000000000000..6b29f205075cc17d22ad51bf50ff09e0c58eb4f6 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/install_outdated_test.py @@ -0,0 +1,92 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.ref import ConanFileReference +from conans.util.files import rmdir + + +class InstallOutdatedPackagesTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + self.new_client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}) + + self.ref = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + self.client.save(files) + self.client.run("export lasote/stable") + + self.client.run("install Hello0/0.1@lasote/stable --build missing") + self.client.run("upload Hello0/0.1@lasote/stable --all") + + def install_outdated_test(self): + # If we try to install the same package with --build oudated 
it's already ok + self.client.run("install Hello0/0.1@lasote/stable --build outdated") + self.assertIn("Hello0/0.1@lasote/stable: Package is up to date", self.client.user_io.out) + + # Then we can export a modified recipe and try to install without --build outdated + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + files["conanfile.py"] = files["conanfile.py"] + "\n#Otherline" + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install Hello0/0.1@lasote/stable") + self.assertIn("Hello0/0.1@lasote/stable: Already installed!", self.client.user_io.out) + self.assertNotIn("Package is up to date", self.client.user_io.out) + self.assertNotIn("Outdated package!", self.client.user_io.out) + + # Try now with the --build outdated + self.client.run("install Hello0/0.1@lasote/stable --build outdated") + self.assertNotIn("Package is up to date", self.client.user_io.out) + self.assertIn("Outdated package!", self.client.user_io.out) + self.assertIn("Building your package", self.client.user_io.out) + + # Remove all local references, export again (the modified version not uploaded) + # and try to install, it will discard the remote package too + self.client.run("remove Hello0* -f") + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("remote add_ref Hello0/0.1@lasote/stable default") + self.client.run("install Hello0/0.1@lasote/stable --build outdated") + self.assertNotIn("Hello0/0.1@lasote/stable: Already installed!", self.client.user_io.out) + self.assertNotIn("Package is up to date", self.client.user_io.out) + self.assertIn("Outdated package!", self.client.user_io.out) + self.assertIn("Building your package", self.client.user_io.out) + + def install_outdated_dep_test(self): + # A new recipe that depends on Hello0/0.1 + new_client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}) + files = cpp_hello_conan_files("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], build=False) 
+ new_client.save(files) + new_client.run("export lasote/stable") + self.assertIn("A new conanfile.py version was exported", new_client.user_io.out) + # It will retrieve from the remote Hello0 and build Hello1 + new_client.run("install Hello1/0.1@lasote/stable --build missing") + + # Then modify REMOTE Hello0 recipe files (WITH THE OTHER CLIENT) + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + files["conanfile.py"] = files["conanfile.py"] + "\n#MODIFIED RECIPE" + self.client.save(files) + self.client.run("export lasote/stable") + self.assertIn("A new conanfile.py version was exported", self.client.user_io.out) + self.client.run("install Hello0/0.1@lasote/stable --build missing") + # Upload only the recipe, so the package is outdated in the server + self.client.run("upload Hello0/0.1@lasote/stable") + + # Now, with the new_client, remove only the binary package from Hello0 + rmdir(new_client.paths.packages(self.ref)) + # And try to install Hello1 again, should not complain because the remote + # binary is in the "same version" than local cached Hello0 + new_client.run("install Hello1/0.1@lasote/stable --build outdated") + self.assertIn("Downloading conan_package.tgz", new_client.user_io.out) + self.assertIn("Hello0/0.1@lasote/stable: Package is up to date", new_client.user_io.out) + + # But if we remove the full Hello0 local package, will retrieve the updated + # recipe and the outdated package + new_client.run("remove Hello0* -f") + new_client.run("install Hello1/0.1@lasote/stable --build outdated") + self.assertIn("Hello0/0.1@lasote/stable: Outdated package!", new_client.user_io.out) + self.assertIn("Hello0/0.1@lasote/stable: Building your package", new_client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/install_selected_packages_test.py b/testbed/conan-io__conan/conans/test/integration/install_selected_packages_test.py new file mode 100644 index 
0000000000000000000000000000000000000000..80913d9f4f2152e6601ba6b30bf3e47314f0ef66 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/install_selected_packages_test.py @@ -0,0 +1,99 @@ +import unittest +from conans.test.tools import TestClient, TestServer +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANFILE +from conans.model.ref import ConanFileReference, PackageReference +from conans.util.files import load + + +class InstallSelectedPackagesTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + self.package_ids = self._upload_some_packages(self.client) + self.new_client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}) + + def install_all_test(self): + # Should retrieve the three packages + self.new_client.run("install Hello0/0.1@lasote/stable --all") + p1 = os.path.join(self.new_client.paths.packages(self.ref)) + packages = os.listdir(p1) + self.assertEquals(len(packages), 3) + + def install_some_reference_test(self): + # Should retrieve the specified packages + self.new_client.run("install Hello0/0.1@lasote/stable -p %s" % self.package_ids[0]) + packages = os.listdir(self.new_client.paths.packages(self.ref)) + self.assertEquals(len(packages), 1) + self.assertEquals(packages[0], self.package_ids[0]) + + self.new_client.run("install Hello0/0.1@lasote/stable -p %s -p %s" % (self.package_ids[0], + self.package_ids[1])) + packages = os.listdir(self.new_client.paths.packages(self.ref)) + self.assertEquals(len(packages), 2) + + def download_recipe_twice_test(self): + expected_conanfile_contents = self.files[CONANFILE] + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_conanfile = load(os.path.join(self.new_client.paths.export(self.ref), CONANFILE)) + 
self.assertEquals(expected_conanfile_contents, got_conanfile) + + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_conanfile = load(os.path.join(self.new_client.paths.export(self.ref), CONANFILE)) + self.assertEquals(expected_conanfile_contents, got_conanfile) + + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_conanfile = load(os.path.join(self.new_client.paths.export(self.ref), CONANFILE)) + self.assertEquals(expected_conanfile_contents, got_conanfile) + + def download_packages_twice_test(self): + expected_header_contents = self.files["helloHello0.h"] + package_folder = self.new_client.paths.package(PackageReference(self.ref, self.package_ids[0])) + + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_header = load(os.path.join(package_folder, "include", "helloHello0.h")) + self.assertEquals(expected_header_contents, got_header) + + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_header = load(os.path.join(package_folder, "include", "helloHello0.h")) + self.assertEquals(expected_header_contents, got_header) + + self.new_client.run("install Hello0/0.1@lasote/stable --all") + got_header = load(os.path.join(package_folder, "include", "helloHello0.h")) + self.assertEquals(expected_header_contents, got_header) + + def install_all_but_no_packages_test(self): + + # Remove all from remote + self.new_client.run("remove Hello* -f -r default") + + # Try to install all + self.new_client.run("install Hello0/0.1@lasote/stable --all", ignore_error=True) + self.assertIn("'Hello0/0.1@lasote/stable' not found in remote", self.new_client.user_io.out) + + # Upload only the recipe + self.new_client.save(self.files) + self.new_client.run("export lasote/stable") + self.new_client.run("upload Hello0/0.1@lasote/stable --all") + + # And try to download all + self.new_client.run("install Hello0/0.1@lasote/stable --all") + self.assertIn("No remote binary packages found in remote", self.new_client.user_io.out) + + 
def _upload_some_packages(self, client): + self.ref = ConanFileReference.loads("Hello0/0.1@lasote/stable") + self.files = cpp_hello_conan_files("Hello0", "0.1") + # No build. + self.files[CONANFILE] = self.files[CONANFILE].replace("def build(self):", "def build(self):\n return\n") + client.save(self.files) + client.run("export lasote/stable") + client.run("install Hello0/0.1@lasote/stable -s os=Windows --build missing") + client.run("install Hello0/0.1@lasote/stable -s os=Linux --build missing") + client.run("install Hello0/0.1@lasote/stable -s os=Linux -s compiler=gcc -s " + "compiler.version=4.6 -s compiler.libcxx=libstdc++ --build missing") + client.run("upload Hello0/0.1@lasote/stable --all") + return os.listdir(self.client.paths.packages(self.ref)) diff --git a/testbed/conan-io__conan/conans/test/integration/install_update_test.py b/testbed/conan-io__conan/conans/test/integration/install_update_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa3a8ee7b03c67c8e75649d92779334100bd1af --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/install_update_test.py @@ -0,0 +1,40 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.model.ref import ConanFileReference, PackageReference +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.util.files import load +from time import sleep + + +class InstallUpdateTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def reuse_test(self): + files = cpp_hello_conan_files("Hello0", "1.0", build=False) + + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install Hello0/1.0@lasote/stable --build") + self.client.run("upload Hello0/1.0@lasote/stable --all") + + client2 = TestClient(servers=self.servers, users={"default": 
[("lasote", "mypass")]}) + client2.run("install Hello0/1.0@lasote/stable") + + files["helloHello0.h"] = "//EMPTY!" + self.client.save(files, clean_first=True) + sleep(1) + self.client.run("export lasote/stable") + self.client.run("install Hello0/1.0@lasote/stable --build") + self.client.run("upload Hello0/1.0@lasote/stable --all") + + client2.run("install Hello0/1.0@lasote/stable --update") + ref = ConanFileReference.loads("Hello0/1.0@lasote/stable") + package_ids = client2.paths.conan_packages(ref) + package_path = client2.paths.package(PackageReference(ref, package_ids[0])) + header = load(os.path.join(package_path, "include/helloHello0.h")) + self.assertEqual(header, "//EMPTY!") diff --git a/testbed/conan-io__conan/conans/test/integration/loop_detection_test.py b/testbed/conan-io__conan/conans/test/integration/loop_detection_test.py new file mode 100644 index 0000000000000000000000000000000000000000..1c80b4cb1540f44054d2235873c72fd323eca5e0 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/loop_detection_test.py @@ -0,0 +1,27 @@ +import unittest +from conans.test.tools import TestClient + + +class LoopDectectionTest(unittest.TestCase): + + def copy_error_test(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile + +class Package{number}Conan(ConanFile): + name = "Package{number}" + version = "0.1" + requires = "Package{dep}/0.1@lasote/stable" +''' + for package_number in [1, 2, 3]: + content = conanfile.format(number=package_number, dep=package_number % 3 + 1) + files = {"conanfile.py": content} + + client.save(files, clean_first=True) + client.run("export lasote/stable") + + client.run("install Package3/0.1@lasote/stable --build", ignore_error=True) + self.assertIn("ERROR: Loop detected: Package3/0.1@lasote/stable->" + "Package1/0.1@lasote/stable->Package2/0.1@lasote/stable", + client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/manifest_validation_test.py 
b/testbed/conan-io__conan/conans/test/integration/manifest_validation_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d71ad04f8513e9c6ea86d90a6978ef3f308a04e8 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/manifest_validation_test.py @@ -0,0 +1,246 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference +import os +from conans.util.files import save, load, md5 +from conans.model.ref import PackageReference +from conans.paths import CONANFILE, SimplePaths + + +class ManifestValidationTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + conanfile = """from conans import ConanFile + +class ConanFileTest(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" +""" + self.files = {CONANFILE: conanfile, "data.txt": "MyData"} + # Export and upload the conanfile + self.reference = ConanFileReference.loads("Hello/0.1@lasote/stable") + self.client.save(self.files) + self.client.run("export lasote/stable") + + def _capture_verify_manifest(self, reference, remote="local cache", folder=""): + self.client.run("install %s --build missing --manifests %s" % (str(reference), folder)) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable' from %s" % remote, + self.client.user_io.out) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9' from %s" % remote, + self.client.user_io.out) + + real_folder = folder or ".conan_manifests" + output_folder = os.path.join(self.client.current_folder, real_folder) + paths = SimplePaths(output_folder) + self.assertTrue(os.path.exists(paths.digestfile_conanfile(self.reference))) + package_reference = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + 
self.assertTrue(os.path.exists(paths.digestfile_package(package_reference))) + + # again should do nothing + self.client.run("install %s --build missing --manifests %s" + % (str(self.reference), folder)) + self.assertNotIn("manifest", self.client.user_io.out) + + # now verify + self.client.run("install %s --build missing --verify %s" % (str(self.reference), folder)) + self.assertIn("Manifest for 'Hello/0.1@lasote/stable': OK", self.client.user_io.out) + self.assertIn("Manifest for '%s': OK" % str(package_reference), self.client.user_io.out) + + def capture_verify_manifest_test(self): + self._capture_verify_manifest("Hello/0.1@lasote/stable") + + def conanfile_capture_verify_manifest_test(self): + files = {"conanfile.txt": "[requires]\nHello/0.1@lasote/stable"} + self.client.save(files, clean_first=True) + self._capture_verify_manifest(".") + + def capture_verify_manifest_folder_test(self): + self._capture_verify_manifest("Hello/0.1@lasote/stable", folder="my_custom_folder") + + def conanfile_capture_verify_manifest_folder_test(self): + files = {"conanfile.txt": "[requires]\nHello/0.1@lasote/stable"} + self.client.save(files, clean_first=True) + folder = "mymanifests" + self._capture_verify_manifest(".", folder=folder) + + conanfile = """from conans import ConanFile +class ConanFileTest(ConanFile): + name = "Hello2" + version = "0.1" +""" + client = TestClient(base_folder=self.client.base_folder) + client.save({CONANFILE: conanfile}) + client.run("export lasote/stable") + + files = {"conanfile.txt": "[requires]\nHello2/0.1@lasote/stable\nHello/0.1@lasote/stable"} + self.client.save(files) + + self.client.run("install . 
--build missing --manifests %s" % folder) + + remote = "local cache" + package_reference = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertIn("Manifest for 'Hello/0.1@lasote/stable': OK", self.client.user_io.out) + self.assertIn("Manifest for '%s': OK" % str(package_reference), self.client.user_io.out) + self.assertIn("Installed manifest for 'Hello2/0.1@lasote/stable' from %s" % remote, + self.client.user_io.out) + self.assertIn("Installed manifest for 'Hello2/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9' from %s" % remote, + self.client.user_io.out) + + output_folder = os.path.join(self.client.current_folder, folder) + paths = SimplePaths(output_folder) + self.assertTrue(os.path.exists(paths.digestfile_conanfile(self.reference))) + self.assertTrue(os.path.exists(paths.digestfile_package(package_reference))) + package_reference = PackageReference.loads("Hello2/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertTrue(os.path.exists(paths.digestfile_package(package_reference))) + + def remote_capture_verify_manifest_test(self): + self.client.run("upload %s --all" % str(self.reference)) + self.client.run("remove Hello* -f") + files = {"conanfile.txt": "[requires]\nHello/0.1@lasote/stable"} + self.client.save(files, clean_first=True) + self._capture_verify_manifest(".", remote="default:") + + def _failed_verify(self, reference, remote="local cache"): + self.client.run("install %s --build missing --manifests" % str(reference)) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable' from %s" % remote, + self.client.user_io.out) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9' from %s" % remote, + self.client.user_io.out) + + output_folder = os.path.join(self.client.current_folder, ".conan_manifests") + paths = SimplePaths(output_folder) + 
self.assertTrue(os.path.exists(paths.digestfile_conanfile(self.reference))) + + package_reference = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertTrue(os.path.exists(paths.digestfile_package(package_reference))) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + conanfile = """from conans import ConanFile +class ConanFileTest(ConanFile): + name = "Hello" + version = "0.1" + exports = "*" +""" + files = {CONANFILE: conanfile, "data.txt": "MyDataHacked"} + # Export and upload the conanfile + client.save(files) + client.run("export lasote/stable") + client.run("upload %s --all" % str(self.reference)) + + # now verify, with update + self.client.run("remove Hello/0.1@lasote/stable -f") + self.client.run("install %s --build missing --verify" + % str(self.reference), + ignore_error=True) + self.assertNotIn("Manifest for 'Hello/0.1@lasote/stable': OK", self.client.user_io.out) + self.assertNotIn("Manifest for '%s': OK" % str(package_reference), self.client.user_io.out) + self.assertIn("Modified or new manifest 'Hello/0.1@lasote/stable' detected", + self.client.user_io.out) + + def capture_verify_error_manifest_test(self): + self._failed_verify("Hello/0.1@lasote/stable") + + def conanfile_capture_verify_error_manifest_test(self): + files = {"conanfile.txt": "[requires]\nHello/0.1@lasote/stable"} + self.client.save(files, clean_first=True) + self._failed_verify(".") + + def _failed_package_verify(self, reference, remote="local cache"): + self.client.run("install %s --build missing --manifests" % str(reference)) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable' from %s" % remote, + self.client.user_io.out) + self.assertIn("Installed manifest for 'Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9' from %s" % remote, + self.client.user_io.out) + + output_folder = os.path.join(self.client.current_folder, ".conan_manifests") + paths = 
SimplePaths(output_folder) + self.assertTrue(os.path.exists(paths.digestfile_conanfile(self.reference))) + + package_reference = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertTrue(os.path.exists(paths.digestfile_package(package_reference))) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + client.save(self.files) + client.run("export lasote/stable") + client.run("install Hello/0.1@lasote/stable --build=missing") + info = os.path.join(client.paths.package(package_reference), "conaninfo.txt") + info_content = load(info) + info_content += "# Dummy string" + save(info, info_content) + manifest = client.paths.load_package_manifest(package_reference) + manifest.file_sums["conaninfo.txt"] = md5(info_content) + save(client.paths.digestfile_package(package_reference), str(manifest)) + + manifest = client.paths.load_package_manifest(package_reference) + client.run("upload %s --all" % str(self.reference)) + + # now verify, with update + self.client.run("remove Hello/0.1@lasote/stable -f") + self.client.run("install %s --build missing --verify" + % str(self.reference), + ignore_error=True) + self.assertNotIn("Manifest for 'Hello/0.1@lasote/stable': OK", self.client.user_io.out) + self.assertNotIn("Manifest for '%s': OK" % str(package_reference), self.client.user_io.out) + self.assertIn("Modified or new manifest '%s' detected" % str(package_reference), + self.client.user_io.out) + + def capture_verify_package_error_manifest_test(self): + self._failed_package_verify("Hello/0.1@lasote/stable") + + def conanfile_capture_verify_package_error_manifest_test(self): + files = {"conanfile.txt": "[requires]\nHello/0.1@lasote/stable"} + self.client.save(files, clean_first=True) + self._failed_package_verify(".") + + def manifest_wrong_folder_test(self): + reference = "Hello/0.1@lasote/stable" + self.client.run("install %s --build missing --verify whatever" + % str(reference), 
ignore_error=True) + self.assertIn("Manifest folder does not exist:", self.client.user_io.out) + + def manifest_wrong_args_test(self): + reference = "Hello/0.1@lasote/stable" + self.client.run("install %s --build missing --verify -m" + % str(reference), ignore_error=True) + self.assertIn("ERROR: Do not specify both", self.client.user_io.out) + self.client.run("install %s --build missing -mi -m" + % str(reference), ignore_error=True) + self.assertIn("ERROR: Do not specify both", self.client.user_io.out) + + def test_corrupted_recipe(self): + export_path = self.client.paths.export(self.reference) + file_path = os.path.join(export_path, "data.txt") + save(file_path, "BAD CONTENT") + + self.client.run("install %s --build missing --manifests" % str(self.reference), + ignore_error=True) + self.assertIn("Hello/0.1@lasote/stable local cache package is corrupted", + self.client.user_io.out) + + def test_corrupted_package(self): + self.client.run("install %s --build missing" % str(self.reference)) + package_reference = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_path = self.client.paths.package(package_reference) + file_path = os.path.join(package_path, "conaninfo.txt") + save(file_path, load(file_path) + "RANDOM STRING") + + self.client.run("install %s --build missing --manifests" % str(self.reference), + ignore_error=True) + self.assertIn("%s local cache package is corrupted" % str(package_reference), + self.client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/multi_build_test.py b/testbed/conan-io__conan/conans/test/integration/multi_build_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8d9350cf6ac9aaaf1e524289e3599ded0e247904 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/multi_build_test.py @@ -0,0 +1,48 @@ +import unittest +from conans.test.tools import TestClient +from conans.model.ref import ConanFileReference +import os +from 
conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr + + +@attr("slow") +class CollectLibsTest(unittest.TestCase): + + def collect_libs_test(self): + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", collect_libs=True) + client = TestClient() + client.save(files) + client.run("export lasote/stable") + + client.run("install %s --build missing" % str(conan_reference)) + + # Check compilation ok + package_ids = client.paths.conan_packages(conan_reference) + self.assertEquals(len(package_ids), 1) + + # Reuse them + conan_reference = ConanFileReference.loads("Hello1/0.2@lasote/stable") + files3 = cpp_hello_conan_files("Hello1", "0.1", ["Hello0/0.1@lasote/stable"], + collect_libs=True) + + # reusing the binary already in cache + client.save(files3, clean_first=True) + client.run('install') + client.run('build') + + command = os.sep.join([".", "bin", "say_hello"]) + client.runner(command, cwd=client.current_folder) + self.assertIn("Hello Hello1", client.user_io.out) + self.assertIn("Hello Hello0", client.user_io.out) + + # rebuilding the binary in cache + client.run('remove "*" -p -f') + client.run('install --build') + client.run('build') + + command = os.sep.join([".", "bin", "say_hello"]) + client.runner(command, cwd=client.current_folder) + self.assertIn("Hello Hello1", client.user_io.out) + self.assertIn("Hello Hello0", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/multi_remote_test.py b/testbed/conan-io__conan/conans/test/integration/multi_remote_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0dc3732c2d56fdad2b9a353610a5fb1d981a8078 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/multi_remote_test.py @@ -0,0 +1,85 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference +from 
conans.test.utils.cpp_test_files import cpp_hello_conan_files +from collections import OrderedDict + + +class MultiRemoteTest(unittest.TestCase): + + def setUp(self): + self.servers = OrderedDict() + self.users = {} + for i in range(3): + test_server = TestServer() + self.servers["remote%d" % i] = test_server + self.users["remote%d" % i] = [("lasote", "mypass")] + + self.client = TestClient(servers=self.servers, users=self.users) + + def upload_test(self): + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1") + files["conanfile.py"] = files["conanfile.py"].replace("def build(", "def build2(") + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("upload %s" % str(conan_reference)) + + self.client.run("info %s" % str(conan_reference)) + self.assertIn("remote0=http://", self.client.user_io.out) + + # The remote, once fixed does not change + self.client.run("upload %s -r=remote1" % str(conan_reference)) + self.client.run("info %s" % str(conan_reference)) + self.assertIn("remote0=http://", self.client.user_io.out) + + # Now install it in other machine from remote 0 + client2 = TestClient(servers=self.servers, users=self.users) + client2.run("install %s --build=missing" % str(conan_reference)) + client2.run("info %s" % str(conan_reference)) + self.assertIn("remote0=http://", client2.user_io.out) + + # Now install it in other machine from remote 1 + servers = self.servers.copy() + servers.pop("remote0") + client3 = TestClient(servers=servers, users=self.users) + client3.run("install %s --build=missing" % str(conan_reference)) + client3.run("info %s" % str(conan_reference)) + self.assertIn("remote1=http://", client3.user_io.out) + + def install_from_remotes_test(self): + for i in range(3): + conan_reference = ConanFileReference.loads("Hello%d/0.1@lasote/stable" % i) + files = cpp_hello_conan_files("Hello%d" % i, "0.1") + files["conanfile.py"] = 
files["conanfile.py"].replace("def build(", "def build2(") + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("upload %s -r=remote%d" % (str(conan_reference), i)) + + self.client.run("info %s" % str(conan_reference)) + self.assertIn("remote%d=http://" % i, self.client.user_io.out) + + + # Now install it in other machine from remote 0 + client2 = TestClient(servers=self.servers, users=self.users) + conan_reference = ConanFileReference.loads("HelloX/0.1@lasote/stable") + files = cpp_hello_conan_files("HelloX", "0.1", deps=["Hello0/0.1@lasote/stable", + "Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"]) + files["conanfile.py"] = files["conanfile.py"].replace("def build(", "def build2(") + client2.save(files) + client2.run("install --build=missing") + self.assertIn("Hello0/0.1@lasote/stable from remote0", client2.user_io.out) + self.assertIn("Hello1/0.1@lasote/stable from remote1", client2.user_io.out) + self.assertIn("Hello2/0.1@lasote/stable from remote2", client2.user_io.out) + client2.run("info") + self.assertIn("Remote: remote0=http://", client2.user_io.out) + self.assertIn("Remote: remote1=http://", client2.user_io.out) + self.assertIn("Remote: remote2=http://", client2.user_io.out) + + + + + + + \ No newline at end of file diff --git a/testbed/conan-io__conan/conans/test/integration/multi_remotes_test.py b/testbed/conan-io__conan/conans/test/integration/multi_remotes_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b6c9d31b6fc6d75c0f7def98dd4adde33ac4beaf --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/multi_remotes_test.py @@ -0,0 +1,87 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.paths import CONANFILE +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from collections import OrderedDict +from time import sleep + + +class MultiRemotesTest(unittest.TestCase): + + def setUp(self): + default_server = TestServer() + 
local_server = TestServer() + self.servers = OrderedDict() + self.servers["default"] = default_server + self.servers["local"] = local_server + + def _create(self, client, number, version, deps=None, export=True, modifier=""): + files = cpp_hello_conan_files(number, version, deps, build=False) + # To avoid building + files = {CONANFILE: files[CONANFILE].replace("config(", "config2(") + modifier} + client.save(files, clean_first=True) + if export: + client.run("export lasote/stable") + + def conan_test_test(self): + '''Checks --build in test command''' + client_a = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")], + "local": [("lasote", "mypass")]}) + client_b = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")], + "local": [("lasote", "mypass")]}) + + # Upload Hello0 to local and default from client_a + self._create(client_a, "Hello0", "0.0") + client_a.run("upload Hello0/0.0@lasote/stable -r local") + client_a.run("upload Hello0/0.0@lasote/stable -r default") + client_a.run("remote list_ref") + self.assertIn("Hello0/0.0@lasote/stable: local", str(client_a.user_io.out)) + sleep(1) # For timestamp and updates checks + + # Download Hello0 from local with client_b + client_b.run("install Hello0/0.0@lasote/stable -r local --build missing") + client_b.run("remote list_ref") + self.assertIn("Hello0/0.0@lasote/stable: local", str(client_b.user_io.out)) + + # Update Hello0 with client_a and reupload + self._create(client_a, "Hello0", "0.0", modifier="\n") + client_a.run("upload Hello0/0.0@lasote/stable -r local") + + # Execute info method in client_b, should advise that there is an update + client_b.run("info Hello0/0.0@lasote/stable -u") + self.assertIn("Updates: There is a newer version (local)", str(client_b.user_io.out)) + + # Now try to update the package with install -u + client_b.run("remote list_ref") + self.assertIn("Hello0/0.0@lasote/stable: local", str(client_b.user_io.out)) + client_b.run("install 
Hello0/0.0@lasote/stable -u --build") + self.assertIn("Hello0/0.0@lasote/stable: Retrieving from remote 'local'", + str(client_b.user_io.out)) + client_b.run("remote list_ref") + self.assertIn("Hello0/0.0@lasote/stable: local", str(client_b.user_io.out)) + + # Upload a new version from client A, but only to the default server (not the ref-listed) + # Upload Hello0 to local and default from client_a + sleep(1) # For timestamp and updates checks + self._create(client_a, "Hello0", "0.0", modifier="\n\n") + client_a.run("upload Hello0/0.0@lasote/stable -r default") + + # Now client_b checks for updates without -r parameter + client_b.run("info Hello0/0.0@lasote/stable -u") + self.assertIn("Remote: local", str(client_b.user_io.out)) + self.assertIn("You have the latest version (local)", str(client_b.user_io.out)) + + # But if we connect to default, should tell us that there is an update IN DEFAULT! + client_b.run("info Hello0/0.0@lasote/stable -r default -u") + self.assertIn("Remote: local", str(client_b.user_io.out)) + self.assertIn("There is a newer version (default)", str(client_b.user_io.out)) + client_b.run("remote list_ref") + self.assertIn("Hello0/0.0@lasote/stable: local", str(client_b.user_io.out)) + + # Well, now try to update the package with -r default -u + client_b.run("install Hello0/0.0@lasote/stable -r default -u --build") + self.assertIn("Hello0/0.0@lasote/stable: Retrieving from remote 'default'", + str(client_b.user_io.out)) + client_b.run("info Hello0/0.0@lasote/stable -u") + self.assertIn("Updates: The local file is newer than remote's one (local)", + str(client_b.user_io.out)) diff --git a/testbed/conan-io__conan/conans/test/integration/only_source_test.py b/testbed/conan-io__conan/conans/test/integration/only_source_test.py new file mode 100644 index 0000000000000000000000000000000000000000..25e74d33e9e46e10bac27406dd184911a8e7c222 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/only_source_test.py @@ -0,0 +1,222 @@ +import 
unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference +import os +from conans.paths import CONANFILE +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.util.files import load + + +class OnlySourceTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + + def _create(self, client, number, version, deps=None, export=True): + files = cpp_hello_conan_files(number, version, deps, build=False, config=False) + + client.save(files, clean_first=True) + if export: + client.run("export lasote/stable") + + def conan_test_test(self): + '''Checks --build in test command''' + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + self._create(client, "Hello0", "0.0") + self._create(client, "Hello1", "1.1", ["Hello0/0.0@lasote/stable"]) + + # Now test out Hello2 + self._create(client, "Hello2", "2.2", ["Hello1/1.1@lasote/stable"], export=True) + hello2conanfile = load(os.path.join(client.current_folder, CONANFILE)) + client.save({CONANFILE: hello2conanfile}) + + test_conanfile = ''' +from conans.model.conan_file import ConanFile + +class DefaultNameConan(ConanFile): + settings = "os", "compiler", "arch" + requires = "Hello2/2.2@lasote/stable" + generators = "cmake" + + def test(self): + pass + ''' + client.save({"test/%s" % CONANFILE: test_conanfile}) + + # Should recognize the hello package + # Will Fail because Hello0/0.0 and Hello1/1.1 has not built packages + # and by default no packages are built + error = client.run("test", ignore_error=True) + self.assertTrue(error) + self.assertIn('Try to build from sources with "--build Hello0"', client.user_io.out) + + # We generate the package for Hello0/0.0 + client.run("install Hello0/0.0@lasote/stable --build Hello0") + + # Still missing Hello1/1.1 + error = client.run("test", ignore_error=True) + self.assertTrue(error) + self.assertIn('Try to build 
from sources with "--build Hello1"', client.user_io.out) + + # We generate the package for Hello1/1.1 + client.run("install Hello1/1.1@lasote/stable --build Hello1") + + # Now Hello2 should be built and not fail + client.run("test") + self.assertNotIn("Can't find a 'Hello2/2.2@lasote/stable' package", client.user_io.out) + self.assertIn('Hello2/2.2@lasote/stable: WARN: Forced build from source', + client.user_io.out) + + # Now package is generated but should be built again + client.run("test") + self.assertIn('Hello2/2.2@lasote/stable: WARN: Forced build from source', + client.user_io.out) + + def build_policies_update_test(self): + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + conanfile = """ +from conans import ConanFile + +class MyPackage(ConanFile): + name = "test" + version = "1.9" + build_policy = 'always' + + def source(self): + self.output.info("Getting sources") + def build(self): + self.output.info("Building sources") + def package(self): + self.output.info("Packaging this test package") + """ + + files = {CONANFILE: conanfile} + client.save(files, clean_first=True) + client.run("export lasote/stable") + client.run("install test/1.9@lasote/stable") + self.assertIn("Getting sources", client.user_io.out) + self.assertIn("Building sources", client.user_io.out) + self.assertIn("Packaging this test package", client.user_io.out) + self.assertIn("Building package from source as defined by build_policy='always'", + client.user_io.out) + client.run("upload test/1.9@lasote/stable") + + def build_policies_in_conanfile_test(self): + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files = cpp_hello_conan_files("Hello0", "1.0", [], config=False, build=False) + + # --- Build policy to missing --- + files[CONANFILE] = files[CONANFILE].replace("exports = '*'", "exports = '*'\n build_policy = 'missing'") + client.save(files, clean_first=True) + client.run("export lasote/stable") + + # 
Install, it will build automatically if missing (without the --build missing option) + client.run("install Hello0/1.0@lasote/stable -g txt") + self.assertIn("Building", client.user_io.out) + self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out) + + # Try to do it again, now we have the package, so not build is done + client.run("install Hello0/1.0@lasote/stable -g txt") + self.assertNotIn("Building", client.user_io.out) + self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out) + + # Try now to upload all packages, should not crash because of the "missing" build policy + client.run("upload Hello0/1.0@lasote/stable --all", ignore_error=False) + + # --- Build policy to always --- + files[CONANFILE] = files[CONANFILE].replace("build_policy = 'missing'", "build_policy = 'always'") + client.save(files, clean_first=True) + client.run("export lasote/stable") + + # Install, it will build automatically if missing (without the --build missing option) + client.run("install Hello0/1.0@lasote/stable -g txt") + self.assertIn("Detected build_policy 'always', trying to remove source folder", + client.user_io.out) + self.assertIn("Building", client.user_io.out) + self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out) + + # Try to do it again, now we have the package, but we build again + client.run("install Hello0/1.0@lasote/stable -g txt") + self.assertIn("Building", client.user_io.out) + self.assertIn("Detected build_policy 'always', trying to remove source folder", + client.user_io.out) + self.assertIn("Generated txt created conanbuildinfo.txt", client.user_io.out) + + # Try now to upload all packages, should crash because of the "always" build policy + client.run("upload Hello0/1.0@lasote/stable --all", ignore_error=True) + self.assertIn("no packages can be uploaded", client.user_io.out) + + def reuse_test(self): + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + 
conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1") + files[CONANFILE] = files[CONANFILE].replace("build", "build2") + + client.save(files) + client.run("export lasote/stable") + client.run("install %s --build missing" % str(conan_reference)) + + self.assertTrue(os.path.exists(client.paths.builds(conan_reference))) + self.assertTrue(os.path.exists(client.paths.packages(conan_reference))) + + # Upload + client.run("upload %s --all" % str(conan_reference)) + + # Now from other "computer" install the uploaded conans with same options (nothing) + other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_conan.run("install %s --build missing" % str(conan_reference)) + self.assertFalse(os.path.exists(other_conan.paths.builds(conan_reference))) + self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference))) + + # Now from other "computer" install the uploaded conans with same options (nothing) + other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_conan.run("install %s --build" % str(conan_reference)) + self.assertTrue(os.path.exists(other_conan.paths.builds(conan_reference))) + self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference))) + + # Use an invalid pattern and check that its not builded from source + other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_conan.run("install %s --build HelloInvalid" % str(conan_reference)) + self.assertFalse(os.path.exists(other_conan.paths.builds(conan_reference))) + self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference))) + + # Use another valid pattern and check that its not builded from source + other_conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_conan.run("install %s --build HelloInvalid -b Hello" % str(conan_reference)) + 
self.assertTrue(os.path.exists(other_conan.paths.builds(conan_reference))) + self.assertTrue(os.path.exists(other_conan.paths.packages(conan_reference))) + + # Now even if the package is in local store, check that's rebuilded + other_conan.run("install %s -b Hello*" % str(conan_reference)) + self.assertIn("Copying sources to build folder", other_conan.user_io.out) + + other_conan.run("install %s" % str(conan_reference)) + self.assertNotIn("Copying sources to build folder", other_conan.user_io.out) + + def detect_name_quotes_test(self): + base = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = 'lib' + version = "0.1" +''' + test = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + requires = "lib/0.1@user/channel" + def build(self): + self.conanfile_directory + def test(self): + pass +''' + files = {"conanfile.py": base, + "test/conanfile.py": test} + client = TestClient() + client.save(files) + client.run("export user/channel") + client.run("test_package") diff --git a/testbed/conan-io__conan/conans/test/integration/order_libs_test.py b/testbed/conan-io__conan/conans/test/integration/order_libs_test.py new file mode 100644 index 0000000000000000000000000000000000000000..de73d65427b907b52690aaa99db8a9ff83c32576 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/order_libs_test.py @@ -0,0 +1,60 @@ +import unittest +from conans.test.tools import TestClient +from conans.paths import CONANFILE +from conans.util.files import load +import os + + +class OrderLibsTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def _export(self, name, deps=None, export=True): + def _libs(): + if name == "LibPNG": + libs = '"m"' + elif name == "SDL2": + libs = '"m", "rt", "pthread", "dl"' + else: + libs = "" + return libs + deps = ", ".join(['"%s/1.0@lasote/stable"' % d for d in deps or []]) or '""' + conanfile = """ +from conans import ConanFile, CMake + +class HelloReuseConan(ConanFile): + name = "%s" + 
version = "1.0" + requires = %s + generators = "txt", "cmake" + + def package_info(self): + self.cpp_info.libs = ["%s", %s] +""" % (name, deps, name, _libs()) + + files = {CONANFILE: conanfile} + self.client.save(files, clean_first=True) + if export: + self.client.run("export lasote/stable") + + def reuse_test(self): + self._export("ZLib") + self._export("BZip2") + self._export("SDL2", ["ZLib"]) + self._export("LibPNG", ["ZLib"]) + self._export("freeType", ["BZip2", "LibPNG"]) + self._export("SDL2_ttf", ["freeType", "SDL2"]) + self._export("MyProject", ["SDL2_ttf"], export=False) + + self.client.run("install . --build missing") + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + expected_libs = ['SDL2_ttf', 'SDL2', 'rt', 'pthread', 'dl', 'freeType', + 'BZip2', 'LibPNG', 'm', 'ZLib'] + conanbuildinfo = load(os.path.join(self.client.current_folder, "conanbuildinfo.txt")) + libs = os.linesep.join(expected_libs) + self.assertIn(libs, conanbuildinfo) + conanbuildinfo = load(os.path.join(self.client.current_folder, "conanbuildinfo.cmake")) + libs = " ".join(expected_libs) + self.assertIn(libs, conanbuildinfo) diff --git a/testbed/conan-io__conan/conans/test/integration/package_command_test.py b/testbed/conan-io__conan/conans/test/integration/package_command_test.py new file mode 100644 index 0000000000000000000000000000000000000000..de11e2dbbb36a03207b86b76cd50db607995f7d5 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/package_command_test.py @@ -0,0 +1,168 @@ +import unittest +from conans.test.tools import TestClient +from conans.model.ref import ConanFileReference, PackageReference +import os +from conans.paths import CONANFILE +from conans.util.files import mkdir, load +from conans.test.utils.test_files import temp_folder + + +class PackageCommandTest(unittest.TestCase): + + def package_errors_test(self): + client = TestClient() + client.run("package whatever@user/channel", ignore_error=True) + self.assertIn("Wrong 
package recipe", client.user_io.out) + + client.run("package whatever/1.0@user/channel", ignore_error=True) + self.assertIn("ERROR: Package recipe 'whatever/1.0@user/channel' does not exist", + client.user_io.out) + + conanfile_template = """ +from conans import ConanFile + +class MyConan(ConanFile): + name = "MyLib" + version = "0.1" +""" + client.save({CONANFILE: conanfile_template}) + client.run("export lasote/stable") + client.run("package MyLib/0.1@lasote/stable", ignore_error=True) + self.assertIn("ERROR: MyLib/0.1@lasote/stable: Package recipe has not been built locally", + client.user_io.out) + + builds_dir = client.paths.builds(ConanFileReference.loads("MyLib/0.1@lasote/stable")) + os.makedirs(builds_dir) + client.run("package MyLib/0.1@lasote/stable", ignore_error=True) + self.assertIn("ERROR: MyLib/0.1@lasote/stable: Package recipe has not been built locally", + client.user_io.out) + + client.run("package MyLib/0.1@lasote/stable 1234", ignore_error=True) + self.assertIn("ERROR: MyLib/0.1@lasote/stable: Package binary '1234' folder doesn't exist", + client.user_io.out) + + def local_package_test(self): + """Use 'conan package' to process locally the package method""" + client = TestClient() + conanfile_template = """ +from conans import ConanFile + +class MyConan(ConanFile): + def package(self): + self.copy(pattern="*.h", dst="include", src="include") +""" + files = {"include/file.h": "foo", + CONANFILE: conanfile_template} + + client.save(files) + client.run("install -g env -g txt") + client.run("build") + origin_folder = client.current_folder + client.current_folder = temp_folder() + client.run('package "%s"' % origin_folder) + content = load(os.path.join(client.current_folder, "include/file.h")) + self.assertEqual(content, "foo") + + def local_package_build_test(self): + """Use 'conan package' to process locally the package method""" + client = TestClient() + conanfile_template = """ +from conans import ConanFile + +class MyConan(ConanFile): + def 
package(self): + self.copy(pattern="*.h", dst="include", src="include") +""" + files = {"include/file.h": "foo", + CONANFILE: conanfile_template} + + client.save(files) + origin_folder = client.current_folder + build_folder = os.path.join(client.current_folder, "build") + mkdir(build_folder) + client.current_folder = build_folder + client.run("install .. -g env -g txt") + client.run("source ..") + client.run("build ..") + client.current_folder = temp_folder() + client.run('package "%s/build"' % origin_folder) + content = load(os.path.join(client.current_folder, "include/file.h")) + self.assertEqual(content, "foo") + + def local_flow_test(self): + """Use 'conan package' to process locally the package method""" + client = TestClient() + conanfile_template = """ +from conans import ConanFile + +class MyConan(ConanFile): + def package(self): + self.copy(pattern="*.h", dst="include", src="include") +""" + files = {"include/file.h": "foo", + CONANFILE: conanfile_template} + + client.save(files) + origin_folder = client.current_folder + client.run("install -g env -g txt") + client.run("source") + client.run("build") + client.run("package .", ignore_error=True) + self.assertIn("ERROR: Cannot 'conan package' to the build folder", client.user_io.out) + package_folder = os.path.join(origin_folder, "package") + mkdir(package_folder) + client.current_folder = package_folder + client.run('package ..') + content = load(os.path.join(client.current_folder, "include/file.h")) + self.assertEqual(content, "foo") + + def package_test(self): + """Use 'conan package' command to repackage a generated package (without build it)""" + client = TestClient() + conanfile_template = """ +from conans import ConanFile + +class MyConan(ConanFile): + name = "MyLib" + version = "0.1" + exports = '*' + + def package(self): + self.copy(pattern="*.h", dst="include", keep_path=False) + #self.copy(pattern="*.a", dst="lib", keep_path=False) +""" + files = {"lib/file1.a": "foo", + "include/file.h": "foo", + 
CONANFILE: conanfile_template} + + client.save(files) + client.run("export lasote/stable") + + # Build and package conan file + conan_reference = ConanFileReference.loads("MyLib/0.1@lasote/stable") + client.run("install %s --build missing" % str(conan_reference)) + package_id = "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9" + package_path = client.paths.package(PackageReference(conan_reference, package_id)) + # Verify the headers are there but lib doesn't + self.assertTrue(os.path.exists(os.path.join(package_path, "include", "file.h"))) + self.assertFalse(os.path.exists(os.path.join(package_path, "lib", "file1.a"))) + + # Fix conanfile and re-package + client.save({CONANFILE: conanfile_template.replace("#", "")}) + client.run("export lasote/stable") + # Build and package conan file + client.run("package %s %s" % (conan_reference, package_id)) + self.assertIn("MyLib/0.1@lasote/stable: " + "Re-packaging 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", client.user_io.out) + self.assertTrue(os.path.exists(os.path.join(package_path, "include", "file.h"))) + self.assertTrue(os.path.exists(os.path.join(package_path, "lib", "file1.a"))) + + # Fix again conanfile and re-package with AL + client.save({CONANFILE: conanfile_template.replace("self.copy", "pass #")}) + client.run("export lasote/stable") + # Build and package conan file + client.run("package %s" % str(conan_reference)) + self.assertIn("MyLib/0.1@lasote/stable: " + "Re-packaging 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", client.user_io.out) + self.assertFalse(os.path.exists(os.path.join(package_path, "include", "file.h"))) + self.assertFalse(os.path.exists(os.path.join(package_path, "lib", "file1.a"))) diff --git a/testbed/conan-io__conan/conans/test/integration/private_deps_test.py b/testbed/conan-io__conan/conans/test/integration/private_deps_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7dc1456de6f29ec3db61460af1b9b2a9fad2519c --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/integration/private_deps_test.py @@ -0,0 +1,229 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.model.ref import ConanFileReference +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANINFO, BUILD_INFO_CMAKE +from conans.util.files import load +from conans.model.info import ConanInfo +from nose.plugins.attrib import attr + + +@attr("slow") +class PrivateDepsTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def _export_upload(self, name=0, version=None, deps=None, msg=None, static=True, build=True, + upload=True): + dll_export = self.client.default_compiler_visual_studio and not static + files = cpp_hello_conan_files(name, version, deps, msg=msg, static=static, + private_includes=True, dll_export=dll_export, build=build, + cmake_targets=False) + conan_ref = ConanFileReference(name, version, "lasote", "stable") + self.client.save(files, clean_first=True) + self.client.run("export lasote/stable") + if upload: + self.client.run("upload %s" % str(conan_ref)) + + def _export(self, name=0, version=None, deps=None): + files = cpp_hello_conan_files(name, version, deps, + private_includes=True, build=False, + cmake_targets=True) + self.client.save(files, clean_first=True) + self.client.run("export lasote/stable") + + def modern_cmake_test(self): + self._export("glew", "0.1") + self._export("glm", "0.1") + self._export("gf", "0.1", deps=[("glm/0.1@lasote/stable", "private"), + "glew/0.1@lasote/stable"]) + + self._export("ImGuiTest", "0.1", deps=["glm/0.1@lasote/stable", + "gf/0.1@lasote/stable"]) + + # Consuming project + self._export("Project", "0.1", deps=["ImGuiTest/0.1@lasote/stable"]) + + # Build packages for both recipes + self.client.run('install . 
--build=missing') + conanbuildinfo_cmake = load(os.path.join(self.client.current_folder, + "conanbuildinfo.cmake")) + + self.assertIn("CONAN_PKG::gf PROPERTY INTERFACE_LINK_LIBRARIES " + "${CONAN_FULLPATH_LIBS_GF} CONAN_PKG::glew ${CONAN_SHARED_LINKER_FLAGS_GF}", + conanbuildinfo_cmake) + self.assertIn("CONAN_PKG::ImGuiTest PROPERTY INTERFACE_LINK_LIBRARIES " + "${CONAN_FULLPATH_LIBS_IMGUITEST} CONAN_PKG::glm CONAN_PKG::gf", + conanbuildinfo_cmake) + + def consumer_force_build_test(self): + """If a conanfile requires another private conanfile, but in the install is forced + the build, the private node has to be downloaded and built""" + self._export_upload("Hello0", "0.1", build=False, upload=False) + self._export_upload("Hello1", "0.1", deps=[("Hello0/0.1@lasote/stable", "private")], + build=False, upload=False) + + # Build packages for both recipes + self.client.run('install Hello1/0.1@lasote/stable --build missing') + + # Upload them to remote + self.client.run("upload Hello0/0.1@lasote/stable --all") + self.client.run("upload Hello1/0.1@lasote/stable --all") + + # Remove local recipes and packages + self.client.run('remove Hello* -f') + + # Install them without force build, private is not retrieved + self.client.run('install Hello1/0.1@lasote/stable --build missing') + # FIXME: recipe should not be retrieved either + # self.assertNotIn("Hello0/0.1@lasote/stable", self.client.user_io.out) + self.assertNotIn("Hello0/0.1@lasote/stable: Package installed", self.client.user_io.out) + + # Remove local recipes and packages + self.client.run('remove Hello* -f') + + # Install them without force build, private is not retrieved + self.client.run('install Hello1/0.1@lasote/stable ') + self.assertNotIn("Hello0/0.1@lasote/stable: Package installed", self.client.user_io.out) + + # Remove local recipes and packages + self.client.run('remove Hello* -f') + + # Install them without forcing build + self.client.run('install Hello1/0.1@lasote/stable --build Hello1') + 
self.assertIn("Hello0/0.1@lasote/stable: Package installed", self.client.user_io.out) + self.assertIn("Hello1/0.1@lasote/stable: Building your package", self.client.user_io.out) + + def consumer_private_test(self): + self._export_upload("Hello0", "0.1", build=False, upload=False) + self._export_upload("Hello1", "0.1", deps=["Hello0/0.1@lasote/stable"], + build=False, upload=False) + self._export_upload("Hello2", "0.1", deps=[("Hello1/0.1@lasote/stable", "private")], + build=False, upload=False) + self._export_upload("Hello3", "0.1", deps=[("Hello2/0.1@lasote/stable"), + ], + build=False, upload=False) + + self.client.run('install --build missing') + self.assertIn("Hello0/0.1@lasote/stable: Generating the package", self.client.user_io.out) + self.assertIn("Hello1/0.1@lasote/stable: Generating the package", self.client.user_io.out) + self.assertIn("Hello2/0.1@lasote/stable: Generating the package", self.client.user_io.out) + + self.client.run("remove Hello0* -p -f ") + self.client.run("remove Hello1* -p -f") + self.client.run("search Hello0/0.1@lasote/stable") + self.assertIn("There are no packages for pattern 'Hello0/0.1@lasote/stable'", + self.client.user_io.out) + self.client.run("search Hello1/0.1@lasote/stable") + self.assertIn("There are no packages for pattern 'Hello1/0.1@lasote/stable'", + self.client.user_io.out) + + self.client.run('install --build missing') + self.assertNotIn("Hello0/0.1@lasote/stable: Generating the package", + self.client.user_io.out) + self.assertNotIn("Hello1/0.1@lasote/stable: Generating the package", + self.client.user_io.out) + + def reuse_test(self): + self._export_upload("Hello0", "0.1") + self._export_upload("Hello00", "0.2", msg="#") + self._export_upload("Hello1", "0.1", deps=[("Hello0/0.1@lasote/stable", "private")], + static=False) + self._export_upload("Hello2", "0.1", deps=[("Hello00/0.2@lasote/stable", "private")], + static=False) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + 
files3 = cpp_hello_conan_files("Hello3", "0.1", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"]) + + # WE need to copy the DLLs and dylib + client.save(files3) + + client.run('install --build missing') + client.run('build') + + # assert Hello3 only depends on Hello2, and Hello1 + info_path = os.path.join(client.current_folder, BUILD_INFO_CMAKE) + build_info_cmake = load(info_path) + # Ensure it does not depend on Hello0 to build, as private in dlls + self.assertNotIn("Hello0", repr(build_info_cmake)) + + command = os.sep.join([".", "bin", "say_hello"]) + client.runner(command, cwd=client.current_folder) + self.assertEqual(['Hello Hello3', 'Hello Hello1', 'Hello Hello0', 'Hello Hello2', + 'Hello #'], + str(client.user_io.out).splitlines()[-5:]) + + # assert Hello3 only depends on Hello2, and Hello1 + info_path = os.path.join(client.current_folder, CONANINFO) + conan_info = ConanInfo.loads(load(info_path)) + + self.assertEqual("language=0\nstatic=True", conan_info.options.dumps()) + + # Try to upload and reuse the binaries + client.run("upload Hello1/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) + client.run("upload Hello2/0.1@lasote/stable --all") + self.assertEqual(str(client.user_io.out).count("Uploading package"), 1) + + client2 = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files2 = cpp_hello_conan_files("Hello3", "0.1", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"]) + + # WE need to copy the DLLs + client2.save(files2) + + client2.run("install . 
--build missing") + self.assertNotIn("Package installed in Hello0/0.1", client2.user_io.out) + self.assertNotIn("Building", client2.user_io.out) + client2.run("build .") + + self.assertNotIn("libhello0.a", client2.user_io.out) + self.assertNotIn("libhello00.a", client2.user_io.out) + self.assertNotIn("libhello1.a", client2.user_io.out) + self.assertNotIn("libhello2.a", client2.user_io.out) + self.assertNotIn("libhello3.a", client2.user_io.out) + client2.runner(command, cwd=client2.current_folder) + + self.assertEqual(['Hello Hello3', 'Hello Hello1', 'Hello Hello0', 'Hello Hello2', + 'Hello #'], + str(client2.user_io.out).splitlines()[-5:]) + files3 = cpp_hello_conan_files("Hello3", "0.2", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"], language=1) + + client2.save(files3) + client2.run('install -o language=1 --build missing') + client2.run('build') + self.assertNotIn("libhello0.a", client2.user_io.out) + self.assertNotIn("libhello00.a", client2.user_io.out) + self.assertNotIn("libhello1.a", client2.user_io.out) + self.assertNotIn("libhello2.a", client2.user_io.out) + self.assertNotIn("libhello3.a", client2.user_io.out) + client2.runner(command, cwd=client2.current_folder) + self.assertEqual(['Hola Hello3', 'Hola Hello1', + 'Hola Hello0', 'Hola Hello2', 'Hola #'], + str(client2.user_io.out).splitlines()[-5:]) + + # Issue 79, fixing private deps from current project + files3 = cpp_hello_conan_files("Hello3", "0.2", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable", + ("Hello0/0.1@lasote/stable", "private"), + ("Hello00/0.2@lasote/stable", "private")], + language=1) + + client2.save(files3, clean_first=True) + client2.run('install -o language=1 --build missing') + client2.run('build') + self.assertNotIn("libhello0.a", client2.user_io.out) + self.assertNotIn("libhello00.a", client2.user_io.out) + self.assertNotIn("libhello1.a", client2.user_io.out) + self.assertNotIn("libhello2.a", client2.user_io.out) + self.assertNotIn("libhello3.a", 
client2.user_io.out) + client2.runner(command, cwd=client2.current_folder) + self.assertEqual(['Hola Hello3', 'Hola Hello1', + 'Hola Hello0', 'Hola Hello2', 'Hola #', 'Hola Hello0', 'Hola #'], + str(client2.user_io.out).splitlines()[-7:]) diff --git a/testbed/conan-io__conan/conans/test/integration/profile_test.py b/testbed/conan-io__conan/conans/test/integration/profile_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8cfc047a22832c8d5a68797d3e214de5e7d7ad41 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/profile_test.py @@ -0,0 +1,310 @@ +import unittest +from conans.test.tools import TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.util.files import save, load +import os +import platform +from conans.paths import CONANFILE +from collections import OrderedDict +from conans.test.utils.test_files import temp_folder +from conans.test.utils.profiles import create_profile +from nose_parameterized import parameterized + + +conanfile_scope_env = """ +import platform +from conans import ConanFile + +class AConan(ConanFile): + name = "Hello0" + version = "0.1" + settings = "os", "compiler", "arch" + + def build(self): + self.output.warn("Scope myscope: %s" % self.scope.myscope) + self.output.warn("Scope otherscope: %s" % self.scope.otherscope) + self.output.warn("Scope undefined: %s" % self.scope.undefined) + # Print environment vars + if self.settings.os == "Windows": + self.run("SET") + else: + self.run("env") + +""" + + +class ProfileTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def bad_syntax_test(self): + self.client.save({CONANFILE: conanfile_scope_env}) + self.client.run("export lasote/stable") + + profile = ''' + [settings + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self.assertIn("Error reading 'clang' profile", 
self.client.user_io.out) + self.assertIn("Bad syntax", self.client.user_io.out) + + profile = ''' + [settings] + [invented] + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self.assertIn("Unrecognized field 'invented'", self.client.user_io.out) + self.assertIn("Error reading 'clang' profile", self.client.user_io.out) + + profile = ''' + [settings] + as + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self.assertIn("Error reading 'clang' profile: Invalid setting line 'as'", self.client.user_io.out) + + profile = ''' + [env] + as + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self.assertIn("Error reading 'clang' profile: Invalid env line 'as'", self.client.user_io.out) + + profile = ''' + [scopes] + as + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self.assertIn("Error reading 'clang' profile: Bad scope as", self.client.user_io.out) + + profile = ''' + [settings] + os = a value + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + # stripped "a value" + self.assertIn("'a value' is not a valid 'settings.os'", self.client.user_io.out) + + profile = ''' + [env] + ENV_VAR = a value + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr clang", ignore_error=True) + self._assert_env_variable_printed("ENV_VAR", "a value") + + profile = ''' + # Line with comments is not a problem + [env] + # 
Not even here + ENV_VAR = a value + ''' + save(self.client.client_cache.profile_path("clang"), profile) + self.client.run("install Hello0/0.1@lasote/stable --build -pr clang", ignore_error=True) + self._assert_env_variable_printed("ENV_VAR", "a value") + + @parameterized.expand([("", ), ("./local_profiles/", ), (temp_folder() + "/", )]) + def build_with_profile_test(self, path): + if path == "": + folder = self.client.client_cache.profiles_path + elif path == "./local_profiles/": + folder = os.path.join(self.client.current_folder, "local_profiles") + else: + folder = path + create_profile(folder, "scopes_env", settings={}, + scopes={}, # undefined scope do not apply to my packages + env=[("CXX", "/path/tomy/g++_build"), + ("CC", "/path/tomy/gcc_build")]) + + self.client.save({CONANFILE: conanfile_scope_env}) + self.client.run('build -pr "%sscopes_env"' % path) + self._assert_env_variable_printed("CC", "/path/tomy/gcc_build") + self._assert_env_variable_printed("CXX", "/path/tomy/g++_build") + + @parameterized.expand([("", ), ("./local_profiles/", ), (temp_folder() + "/", )]) + def build_with_missing_profile_test(self, path): + self.client.save({CONANFILE: conanfile_scope_env}) + error = self.client.run('build -pr "%sscopes_env"' % path, ignore_error=True) + self.assertTrue(error) + self.assertIn("ERROR: Specified profile '%sscopes_env' doesn't exist" % path, + self.client.user_io.out) + + def install_profile_env_test(self): + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + files["conanfile.py"] = conanfile_scope_env + + create_profile(self.client.client_cache.profiles_path, "envs", settings={}, + env=[("A_VAR", "A_VALUE")], package_env={"Hello0": [("OTHER_VAR", 2)]}) + + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr envs") + self._assert_env_variable_printed("A_VAR", "A_VALUE") + self._assert_env_variable_printed("OTHER_VAR", "2") + + # Override with package 
var + self.client.run("install Hello0/0.1@lasote/stable --build -pr envs -e Hello0:A_VAR=OTHER_VALUE") + self._assert_env_variable_printed("A_VAR", "OTHER_VALUE") + self._assert_env_variable_printed("OTHER_VAR", "2") + + # Override package var with package var + self.client.run("install Hello0/0.1@lasote/stable --build -pr envs -e Hello0:A_VAR=OTHER_VALUE -e Hello0:OTHER_VAR=3") + self._assert_env_variable_printed("A_VAR", "OTHER_VALUE") + self._assert_env_variable_printed("OTHER_VAR", "3") + + def install_profile_settings_test(self): + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + files["conanfile.py"] = files["conanfile.py"].replace("generators =", "generators = \"txt\",") + + # Create a profile and use it + profile_settings = {"compiler": "Visual Studio", + "compiler.version": "12", + "compiler.runtime": "MD", + "arch": "x86"} + + create_profile(self.client.client_cache.profiles_path, "vs_12_86", + settings=profile_settings, package_settings={}) + + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install --build missing -pr vs_12_86") + info = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + for setting, value in profile_settings.items(): + self.assertIn("%s=%s" % (setting, value), info) + + # Try to override some settings in install command + self.client.run("install --build missing -pr vs_12_86 -s compiler.version=14") + info = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + for setting, value in profile_settings.items(): + if setting != "compiler.version": + self.assertIn("%s=%s" % (setting, value), info) + else: + self.assertIn("compiler.version=14", info) + + # Use package settings in profile + tmp_settings = OrderedDict() + tmp_settings["compiler"] = "gcc" + tmp_settings["compiler.libcxx"] = "libstdc++11" + tmp_settings["compiler.version"] = "4.8" + package_settings = {"Hello0": tmp_settings} + create_profile(self.client.client_cache.profiles_path, + 
"vs_12_86_Hello0_gcc", settings=profile_settings, + package_settings=package_settings) + # Try to override some settings in install command + self.client.run("install --build missing -pr vs_12_86_Hello0_gcc -s compiler.version=14") + info = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("compiler=gcc", info) + self.assertIn("compiler.libcxx=libstdc++11", info) + + # If other package is specified compiler is not modified + package_settings = {"NoExistsRecipe": tmp_settings} + create_profile(self.client.client_cache.profiles_path, + "vs_12_86_Hello0_gcc", settings=profile_settings, + package_settings=package_settings) + # Try to override some settings in install command + self.client.run("install --build missing -pr vs_12_86_Hello0_gcc -s compiler.version=14") + info = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("compiler=Visual Studio", info) + self.assertNotIn("compiler.libcxx", info) + + # Mix command line package settings with profile + package_settings = {"Hello0": tmp_settings} + create_profile(self.client.client_cache.profiles_path, "vs_12_86_Hello0_gcc", + settings=profile_settings, package_settings=package_settings) + + # Try to override some settings in install command + self.client.run("install --build missing -pr vs_12_86_Hello0_gcc" + " -s compiler.version=14 -s Hello0:compiler.libcxx=libstdc++") + info = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("compiler=gcc", info) + self.assertNotIn("compiler.libcxx=libstdc++11", info) + self.assertIn("compiler.libcxx=libstdc++", info) + + def scopes_env_test(self): + # Create a profile and use it + create_profile(self.client.client_cache.profiles_path, "scopes_env", settings={}, + scopes={"Hello0:myscope": "1", + "ALL:otherscope": "2", + "undefined": "3"}, # undefined scope do not apply to my packages + env=[("CXX", "/path/tomy/g++"), ("CC", "/path/tomy/gcc")]) + self.client.save({CONANFILE: 
conanfile_scope_env}) + self.client.run("export lasote/stable") + self.client.run("install Hello0/0.1@lasote/stable --build missing -pr scopes_env") + + self.assertIn("Scope myscope: 1", self.client.user_io.out) + self.assertIn("Scope otherscope: 2", self.client.user_io.out) + self.assertIn("Scope undefined: None", self.client.user_io.out) + + self._assert_env_variable_printed("CC", "/path/tomy/gcc") + self._assert_env_variable_printed("CXX", "/path/tomy/g++") + + # The env variable shouldn't persist after install command + self.assertFalse(os.environ.get("CC", None) == "/path/tomy/gcc") + self.assertFalse(os.environ.get("CXX", None) == "/path/tomy/g++") + + def test_package_test(self): + test_conanfile = '''from conans.model.conan_file import ConanFile +from conans import CMake +import os + +class DefaultNameConan(ConanFile): + name = "DefaultName" + version = "0.1" + settings = "os", "compiler", "arch", "build_type" + requires = "Hello0/0.1@lasote/stable" + + def build(self): + # Print environment vars + # self.run('cmake %s %s' % (self.conanfile_directory, cmake.command_line)) + if self.settings.os == "Windows": + self.run('echo "My var is %ONE_VAR%"') + else: + self.run('echo "My var is $ONE_VAR"') + + def test(self): + pass + +''' + files = {} + files["conanfile.py"] = conanfile_scope_env + files["test_package/conanfile.py"] = test_conanfile + # Create a profile and use it + create_profile(self.client.client_cache.profiles_path, "scopes_env", settings={}, + scopes={}, env=[("ONE_VAR", "ONE_VALUE")]) + + self.client.save(files) + self.client.run("test_package --profile scopes_env") + + self._assert_env_variable_printed("ONE_VAR", "ONE_VALUE") + self.assertIn("My var is ONE_VALUE", str(self.client.user_io.out)) + + # Try now with package environment vars + create_profile(self.client.client_cache.profiles_path, "scopes_env2", settings={}, + scopes={}, package_env={"DefaultName": [("ONE_VAR", "IN_TEST_PACKAGE")], + "Hello0": [("ONE_VAR", "PACKAGE VALUE")]}) + + 
self.client.run("test_package --profile scopes_env2") + + self._assert_env_variable_printed("ONE_VAR", "PACKAGE VALUE") + self.assertIn("My var is IN_TEST_PACKAGE", str(self.client.user_io.out)) + + # Try now overriding some variables with command line + self.client.run("test_package --profile scopes_env2 -e DefaultName:ONE_VAR=InTestPackageOverride " + "-e Hello0:ONE_VAR=PackageValueOverride ") + + self._assert_env_variable_printed("ONE_VAR", "PackageValueOverride") + self.assertIn("My var is InTestPackageOverride", str(self.client.user_io.out)) + + # A global setting in command line won't override a scoped package variable + self.client.run("test_package --profile scopes_env2 -e ONE_VAR=AnotherValue") + self._assert_env_variable_printed("ONE_VAR", "PACKAGE VALUE") + + def _assert_env_variable_printed(self, name, value): + self.assertIn("%s=%s" % (name, value), self.client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/integration/python_build_test.py b/testbed/conan-io__conan/conans/test/integration/python_build_test.py new file mode 100644 index 0000000000000000000000000000000000000000..6156860ecdd487f23084c65fe5c935f2fc7ec7a1 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/python_build_test.py @@ -0,0 +1,176 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.paths import CONANFILE, CONANENV, BUILD_INFO +from conans.util.files import load +import os + + +conanfile = """from conans import ConanFile + +class ConanToolPackage(ConanFile): + name = "conantool" + version = "1.0" + exports = "*" + build_policy = "missing" + + def package(self): + self.copy("*") + + def package_info(self): + self.env_info.PYTHONPATH.append(self.package_folder) +""" + + +test = """def foo(output): + output.info("Hello Foo") +def bar(output): + output.info("Hello Bar") +def baz(output): + output.info("Hello Baz") +def boom(output): + output.info("Hello Boom") +""" + + +reuse = """from conans import ConanFile, tools + 
+class ToolsTest(ConanFile): + name = "Consumer" + version = "0.1" + requires = "conantool/1.0@lasote/stable" + + def source(self): + with tools.pythonpath(self): + import mytest + mytest.baz(self.output) + + def build(self): + with tools.pythonpath(self): + import mytest + mytest.foo(self.output) + + def package(self): + with tools.pythonpath(self): + import mytest + mytest.boom(self.output) + + def package_info(self): + with tools.pythonpath(self): + import mytest + mytest.bar(self.output) +""" + + +class PythonBuildTest(unittest.TestCase): + + def reuse_test(self): + client = TestClient() + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("install . -g txt -g env") + content = load(os.path.join(client.current_folder, CONANENV)) + self.assertIn("PYTHONPATH", content) + self.assertIn("Hello Bar", client.user_io.out) + self.assertNotIn("Hello Foo", client.user_io.out) + client.run("build") + self.assertNotIn("Hello Bar", client.user_io.out) + self.assertIn("Hello Foo", client.user_io.out) + + client.run("export lasote/stable") + client.run("install Consumer/0.1@lasote/stable --build") + lines = [line.split(":")[1] for line in str(client.user_io.out).splitlines() + if line.startswith("Consumer/0.1@lasote/stable: Hello")] + self.assertEqual([' Hello Baz', ' Hello Foo', ' Hello Boom', ' Hello Bar'], + lines) + + def upload_reuse_test(self): + server = TestServer() + servers = {"default": server} + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("export lasote/stable") + client.run("install Consumer/0.1@lasote/stable --build") + lines = [line.split(":")[1] for line in str(client.user_io.out).splitlines() + if 
line.startswith("Consumer/0.1@lasote/stable: Hello")] + self.assertEqual([' Hello Baz', ' Hello Foo', ' Hello Boom', ' Hello Bar'], + lines) + + client.run("upload conantool/1.0@lasote/stable --all") + client.run("remove * -f") + client.run("search") + self.assertNotIn("lasote/stable", client.user_io.out) + client.run("export lasote/stable") + client.run("install Consumer/0.1@lasote/stable --build") + lines = [line.split(":")[1] for line in str(client.user_io.out).splitlines() + if line.startswith("Consumer/0.1@lasote/stable: Hello")] + self.assertEqual([' Hello Baz', ' Hello Foo', ' Hello Boom', ' Hello Bar'], + lines) + # Try again, just in case + client.run("upload conantool/1.0@lasote/stable --all") + client.run("remove * -f -r=default") + client.run("upload conantool/1.0@lasote/stable --all") + + def basic_install_test(self): + client = TestClient() + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("export lasote/stable") + client.run("install Consumer/0.1@lasote/stable --build") + lines = [line.split(":")[1] for line in str(client.user_io.out).splitlines() + if line.startswith("Consumer/0.1@lasote/stable: Hello")] + self.assertEqual([' Hello Baz', ' Hello Foo', ' Hello Boom', ' Hello Bar'], + lines) + + def basic_package_test(self): + client = TestClient() + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("export lasote/stable") + client.run("install Consumer/0.1@lasote/stable --build", ignore_error=True) + lines = [line.split(":")[1] for line in str(client.user_io.out).splitlines() + if line.startswith("Consumer/0.1@lasote/stable: Hello")] + self.assertEqual([' Hello Baz', ' Hello Foo', ' Hello Boom', ' Hello Bar'], + lines) + + client.run("package Consumer/0.1@lasote/stable") + + def 
basic_source_test(self): + client = TestClient() + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("export lasote/stable") + client.run("install -g txt -g env") + client.run("source Consumer/0.1@lasote/stable") + self.assertIn("Hello Baz", client.user_io.out) + self.assertNotIn("Hello Foo", client.user_io.out) + self.assertNotIn("Hello Bar", client.user_io.out) + self.assertNotIn("Hello Boom", client.user_io.out) + + def errors_test(self): + client = TestClient() + client.save({CONANFILE: conanfile, "__init__.py": "", "mytest.py": test}) + client.run("export lasote/stable") + + client.save({CONANFILE: reuse}, clean_first=True) + client.run("export lasote/stable") + client.run("install") + # BUILD_INFO is created by default, remove it to check message + os.remove(os.path.join(client.current_folder, BUILD_INFO)) + client.run("source Consumer/0.1@lasote/stable", ignore_error=True) + self.assertIn("Consumer/0.1@lasote/stable: WARN: conanenv.txt file not found", + client.user_io.out) + self.assertIn("Consumer/0.1@lasote/stable: WARN: conanbuildinfo.txt file not found", + client.user_io.out) + # Output in py3 is different, uses single quote + self.assertIn("No module named mytest", str(client.user_io.out).replace("'", "")) diff --git a/testbed/conan-io__conan/conans/test/integration/python_diamond_test.py b/testbed/conan-io__conan/conans/test/integration/python_diamond_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0cd1f7d71c1a1cea9235d103f2e9481ee131ad44 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/python_diamond_test.py @@ -0,0 +1,48 @@ +import unittest +from conans.test.tools import TestClient +from conans.test.utils.python_test_files import py_hello_conan_files +import platform + + +class PythonDiamondTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def 
_export_upload(self, name, version=None, deps=None): + files = py_hello_conan_files(name, version, deps) + self.client.save(files, clean_first=True) + self.client.run("export lasote/stable") + + def reuse_test(self): + self._export_upload("Hello0", "0.1") + self._export_upload("Hello1", "0.1", ["Hello0/0.1@lasote/stable"]) + self._export_upload("Hello2", "0.1", ["Hello0/0.1@lasote/stable"]) + self._export_upload("Hello3", "0.1", ["Hello1/0.1@lasote/stable", + "Hello2/0.1@lasote/stable"]) + + files3 = py_hello_conan_files("Hello4", "0.1", ["Hello3/0.1@lasote/stable"]) + self.client.save(files3, clean_first=True) + + self.client.run("install .") + self.assertIn("Hello1/0.1@lasote/stable: Build stuff Hello0", self.client.user_io.out) + self.assertIn("Hello2/0.1@lasote/stable: Build stuff Hello0", self.client.user_io.out) + + self.assertIn(" ".join(["Hello3/0.1@lasote/stable: Build stuff Hello1", + "Hello3/0.1@lasote/stable: Build stuff Hello0", + "Hello3/0.1@lasote/stable: Build stuff Hello2", + "Hello3/0.1@lasote/stable: Build stuff Hello0"]), + " ".join(str(self.client.user_io.out).splitlines())) + self.assertNotIn("Project: Build stuff Hello3", self.client.user_io.out) + + self.client.run("build") + self.assertIn("Project: Build stuff Hello3", self.client.user_io.out) + + if platform.system() == "Windows": + command = "activate && python main.py" + else: + command = 'bash -c "source activate.sh && python main.py"' + self.client.runner(command, cwd=self.client.current_folder) + self.assertEqual(['Hello Hello4', 'Hello Hello3', 'Hello Hello1', 'Hello Hello0', + 'Hello Hello2', 'Hello Hello0'], + str(self.client.user_io.out).splitlines()[-6:]) diff --git a/testbed/conan-io__conan/conans/test/integration/same_userchannel_test.py b/testbed/conan-io__conan/conans/test/integration/same_userchannel_test.py new file mode 100644 index 0000000000000000000000000000000000000000..08f6b7cd3422e09875803a7ec129e1b3ff25b6cc --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/integration/same_userchannel_test.py @@ -0,0 +1,91 @@ +import unittest +from conans.test.tools import TestClient +import os + + +class SameUserChannelTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + conanfile = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + build_policy = "missing" + + def build(self): + self.output.info("Building %s") +""" + for channel in ("lasote/stable", "other/testing"): + self.client.save({"conanfile.py": conanfile % channel}) + self.client.run("export %s" % channel) + + self.conanfile = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + build_policy = "missing" + + def requirements(self): + self.requires("Say/0.1@%s/%s" % (self.user, self.channel)) + + def build(self): + self.output.info("Building %s/%s" % (self.user, self.channel) ) +""" + + self.test_conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + requires = "Hello/0.1@lasote/stable" + + def test(self): + self.conanfile_directory +""" + self.client.save({"conanfile.py": self.conanfile, + "test/conanfile.py": self.test_conanfile}) + + def test_testpackage(self): + self.client.run("test_package") + self.assertIn("Say/0.1@lasote/stable: Building lasote/stable", self.client.user_io.out) + self.assertIn("Hello/0.1@lasote/stable: Building lasote/stable", self.client.user_io.out) + self.assertNotIn("other/testing", self.client.user_io.out) + + self.client.save({"conanfile.py": self.conanfile, + "test/conanfile.py": self.test_conanfile.replace("lasote/stable", + "other/testing")}) + self.client.run("test_package") + self.assertIn("Say/0.1@other/testing: Building other/testing", self.client.user_io.out) + self.assertIn("Hello/0.1@other/testing: Building other/testing", self.client.user_io.out) + self.assertNotIn("lasote/stable", self.client.user_io.out) + + def 
test_local_commands(self): + error = self.client.run("install", ignore_error=True) + self.assertEqual(error, True) + self.assertIn("ERROR: Conanfile: CONAN_USERNAME environment variable not defined, " + "but self.user is used in conanfile", self.client.user_io.out) + + os.environ["CONAN_USERNAME"] = "lasote" + error = self.client.run("install", ignore_error=True) + self.assertEqual(error, True) + self.assertIn("ERROR: Conanfile: CONAN_CHANNEL environment variable not defined, " + "but self.channel is used in conanfile", self.client.user_io.out) + + os.environ["CONAN_CHANNEL"] = "stable" + self.client.run("install") + self.assertIn("Say/0.1@lasote/stable: Building lasote/stable", self.client.user_io.out) + self.assertNotIn("other/testing", self.client.user_io.out) + + os.environ["CONAN_USERNAME"] = "other" + os.environ["CONAN_CHANNEL"] = "testing" + self.client.run("install") + self.assertIn("Say/0.1@other/testing: Building other/testing", self.client.user_io.out) + self.assertNotIn("lasote/stable", self.client.user_io.out) + + del os.environ["CONAN_USERNAME"] + del os.environ["CONAN_CHANNEL"] diff --git a/testbed/conan-io__conan/conans/test/integration/settings_override_test.py b/testbed/conan-io__conan/conans/test/integration/settings_override_test.py new file mode 100644 index 0000000000000000000000000000000000000000..099e90dec1b0586b5ce89f695dac3ef5af836b2c --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/settings_override_test.py @@ -0,0 +1,90 @@ +import unittest +from conans.test.tools import TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.paths import CONANFILE, CONANINFO +from conans.model.ref import ConanFileReference +from conans.util.files import load +import os +from conans import tools + + +class SettingsOverrideTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + files = cpp_hello_conan_files(name="MinGWBuild", version="0.1", build=False) + 
self._patch_build_to_print_compiler(files) + + self.client.save(files) + self.client.run("export lasote/testing") + + def test_override(self): + + files = cpp_hello_conan_files(name="VisualBuild", + version="0.1", build=False, deps=["MinGWBuild/0.1@lasote/testing"]) + self._patch_build_to_print_compiler(files) + self.client.save(files) + self.client.run("export lasote/testing") + self.client.run("install VisualBuild/0.1@lasote/testing --build missing -s compiler='Visual Studio' " + "-s compiler.version=14 -s compiler.runtime=MD " + "-s MinGWBuild:compiler='gcc' -s MinGWBuild:compiler.libcxx='libstdc++' " + "-s MinGWBuild:compiler.version=4.8") + + self.assertIn("COMPILER=> MinGWBuild gcc", self.client.user_io.out) + self.assertIn("COMPILER=> VisualBuild Visual Studio", self.client.user_io.out) + + # CHECK CONANINFO FILE + packs_dir = self.client.paths.packages(ConanFileReference.loads("MinGWBuild/0.1@lasote/testing")) + pack_dir = os.path.join(packs_dir, os.listdir(packs_dir)[0]) + conaninfo = load(os.path.join(pack_dir, CONANINFO)) + self.assertIn("compiler=gcc", conaninfo) + + # CHECK CONANINFO FILE + packs_dir = self.client.paths.packages(ConanFileReference.loads("VisualBuild/0.1@lasote/testing")) + pack_dir = os.path.join(packs_dir, os.listdir(packs_dir)[0]) + conaninfo = load(os.path.join(pack_dir, CONANINFO)) + self.assertIn("compiler=Visual Studio", conaninfo) + self.assertIn("compiler.version=14", conaninfo) + + def test_non_existing_setting(self): + files = cpp_hello_conan_files(name="VisualBuild", + version="0.1", build=False, deps=["MinGWBuild/0.1@lasote/testing"]) + self.client.save(files) + self.client.run("export lasote/testing") + self.client.run("install VisualBuild/0.1@lasote/testing --build missing -s compiler='Visual Studio' " + "-s compiler.version=14 -s compiler.runtime=MD " + "-s MinGWBuild:missingsetting='gcc' ", ignore_error=True) + self.assertIn("settings.missingsetting' doesn't exist", self.client.user_io.out) + + def 
test_override_in_non_existing_recipe(self): + files = cpp_hello_conan_files(name="VisualBuild", + version="0.1", build=False, deps=["MinGWBuild/0.1@lasote/testing"]) + self._patch_build_to_print_compiler(files) + self.client.save(files) + self.client.run("export lasote/testing") + self.client.run("install VisualBuild/0.1@lasote/testing --build missing -s compiler='Visual Studio' " + "-s compiler.version=14 -s compiler.runtime=MD " + "-s MISSINGID:compiler='gcc' ") + + self.assertIn("COMPILER=> MinGWBuild Visual Studio", self.client.user_io.out) + self.assertIn("COMPILER=> VisualBuild Visual Studio", self.client.user_io.out) + + def test_override_setting_with_env_variables(self): + files = cpp_hello_conan_files(name="VisualBuild", + version="0.1", build=False, deps=["MinGWBuild/0.1@lasote/testing"]) + self._patch_build_to_print_compiler(files) + self.client.save(files) + self.client.run("export lasote/testing") + with tools.environment_append({"CONAN_ENV_COMPILER": "Visual Studio", + "CONAN_ENV_COMPILER_VERSION": "14", + "CONAN_ENV_COMPILER_RUNTIME": "MD"}): + self.client.run("install VisualBuild/0.1@lasote/testing --build missing") + + self.assertIn("COMPILER=> MinGWBuild Visual Studio", self.client.user_io.out) + + def _patch_build_to_print_compiler(self, files): + files[CONANFILE] = files[CONANFILE] + ''' + def build(self): + self.output.warn("COMPILER=> %s %s" % (self.name, str(self.settings.compiler))) + +''' diff --git a/testbed/conan-io__conan/conans/test/integration/shared_chain_test.py b/testbed/conan-io__conan/conans/test/integration/shared_chain_test.py new file mode 100644 index 0000000000000000000000000000000000000000..500603e6b6a294b1a4c86713d54ad4b6c8991421 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/shared_chain_test.py @@ -0,0 +1,46 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.model.ref import ConanFileReference +from 
nose.plugins.attrib import attr +from conans.util.files import rmdir +import shutil +import os + + +@attr("slow") +class SharedChainTest(unittest.TestCase): + + def setUp(self): + self.static = False + test_server = TestServer() + self.servers = {"default": test_server} + + def _export_upload(self, name, version=None, deps=None): + conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + dll_export = conan.default_compiler_visual_studio + files = cpp_hello_conan_files(name, version, deps, static=False, dll_export=dll_export) + conan_ref = ConanFileReference(name, version, "lasote", "stable") + conan.save(files, clean_first=True) + + conan.run("export lasote/stable") + conan.run("install '%s' --build missing" % str(conan_ref)) + conan.run("upload %s --all" % str(conan_ref)) + rmdir(conan.current_folder) + shutil.rmtree(conan.paths.store, ignore_errors=True) + + def uploaded_chain_test(self): + self._export_upload("Hello0", "0.1") + self._export_upload("Hello1", "0.1", ["Hello0/0.1@lasote/stable"]) + + client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + files2 = cpp_hello_conan_files("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], static=True) + client.save(files2) + + client.run("install . 
--build missing") + client.run("build .") + command = os.sep.join([".", "bin", "say_hello"]) + + client.runner(command, cwd=client.current_folder) + self.assertEqual(['Hello Hello2', 'Hello Hello1', 'Hello Hello0'], + str(client.user_io.out).splitlines()[-3:]) diff --git a/testbed/conan-io__conan/conans/test/integration/symlinks_test.py b/testbed/conan-io__conan/conans/test/integration/symlinks_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2e968ba09b89f803351141b5ba7b01de959cf897 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/symlinks_test.py @@ -0,0 +1,87 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.util.files import load, save +from conans.model.ref import PackageReference +import os +import platform + +conanfile = """ +from conans import ConanFile +from conans.util.files import save +import os + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + + def build(self): + save("file1.txt", "Hello1") + os.symlink("file1.txt", "file1.txt.1") + + def package(self): + self.copy("*.txt*", links=True) +""" + +test_conanfile = """[requires] +Hello/0.1@lasote/stable + +[imports] +., * -> . 
+""" + + +class SymLinksTest(unittest.TestCase): + + def _check(self, client, ref, build=True): + folders = [client.paths.package(ref), client.current_folder] + if build: + folders.append(client.paths.build(ref)) + for base in folders: + filepath = os.path.join(base, "file1.txt") + link = os.path.join(base, "file1.txt.1") + self.assertEqual(os.readlink(link), "file1.txt") + file1 = load(filepath) + self.assertEqual("Hello1", file1) + file1 = load(link) + self.assertEqual("Hello1", file1) + # Save any different string, random, or the base path + save(filepath, base) + self.assertEqual(load(link), base) + + def basic_test(self): + if platform.system() == "Windows": + return + + client = TestClient() + client.save({"conanfile.py": conanfile, + "conanfile.txt": test_conanfile}) + client.run("export lasote/stable") + client.run("install --build -f=conanfile.txt") + ref = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + self._check(client, ref) + + client.run("install --build -f=conanfile.txt") + self._check(client, ref) + + def upload_test(self): + if platform.system() == "Windows": + return + + test_server = TestServer() + servers = {"default": test_server} + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + + client.save({"conanfile.py": conanfile, + "conanfile.txt": test_conanfile}) + client.run("export lasote/stable") + client.run("install --build -f=conanfile.txt") + ref = PackageReference.loads("Hello/0.1@lasote/stable:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + client.run("upload Hello/0.1@lasote/stable --all") + client.run('remove "*" -f') + client.save({"conanfile.txt": test_conanfile}, clean_first=True) + client.run("install") + self._check(client, ref, build=False) diff --git a/testbed/conan-io__conan/conans/test/integration/syncronize_test.py b/testbed/conan-io__conan/conans/test/integration/syncronize_test.py new file mode 100644 index 
0000000000000000000000000000000000000000..0115d85db0f07a6287f54c1f30982ade8bf793eb --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/syncronize_test.py @@ -0,0 +1,122 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference, PackageReference +import os +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +from conans.util.files import load, save +from conans.test.utils.test_files import uncompress_packaged_files, temp_folder +from conans.paths import EXPORT_TGZ_NAME, CONAN_MANIFEST, PACKAGE_TGZ_NAME +from conans.tools import untargz +from conans.model.manifest import FileTreeManifest + + +@attr("slow") +class SynchronizeTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def upload_test(self): + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1") + files["to_be_deleted.txt"] = "delete me" + files["to_be_deleted2.txt"] = "delete me2" + + remote_paths = self.client.servers["default"].paths + server_conan_path = remote_paths.export(conan_reference) + + self.client.save(files) + self.client.run("export lasote/stable") + + # Upload conan file + self.client.run("upload %s" % str(conan_reference)) + + # Verify the files are there + self.assertTrue(os.path.exists(os.path.join(server_conan_path, EXPORT_TGZ_NAME))) + tmp = temp_folder() + untargz(os.path.join(server_conan_path, EXPORT_TGZ_NAME), tmp) + self.assertTrue(load(os.path.join(tmp, "to_be_deleted.txt")), "delete me") + self.assertTrue(load(os.path.join(tmp, "to_be_deleted2.txt")), "delete me2") + + # Now delete local files export and upload and check that they are not in server + os.remove(os.path.join(self.client.current_folder, "to_be_deleted.txt")) + 
self.client.run("export lasote/stable") + self.client.run("upload %s" % str(conan_reference)) + self.assertTrue(os.path.exists(os.path.join(server_conan_path, EXPORT_TGZ_NAME))) + tmp = temp_folder() + untargz(os.path.join(server_conan_path, EXPORT_TGZ_NAME), tmp) + self.assertFalse(os.path.exists(os.path.join(tmp, "to_be_deleted.txt"))) + self.assertTrue(os.path.exists(os.path.join(tmp, "to_be_deleted2.txt"))) + + # Now modify a file, and delete other, and put a new one. + files["to_be_deleted2.txt"] = "modified content" + files["new_file.lib"] = "new file" + del files["to_be_deleted.txt"] + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("upload %s" % str(conan_reference)) + + # Verify all is correct + self.assertTrue(os.path.exists(os.path.join(server_conan_path, EXPORT_TGZ_NAME))) + tmp = temp_folder() + untargz(os.path.join(server_conan_path, EXPORT_TGZ_NAME), tmp) + self.assertTrue(load(os.path.join(tmp, "to_be_deleted2.txt")), "modified content") + self.assertTrue(load(os.path.join(tmp, "new_file.lib")), "new file") + self.assertFalse(os.path.exists(os.path.join(tmp, "to_be_deleted.txt"))) + + ########################## + # Now try with the package + ########################## + + self.client.run("install %s --build missing" % str(conan_reference)) + # Upload package + package_ids = self.client.paths.conan_packages(conan_reference) + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + + # Check that conans exists on server + package_reference = PackageReference(conan_reference, str(package_ids[0])) + package_server_path = remote_paths.package(package_reference) + self.assertTrue(os.path.exists(package_server_path)) + + # Add a new file to package (artificially), upload again and check + pack_path = self.client.paths.package(package_reference) + new_file_source_path = os.path.join(pack_path, "newlib.lib") + save(new_file_source_path, "newlib") + os.unlink(os.path.join(pack_path, 
PACKAGE_TGZ_NAME)) # Force new tgz + + self._create_manifest(package_reference) + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + + folder = uncompress_packaged_files(remote_paths, package_reference) + remote_file_path = os.path.join(folder, "newlib.lib") + self.assertTrue(os.path.exists(remote_file_path)) + + # Now modify the file and check again + save(new_file_source_path, "othercontent") + self._create_manifest(package_reference) + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + folder = uncompress_packaged_files(remote_paths, package_reference) + remote_file_path = os.path.join(folder, "newlib.lib") + self.assertTrue(os.path.exists(remote_file_path)) + self.assertTrue(load(remote_file_path), "othercontent") + + # Now delete the file and check again + os.remove(new_file_source_path) + self._create_manifest(package_reference) + os.unlink(os.path.join(pack_path, PACKAGE_TGZ_NAME)) # Force new tgz + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + folder = uncompress_packaged_files(remote_paths, package_reference) + remote_file_path = os.path.join(folder, "newlib.lib") + + self.assertFalse(os.path.exists(remote_file_path)) + self.assertNotEquals(remote_file_path, new_file_source_path) + + def _create_manifest(self, package_reference): + # Create the manifest to be able to upload the package + pack_path = self.client.paths.package(package_reference) + digest_path = self.client.client_cache.digestfile_package(package_reference) + expected_manifest = FileTreeManifest.create(os.path.dirname(digest_path)) + save(os.path.join(pack_path, CONAN_MANIFEST), str(expected_manifest)) diff --git a/testbed/conan-io__conan/conans/test/integration/system_reqs_test.py b/testbed/conan-io__conan/conans/test/integration/system_reqs_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2fcb9665f0c2bd6fc738346c8f5c42090540596f --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/integration/system_reqs_test.py @@ -0,0 +1,124 @@ +import unittest +from conans.test.tools import TestClient +import os +from conans.model.ref import PackageReference, ConanFileReference +from conans.util.files import load +from conans.errors import ConanException + +base_conanfile = ''' +from conans import ConanFile + +class TestSystemReqs(ConanFile): + name = "Test" + version = "0.1" + options = {"myopt": [True, False]} + default_options = "myopt=True" + + def system_requirements(self): + self.output.info("*+Running system requirements+*") + %GLOBAL% + return "Installed my stuff" +''' + + +class SystemReqsTest(unittest.TestCase): + + def local_system_requirements_test(self): + client = TestClient() + files = {'conanfile.py': base_conanfile.replace("%GLOBAL%", "")} + client.save(files) + client.run("install .") + self.assertIn("*+Running system requirements+*", client.user_io.out) + + files = {'conanfile.py': base_conanfile.replace("%GLOBAL%", "self.run('fake command!')")} + client.save(files) + with self.assertRaisesRegexp(Exception, "Command failed"): + client.run("install .") + + def per_package_test(self): + client = TestClient() + files = {'conanfile.py': base_conanfile.replace("%GLOBAL%", "")} + client.save(files) + client.run("export user/testing") + client.run("install Test/0.1@user/testing --build missing") + self.assertIn("*+Running system requirements+*", client.user_io.out) + conan_ref = ConanFileReference.loads("Test/0.1@user/testing") + self.assertFalse(os.path.exists(client.paths.system_reqs(conan_ref))) + package_ref = PackageReference(conan_ref, "f0ba3ca2c218df4a877080ba99b65834b9413798") + load_file = load(client.paths.system_reqs_package(package_ref)) + self.assertIn("Installed my stuff", load_file) + + # Run again + client.run("install Test/0.1@user/testing --build missing") + self.assertNotIn("*+Running system requirements+*", client.user_io.out) + 
self.assertFalse(os.path.exists(client.paths.system_reqs(conan_ref))) + load_file = load(client.paths.system_reqs_package(package_ref)) + self.assertIn("Installed my stuff", load_file) + + # Run with different option + client.run("install Test/0.1@user/testing -o myopt=False --build missing") + self.assertIn("*+Running system requirements+*", client.user_io.out) + self.assertFalse(os.path.exists(client.paths.system_reqs(conan_ref))) + package_ref2 = PackageReference(conan_ref, "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + load_file = load(client.paths.system_reqs_package(package_ref2)) + self.assertIn("Installed my stuff", load_file) + + # remove packages + client.run("remove Test* -f -p 544", ignore_error=True) + self.assertTrue(os.path.exists(client.paths.system_reqs_package(package_ref))) + client.run("remove Test* -f -p f0ba3ca2c218df4a877080ba99b65834b9413798") + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + self.assertTrue(os.path.exists(client.paths.system_reqs_package(package_ref2))) + client.run("remove Test* -f -p 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref2))) + + def global_test(self): + client = TestClient() + files = {'conanfile.py': base_conanfile.replace("%GLOBAL%", + "self.global_system_requirements=True")} + client.save(files) + client.run("export user/testing") + client.run("install Test/0.1@user/testing --build missing") + self.assertIn("*+Running system requirements+*", client.user_io.out) + conan_ref = ConanFileReference.loads("Test/0.1@user/testing") + package_ref = PackageReference(conan_ref, "a527106fd9f2e3738a55b02087c20c0a63afce9d") + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + load_file = load(client.paths.system_reqs(conan_ref)) + self.assertIn("Installed my stuff", load_file) + + # Run again + 
client.run("install Test/0.1@user/testing --build missing") + self.assertNotIn("*+Running system requirements+*", client.user_io.out) + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + load_file = load(client.paths.system_reqs(conan_ref)) + self.assertIn("Installed my stuff", load_file) + + # Run with different option + client.run("install Test/0.1@user/testing -o myopt=False --build missing") + self.assertNotIn("*+Running system requirements+*", client.user_io.out) + package_ref2 = PackageReference(conan_ref, "54c9626b48cefa3b819e64316b49d3b1e1a78c26") + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref2))) + load_file = load(client.paths.system_reqs(conan_ref)) + self.assertIn("Installed my stuff", load_file) + + # remove packages + client.run("remove Test* -f -p") + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref))) + self.assertFalse(os.path.exists(client.paths.system_reqs_package(package_ref2))) + self.assertFalse(os.path.exists(client.paths.system_reqs(conan_ref))) + + def wrong_output_test(self): + client = TestClient() + files = {'conanfile.py': + base_conanfile.replace("%GLOBAL%", "").replace('"Installed my stuff"', 'None')} + client.save(files) + client.run("export user/testing") + client.run("install Test/0.1@user/testing --build missing") + self.assertIn("*+Running system requirements+*", client.user_io.out) + conan_ref = ConanFileReference.loads("Test/0.1@user/testing") + self.assertFalse(os.path.exists(client.paths.system_reqs(conan_ref))) + package_ref = PackageReference(conan_ref, "f0ba3ca2c218df4a877080ba99b65834b9413798") + load_file = load(client.paths.system_reqs_package(package_ref)) + self.assertEqual('', load_file) diff --git a/testbed/conan-io__conan/conans/test/integration/upload_test.py b/testbed/conan-io__conan/conans/test/integration/upload_test.py new file mode 100644 
index 0000000000000000000000000000000000000000..3657cae93f34040ddaeb754bc7048556c4d51119 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/upload_test.py @@ -0,0 +1,109 @@ +import unittest +from conans.test.tools import TestServer, TestClient +from conans.model.ref import ConanFileReference, PackageReference +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from nose.plugins.attrib import attr +import os +from conans.test.utils.test_files import uncompress_packaged_files + + +class CompleteFlowTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def reuse_uploaded_tgz_test(self): + '''Download packages from a remote, then copy to another channel + and reupload them. Because they have not changed, the tgz is not created + again''' + + # UPLOAD A PACKAGE + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", need_patch=True, build=False) + files["another_export_file.lib"] = "to compress" + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install %s --build missing" % str(conan_reference)) + self.client.run("upload %s --all" % str(conan_reference)) + self.assertIn("Compressing exported files", self.client.user_io.out) + self.assertIn("Compressing package", self.client.user_io.out) + + # UPLOAD TO A DIFFERENT CHANNEL WITHOUT COMPRESS AGAIN + self.client.run("copy %s lasote/testing" % str(conan_reference)) + self.client.run("upload Hello0/0.1@lasote/testing --all") + self.assertNotIn("Compressing exported files", self.client.user_io.out) + self.assertNotIn("Compressing package", self.client.user_io.out) + + def reuse_downloaded_tgz_test(self): + '''Download packages from a remote, then copy to another channel + and reupload them. 
It needs to compress it again, not tgz is kept''' + + # UPLOAD A PACKAGE + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", need_patch=True, build=False) + files["another_export_file.lib"] = "to compress" + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install %s --build missing" % str(conan_reference)) + self.client.run("upload %s --all" % str(conan_reference)) + self.assertIn("Compressing exported files", self.client.user_io.out) + self.assertIn("Compressing package", self.client.user_io.out) + + # Other user downloads the package + # THEN A NEW USER DOWNLOADS THE PACKAGES AND UPLOADS COMPRESSING AGAIN + # BECAUSE ONLY TGZ IS KEPT WHEN UPLOADING + other_client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + other_client.run("install Hello0/0.1@lasote/stable --all") + other_client.run("upload Hello0/0.1@lasote/stable --all") + self.assertIn("Compressing exported files", self.client.user_io.out) + self.assertIn("Compressing package", self.client.user_io.out) + + def upload_only_tgz_if_needed_test(self): + conan_reference = ConanFileReference.loads("Hello0/0.1@lasote/stable") + files = cpp_hello_conan_files("Hello0", "0.1", need_patch=True, build=False) + files["lib/another_export_file.lib"] = "to compress" + self.client.save(files) + self.client.run("export lasote/stable") + self.client.run("install %s --build missing" % str(conan_reference)) + + # Upload conans + self.client.run("upload %s" % str(conan_reference)) + self.assertIn("Compressing exported", str(self.client.user_io.out)) + + # Not needed to tgz again + self.client.run("upload %s" % str(conan_reference)) + self.assertNotIn("Compressing exported", str(self.client.user_io.out)) + + # Check that conans exists on server + server_paths = self.servers["default"].paths + conan_path = server_paths.export(conan_reference) + self.assertTrue(os.path.exists(conan_path)) + 
package_ids = self.client.paths.conan_packages(conan_reference) + package_ref = PackageReference(conan_reference, package_ids[0]) + + # Upload package + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + self.assertIn("Compressing package", str(self.client.user_io.out)) + + # Not needed to tgz again + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + self.assertNotIn("Compressing package", str(self.client.user_io.out)) + + # If we install the package again will be removed and re tgz + self.client.run("install %s --build missing" % str(conan_reference)) + # Upload package + self.client.run("upload %s -p %s" % (str(conan_reference), str(package_ids[0]))) + self.assertNotIn("Compressing package", str(self.client.user_io.out)) + + # Check library on server + self._assert_library_exists_in_server(package_ref, server_paths) + + def _assert_library_exists_in_server(self, package_ref, paths): + folder = uncompress_packaged_files(paths, package_ref) + self._assert_library_files(folder) + + def _assert_library_files(self, path): + libraries = os.listdir(os.path.join(path, "lib")) + self.assertEquals(len(libraries), 1) diff --git a/testbed/conan-io__conan/conans/test/integration/vcvars_env_test.py b/testbed/conan-io__conan/conans/test/integration/vcvars_env_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c300bead29f15b54a5403defc0d4a6d4dfc402a2 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/vcvars_env_test.py @@ -0,0 +1,85 @@ +import unittest +from conans.test.tools import TestClient +from conans.util.files import save +import os + + +class VCVarsTest(unittest.TestCase): + + def basic(self): + save("test1.bat", """@echo off +set MYVAR=OK +set MYVAR2=OK +""") + + save("test2.bat", """@echo off +if defined MYVAR (SET RESPONSE=%MYVAR%;Yeah!) else (SET RESPONSE=Nop!) 
+""") + + save("test3.bat", """@echo off +set VAR1=HOLA +set VAR1=ADIOS;%VAR1% +set VAR1=BYE;%VAR1% +""") + + def call(cmd): + print (cmd, " => \t",) + os.system(cmd) + # print "MYVAR ", os.environ.get("MYVAR") + print("") + print("") + call("set MYVAR=HOLA && echo %^MYVAR%") + call("call set MYVAR=HOLA && echo %MYVAR%") + call("call set MYVAR=HOLA && echo %^MYVAR%") + call("call set MYVAR=HOLA && call echo %^MYVAR%") + call("if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) && call echo %^RESPONSE%") + call("if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) & call echo %^RESPONSE%") + call("(if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!)) && call echo %^RESPONSE%") + call("(if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!)) & call echo %^RESPONSE%") + call("call set MYVAR=OK && if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) && call echo %^RESPONSE%") + call("call set MYVAR=OK && (if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!)) && call echo %^RESPONSE%") + call("call set MYVAR=OK && if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) & call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) && call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) & call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined %MYVAR% (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) && call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined %MYVAR% (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) & call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined %^MYVAR% (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) && call echo %^RESPONSE%") + call("call set MYVAR=OK & if defined %^MYVAR% (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!) 
& call echo %^RESPONSE%") + + call("call test1.bat && echo %^MYVAR%") + call("call test1.bat & echo %^MYVAR%") + call("call test1.bat && call echo %^MYVAR%") + call("call test1.bat & call echo %^MYVAR%") + call("call test1.bat && call test2.bat && call echo %RESPONSE%") + call("call test1.bat && call (if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!)) && call echo %RESPONSE%") + call('call test1.bat && call "if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!))" && call echo %RESPONSE%') + call("call test1.bat && (if defined MYVAR (SET RESPONSE=Yeah!) else (SET RESPONSE=Nop!)) && call echo %RESPONSE%") + call('call test1.bat && (if "%MYVAR%"=="" (SET RESPONSE=Nop!) else (SET RESPONSE=%^MYVAR%;Yeah!)) ' + "&& (if defined MYVAR2 (SET RESPONSE2=Oui!) else (SET RESPONSE2=Nein!)) " + "&& call echo %RESPONSE% %RESPONSE2%") + + call('SET "VAR1=VALUE1" SET "VAR2=VALUE2" && call echo %VAR1% %VAR2%') + call("call test3.bat && call echo %VAR1%") + + def conan_env_deps(self): + client = TestClient() + conanfile = ''' +from conans import ConanFile, tools +import os + +class HelloConan(ConanFile): + settings = "os", "compiler", "arch" + + def build(self): + print os.environ.get("CL") + print os.environ.get("LIB") + vcvars = tools.vcvars_command(self.settings) + self.run(vcvars) + print os.environ.get("CL") + print os.environ.get("LIB") +''' + files = {} + files["conanfile.py"] = conanfile + client.save(files) + client.run("build") + # print client.user_io.out diff --git a/testbed/conan-io__conan/conans/test/integration/verify_ssl_test.py b/testbed/conan-io__conan/conans/test/integration/verify_ssl_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d689a427b2f2ab7036d5bf31f21133a3c57a0130 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/verify_ssl_test.py @@ -0,0 +1,56 @@ +import unittest +from conans.test.tools import TestClient +from requests.models import Response + + +resp = Response() +resp._content = 
class RequesterMockTrue(object):
    """Fake HTTP session asserting that SSL verification points at the
    cacert bundle shipped with conan."""

    def __init__(self, *args, **kwargs):
        pass

    def get(self, url, auth=None, headers=None, verify=None, stream=None):
        # When verification is enabled, conan must pass its cacert.pem path
        assert "cacert.pem" in verify
        return resp


class RequesterMockFalse(object):
    """Fake HTTP session asserting that SSL verification is disabled."""

    def __init__(self, *args, **kwargs):
        pass

    def get(self, url, auth=None, headers=None, verify=None, stream=None):
        assert verify is False
        return resp


class VerifySSLTest(unittest.TestCase):

    def verify_ssl_test(self):
        """'remote add/update' must persist the verify-SSL flag, and the HTTP
        requester must honour it when hitting the remote."""
        self.client = TestClient(requester_class=RequesterMockTrue)
        self.client.run("remote add myremote https://localhost False")
        self.client.run("remote list")
        self.assertIn("Verify SSL: False", self.client.user_io.out)

        # The flag can be toggled with 'remote update'
        self.client.run("remote update myremote https://localhost True")
        self.client.run("remote list")
        self.assertIn("Verify SSL: True", self.client.user_io.out)

        # Omitting the flag defaults to verifying SSL
        self.client.run("remote remove myremote")
        self.client.run("remote add myremote https://localhost")
        self.client.run("remote list")
        self.assertIn("Verify SSL: True", self.client.user_io.out)

        # Verify that SSL is checked in requests (mock asserts on verify=)
        self.client.run("search op* -r myremote")

        # Verify that SSL is not checked in requests when disabled
        self.client = TestClient(requester_class=RequesterMockFalse)
        self.client.run("remote add myremote https://localhost False")
        self.client.run("search op* -r myremote")
import OrderedDict + + +class VersionCheckTest(unittest.TestCase): + + def check_versions_test(self): + # Client deprecated + self.servers = {"default": self._get_server(10, 5)} + self.client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, client_version=4) + + errors = self.client.run("search something -r default", ignore_error=True) + self.assertIn("Your conan's client version is deprecated for the current remote (v10). " + "Upgrade conan client.", self.client.user_io.out) + self.assertTrue(errors) # Not Errors + + # Client outdated + self.servers = {"default": self._get_server(10, 4)} + self.client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, client_version=4) + + errors = self.client.run("search something -r default", ignore_error=False) + self.assertIn(" A new conan version (v10) is available in current remote. Please, " + "upgrade conan client to avoid deprecation.", self.client.user_io.out) + self.assertFalse(errors) # Not Errors + + # Client ok + self.servers = {"default": self._get_server(10, 4)} + self.client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, client_version=10) + + errors = self.client.run("search something -r default", ignore_error=False) + self.assertNotIn("conan client", self.client.user_io.out) + self.assertFalse(errors) # Not Errors + + # Server outdated + self.servers = {"default": self._get_server(1, 1)} + self.client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, client_version=10, + min_server_compatible_version=1) + + errors = self.client.run("search something -r default", ignore_error=True) + self.assertNotIn("The conan remote version is outdated (v1). 
Please, contact" + " with your system administrator and upgrade the remote to" + " avoid deprecation", self.client.user_io.out) + self.assertFalse(errors) # No Errors + + # Server deprecated + self.servers = {"default": self._get_server(1, 1)} + self.client = TestClient(servers=self.servers, + users={"default": [("lasote", "mypass")]}, client_version=10, + min_server_compatible_version=2) + + errors = self.client.run("search something -r default", ignore_error=True) + self.assertIn("Your conan's client is incompatible with this remote." + " The server is deprecated. " + "(v1). Please, contact with your system administrator and" + " upgrade the server.", + self.client.user_io.out) + self.assertTrue(errors) # Errors + + def check_multi_server_test(self): + # Check what happen if we have 2 servers and one is outdated + # The expected behavior: If we specify the remote with (-r), the commmand will fail + # if the client fot that remote is outdated. If we are looking for a package (not with -r) + # the client will look for the package on each remote. 
+ + # Client deprecated for "the_last_server" but OK for "normal_server" + self.servers = OrderedDict([("the_last_server", self._get_server(10, 4)), + ("normal_server", self._get_server(4, 2))]) + + # First upload a package ok with an ok client + tmp_client = TestClient(servers=self.servers, + users={"normal_server": [("lasote", "mypass")], + "the_last_server": [("lasote", "mypass")]}, + client_version=4) + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + + tmp_client.save(files) + tmp_client.run("export lasote/stable") + errors = tmp_client.run("upload Hello0/0.1@lasote/stable -r normal_server --all") + errors |= tmp_client.run("upload Hello0/0.1@lasote/stable -r the_last_server --all") + self.assertFalse(errors) + tmp_client.run("remote remove_ref Hello0/0.1@lasote/stable") + # Now with a conflictive client...try to look in servers + self.client = TestClient(servers=self.servers, + users={"normal_server": [("lasote", "mypass")], + "the_last_server": [("lasote", "mypass")]}, + client_version=2) + errors = self.client.run("search something -r the_last_server", ignore_error=True) + self.assertIn("Your conan's client version is deprecated for the current remote (v10). " + "Upgrade conan client.", self.client.user_io.out) + self.assertTrue(errors) # Errors + + errors = self.client.run("install Hello0/0.1@lasote/stable --build missing", + ignore_error=True) + self.assertIn("Your conan's client version is deprecated for the current remote (v10). " + "Upgrade conan client.", self.client.user_io.out) + self.assertFalse(errors) # No Errors! 
because it finds the package in the second remote + + def _get_server(self, server_version, min_client_compatible_version): + server_version = str(server_version) + min_client_compatible_version = str(min_client_compatible_version) + return TestServer( + [], # write permissions + users={"lasote": "mypass"}, + server_version=Version(server_version), + min_client_compatible_version=Version(min_client_compatible_version)) diff --git a/testbed/conan-io__conan/conans/test/integration/version_ranges_diamond_test.py b/testbed/conan-io__conan/conans/test/integration/version_ranges_diamond_test.py new file mode 100644 index 0000000000000000000000000000000000000000..580edc933a4dbe390e08895375a4a378483678d1 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/integration/version_ranges_diamond_test.py @@ -0,0 +1,149 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.paths import CONANFILE +from conans.util.files import load +import os +from nose_parameterized import parameterized + + +class VersionRangesMultiRemoteTest(unittest.TestCase): + + def setUp(self): + self.servers = {"default": TestServer(), + "other": TestServer()} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")], + "other": [("lasote", "mypass")]}) + + def _export(self, name, version, deps=None, export=True, upload=True, remote="default"): + deps = ", ".join(['"%s"' % d for d in deps or []]) or '""' + conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + name = "%s" + version = "%s" + requires = %s +""" % (name, version, deps) + files = {CONANFILE: conanfile} + self.client.save(files, clean_first=True) + if export: + self.client.run("export lasote/stable") + if upload: + self.client.run("upload %s/%s@lasote/stable -r=%s" % (name, version, remote)) + + def resolve_from_remotes_test(self): + self._export("Hello0", "0.1") + self._export("Hello0", "0.2") + self._export("Hello0", "0.3", 
remote="other") + self._export("Hello1", "0.1", ["Hello0/[>0.1,<0.4]@lasote/stable"], export=False, + upload=False) + + for remote, solution in [("default", "0.2"), ("other", "0.3")]: + self.client.run('remove "Hello0/0.*" -f') + self.client.run("install . --build missing -r=%s" % remote) + self.assertIn("Version range '>0.1,<0.4' required by 'None' " + "resolved to 'Hello0/%s@lasote/stable'" % solution, + self.client.user_io.out) + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + content = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("Hello0/%s@lasote/stable" % solution, content) + + +class VersionRangesDiamondTest(unittest.TestCase): + + def setUp(self): + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + def _export(self, name, version, deps=None, export=True, upload=True): + deps = ", ".join(['"%s"' % d for d in deps or []]) or '""' + conanfile = """ +from conans import ConanFile, CMake +import os + +class HelloReuseConan(ConanFile): + name = "%s" + version = "%s" + requires = %s +""" % (name, version, deps) + files = {CONANFILE: conanfile} + self.client.save(files, clean_first=True) + if export: + self.client.run("export lasote/stable") + if upload: + self.client.run("upload %s/%s@lasote/stable" % (name, version)) + + def local_then_remote_test(self): + self._export("Hello0", "0.1") + self._export("Hello0", "0.2") + self._export("Hello0", "0.3") + self._export("Hello0", "1.4") + self._export("Hello1", "0.1", ["Hello0/[>0.1,<0.3]@lasote/stable"], export=False, + upload=False) + + self.client.run('remove "Hello0/0.*" -f') + self.client.run("install . 
--build missing") + self.assertIn("Version range '>0.1,<0.3' required by 'None' " + "resolved to 'Hello0/0.2@lasote/stable'", self.client.user_io.out) + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + content = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("Hello0/0.2@lasote/stable", content) + + @parameterized.expand([(False, ), (True,) + ]) + def reuse_test(self, upload): + self._export("Hello0", "0.1", upload=upload) + self._export("Hello0", "0.2", upload=upload) + self._export("Hello0", "0.3", upload=upload) + self._export("Hello1", "0.1", ["Hello0/[>0.1,<0.3]@lasote/stable"], upload=upload) + self._export("Hello2", "0.1", ["Hello0/[0.2]@lasote/stable"], upload=upload) + self._export("Hello3", "0.1", ["Hello1/[>=0]@lasote/stable", "Hello2/[~=0]@lasote/stable"], + export=False, upload=upload) + + if upload: + self.client.run('remove "*" -f') + + self.client.run("install . --build missing") + + def check1(): + self.assertIn("Version range '~=0' required by 'None' resolved to " + "'Hello2/0.1@lasote/stable'", self.client.user_io.out) + self.assertIn("Version range '>0.1,<0.3' required by 'Hello1/0.1@lasote/stable' " + "resolved to 'Hello0/0.2@lasote/stable'", self.client.user_io.out) + self.assertIn("Version range '0.2' required by 'Hello2/0.1@lasote/stable' resolved " + "to 'Hello0/0.2@lasote/stable'", self.client.user_io.out) + self.assertNotIn("Conflict", self.client.user_io.out) + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + content = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("Hello0/0.2@lasote/stable", content) + self.assertIn("Hello1/0.1@lasote/stable", content) + self.assertIn("Hello2/0.1@lasote/stable", content) + + check1() + + if upload: + self._export("Hello0", "0.2.1", upload=upload) + self.client.run('remove Hello0/0.2.1@lasote/stable -f') + self._export("Hello3", "0.1", ["Hello1/[>=0]@lasote/stable", 
"Hello2/[~=0]@lasote/stable"], + export=False, upload=upload) + self.client.run("install . --build missing") + check1() + # Now update + self.client.run("install . --update --build missing") + self.assertIn("Version range '~=0' required by 'None' resolved to " + "'Hello2/0.1@lasote/stable'", self.client.user_io.out) + self.assertIn("Version range '>0.1,<0.3' required by 'Hello1/0.1@lasote/stable' " + "resolved to 'Hello0/0.2.1@lasote/stable'", self.client.user_io.out) + self.assertIn("Version range '0.2' required by 'Hello2/0.1@lasote/stable' resolved " + "to 'Hello0/0.2.1@lasote/stable'", self.client.user_io.out) + self.assertNotIn("Conflict", self.client.user_io.out) + self.assertIn("PROJECT: Generated conaninfo.txt", self.client.user_io.out) + + content = load(os.path.join(self.client.current_folder, "conaninfo.txt")) + self.assertIn("Hello0/0.2.1@lasote/stable", content) + self.assertIn("Hello1/0.1@lasote/stable", content) + self.assertIn("Hello2/0.1@lasote/stable", content) diff --git a/testbed/conan-io__conan/conans/test/libcxx_setting_test.py b/testbed/conan-io__conan/conans/test/libcxx_setting_test.py new file mode 100644 index 0000000000000000000000000000000000000000..30c023f504f4d5948358867d4a2d40643d0cf23a --- /dev/null +++ b/testbed/conan-io__conan/conans/test/libcxx_setting_test.py @@ -0,0 +1,144 @@ +import unittest +from conans.test.tools import TestClient +import platform +from conans.util.files import load +import os + + +file_content = ''' +from conans import ConanFile, CMake + +class ConanFileToolsTest(ConanFile): + name = "test" + version = "1.9" + settings = "os", "compiler", "arch", "build_type" + url = "1" + license = "2" + export = ["CMakeLists.txt", "main.c"] + generators = ["cmake"] + + def build(self): + self.output.warn("Building...") + cmake = CMake(self.settings) + self.output.warn(cmake.command_line) + command = cmake.command_line.replace('-G "Visual Studio 12 Win64"', "") + self.run('cmake . %s' % command) + self.run("cmake --build . 
def nowintest(func):
    """Decorator disabling *func* as a test on Windows by tagging it with
    ``__test__ = False`` (honoured by nose's collector); the function object
    itself is always returned unchanged."""
    on_windows = platform.system() == "Windows"
    if on_windows:
        func.__test__ = False
    return func
-s compiler=clang -s compiler.version=3.3 -s compiler.libcxx=libc++', ignore_error=False) + client.run('build') + self.assertIn("-stdlib=libc++", str(client.user_io.out)) + self.assertNotIn("Found Define: _GLIBCXX_USE_CXX11", str(client.user_io.out)) + + def test_C_only(self): + config = ''' + def config(self): + del self.settings.compiler.libcxx # C package only +''' + self.files["conanfile.py"] = self.files["conanfile.py"].replace('["cmake"]', + '["cmake"]\n %s' % config) + + self.files["conanfile.py"] = self.files["conanfile.py"].replace("def build", "def nobuild") + client = TestClient() + client.save(self.files) + client.run("export lasote/testing") + client.run("install") + # Also check that it not fails the config method with Visual Studio, because of the lack of libcxx + client.run('install -s compiler="Visual Studio" -s compiler.version=12 -s compiler.runtime=MD', ignore_error=False) + self.assertIn("Generated cmake created conanbuildinfo.cmake", str(client.user_io.out)) + + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertNotIn("libcxx", conaninfo[:conaninfo.find("[full_settings]")]) + client.run('install test/1.9@lasote/testing -s compiler=gcc -s compiler.version=4.9 --build', ignore_error=False) + + # Now try to reuse the installed package defining libstc++11 for the new package + newlib_content = ''' +from conans import ConanFile, CMake + +class ConanFileToolsTest(ConanFile): + name = "test2" + version = "1.9" + settings = "os", "compiler", "arch", "build_type" + url = "1" + license = "2" + export = ["CMakeLists.txt", "main.c"] + generators = ["cmake"] + requires = "test/1.9@lasote/testing" + + def build(self): + pass + ''' + new_client = TestClient(base_folder=client.base_folder) # Share storage + new_client.save({"conanfile.py": newlib_content, "CMakeLists.txt": cmakelists}) + new_client.run('install -s compiler=gcc -s compiler.libcxx=libstdc++11 -s compiler.version=4.9', ignore_error=False) + # Package is found and 
everything is ok + self.assertIn("Generated cmake created conanbuildinfo.cmake", str(new_client.user_io.out)) + + # Try again without removing the setting, if we use libstdc++11, the C package won't be found + self.files["conanfile.py"] = self.files["conanfile.py"].replace("def config", "def config222") + client.save(self.files) + client.run("export lasote/testing") + client.run("install -s compiler=gcc -s compiler.libcxx=libstdc++ -s compiler.version=4.9") + conaninfo = load(os.path.join(client.current_folder, "conaninfo.txt")) + self.assertIn("libcxx", conaninfo[:conaninfo.find("[full_settings]")]) + client.run('install test/1.9@lasote/testing -s compiler=gcc --build -s compiler.libcxx=libstdc++ -s compiler.version=4.9', ignore_error=False) + new_client.run('install -s compiler=gcc -s compiler.libcxx=libstdc++11 -s compiler.version=4.9', ignore_error=True) + self.assertIn("Can't find a 'test/1.9@lasote/testing' package for the specified options and settings", str(new_client.user_io.out)) diff --git a/testbed/conan-io__conan/conans/test/local_db_test.py b/testbed/conan-io__conan/conans/test/local_db_test.py new file mode 100644 index 0000000000000000000000000000000000000000..84713c265f3aa38dfd7f11fd5a69af2d786057e4 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/local_db_test.py @@ -0,0 +1,24 @@ +import unittest +from conans.client.store.localdb import LocalDB +import os +from conans.test.utils.test_files import temp_folder + + +class LocalStoreTest(unittest.TestCase): + + def localdb_test(self): + tmp_dir = temp_folder() + db_file = os.path.join(tmp_dir, "dbfile") + localdb = LocalDB(db_file) + + # Test write and read login + localdb.init() + user, token = localdb.get_login("myurl1") + self.assertIsNone(user) + self.assertIsNone(token) + + localdb.set_login(("pepe", "token"), "myurl1") + user, token = localdb.get_login("myurl1") + self.assertEquals("pepe", user) + self.assertEquals("token", token) + self.assertEquals("pepe", 
localdb.get_username("myurl1")) diff --git a/testbed/conan-io__conan/conans/test/model/__init__.py b/testbed/conan-io__conan/conans/test/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/model/build_info_test.py b/testbed/conan-io__conan/conans/test/model/build_info_test.py new file mode 100644 index 0000000000000000000000000000000000000000..49e051da604d529d5ea4f0365735f805d00beedd --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/build_info_test.py @@ -0,0 +1,45 @@ +import unittest +import os +from conans.model.build_info import DepsCppInfo, CppInfo +from conans.client.generators import TXTGenerator +from collections import namedtuple +from conans.model.env_info import DepsEnvInfo +from conans.test.utils.test_files import temp_folder +import platform + + +class BuildInfoTest(unittest.TestCase): + + def _equal(self, item1, item2): + for field in item1.fields: + self.assertEqual(getattr(item1, field), + getattr(item2, field)) + + def help_test(self): + deps_env_info = DepsEnvInfo() + deps_cpp_info = DepsCppInfo() + deps_cpp_info.includedirs.append("C:/whatever") + deps_cpp_info.includedirs.append("C:/whenever") + deps_cpp_info.libdirs.append("C:/other") + deps_cpp_info.libs.extend(["math", "winsock", "boost"]) + child = DepsCppInfo() + child.includedirs.append("F:/ChildrenPath") + child.cppflags.append("cxxmyflag") + deps_cpp_info._dependencies["Boost"] = child + fakeconan = namedtuple("Conanfile", "deps_cpp_info cpp_info deps_env_info env_info") + output = TXTGenerator(fakeconan(deps_cpp_info, None, deps_env_info, None)).content + deps_cpp_info2 = DepsCppInfo.loads(output) + self._equal(deps_cpp_info, deps_cpp_info2) + + def cpp_info_test(self): + folder = temp_folder() + info = CppInfo(folder) + info.includedirs.append("/usr/include") + info.libdirs.append("/usr/lib") + bin_abs_dir = "C:/usr/bin" if platform.system() == 
class EnvInfoTest(unittest.TestCase):
    """Behavior of DepsEnvInfo dynamic attribute assignment and merging."""

    def assign_test(self):
        # First assignment creates the variable; append() turns it into a
        # list; re-assignment overwrites the previous value.
        env = DepsEnvInfo()
        env.foo = "var"
        env.foo.append("var2")
        env.foo2 = "var3"
        env.foo2 = "var4"
        env.foo63 = "other"

        expected = {"foo": ["var", "var2"], "foo2": "var4", "foo63": "other"}
        self.assertEquals(env.vars, expected)

    def update_test(self):
        env = DepsEnvInfo()
        env.foo = "var"
        env.foo.append("var2")
        env.foo2 = "var3"
        env.foo2 = "var4"
        env.foo63 = "other"

        other = DepsEnvInfo()
        other.foo = "new_value"
        other.foo2.append("not")
        other.foo3.append("var3")

        env.update(other, ConanFileReference.loads("pack/1.0@lasote/testing"))

        # Lists merge, plain values keep the first assignment, new vars appear
        expected = {"foo": ["var", "var2", "new_value"],
                    "foo2": "var4", "foo3": ["var3"],
                    "foo63": "other"}
        self.assertEquals(env.vars, expected)
arch=x86_64 + build_type=Debug + compiler=gcc + compiler.libcxx=libstdc++11 + compiler.version=5.2 + os=Linux + +[requires] + bzip2/1.Y.Z + zlib/1.Y.Z + +[options] + fPIC=True + header_only=False + shared=False + +[full_settings] + arch=x86_64 + build_type=Debug + compiler=gcc + compiler.libcxx=libstdc++11 + compiler.version=5.2 + os=Linux + +[full_requires] + bzip2/1.0.6@lasote/stable:c6c01ee5ea2cf4af63e7b83b722b0a2d90640641 + zlib/1.2.8@lasote/stable:2dec3996ef8de7edb0304eaf4efdd96a0477d3a3 + +[full_options] + fPIC=True + header_only=False + shared=False + bzip2:fPIC=True + bzip2:shared=False + zlib:shared=False + +[scope]''' + + +class ConanInfoTest(unittest.TestCase): + + def test_serialize(self): + info = ConanInfo.loads(info_text) + min_serial = info.serialize_min() + expected = {'full_requires': + ['bzip2/1.0.6@lasote/stable:c6c01ee5ea2cf4af63e7b83b722b0a2d90640641', + 'zlib/1.2.8@lasote/stable:2dec3996ef8de7edb0304eaf4efdd96a0477d3a3'], + 'options': {'shared': 'False', 'fPIC': 'True', 'header_only': 'False'}, + 'recipe_hash': None, + 'settings': {'arch': 'x86_64', 'compiler.libcxx': 'libstdc++11', + 'compiler.version': '5.2', 'os': 'Linux', + 'build_type': 'Debug', 'compiler': 'gcc'}} + self.assertEquals(min_serial, expected) diff --git a/testbed/conan-io__conan/conans/test/model/manifest_test.py b/testbed/conan-io__conan/conans/test/model/manifest_test.py new file mode 100644 index 0000000000000000000000000000000000000000..49384c2129c314eefb260a607a045d879697fbda --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/manifest_test.py @@ -0,0 +1,46 @@ +import unittest +from conans.util.files import save, load, md5 +import os +from conans.model.manifest import FileTreeManifest +from conans.test.utils.test_files import temp_folder + + +class ManifestTest(unittest.TestCase): + + def test_tree_manifest(self): + tmp_dir = temp_folder() + files = {"one.ext": "aalakjshdlkjahsdlkjahsdljkhsadljkhasljkdhlkjashd", + "path/to/two.txt": "asdas13123", + "two.txt": 
"asdasdasdasdasdasd", + "folder/damn.pyc": "binarythings", + "folder/damn.pyo": "binarythings2", + "pythonfile.pyc": "binarythings3"} + for filename, content in files.items(): + save(os.path.join(tmp_dir, filename), content) + + manifest = FileTreeManifest.create(tmp_dir) + + save(os.path.join(tmp_dir, "THEMANIFEST.txt"), str(manifest)) + + readed_manifest = FileTreeManifest.loads(load(os.path.join(tmp_dir, "THEMANIFEST.txt"))) + + self.assertEquals(readed_manifest, manifest) + # Not included the pycs or pyo + self.assertEquals(set(manifest.file_sums.keys()), + set(["one.ext", "path/to/two.txt", "two.txt"])) + + for filepath, md5readed in manifest.file_sums.items(): + content = files[filepath] + self.assertEquals(md5(content), md5readed) + + def already_pyc_in_manifest_test(self): + tmp_dir = temp_folder() + save(os.path.join(tmp_dir, "man.txt"), "1478122267\nconanfile.pyc: " + "2bcac725a0e6843ef351f4d18cf867ec\n" + "conanfile.py: 2bcac725a0e6843ef351f4d18cf867ec", + "conanfile.pyo: 2bcac725a0e6843ef351f4d18cf867ec") + + read_manifest = FileTreeManifest.loads(load(os.path.join(tmp_dir, "man.txt"))) + # Not included the pycs or pyo + self.assertEquals(set(read_manifest.file_sums.keys()), + set(["conanfile.py"])) diff --git a/testbed/conan-io__conan/conans/test/model/options_test.py b/testbed/conan-io__conan/conans/test/model/options_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0e64e319992bbcf84fc8c68c80c06273c5369e52 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/options_test.py @@ -0,0 +1,169 @@ +import unittest +from conans.model.options import OptionsValues, PackageOptions, Options, PackageOptionValues,\ + option_undefined_msg +from conans.model.ref import ConanFileReference +from conans.test.tools import TestBufferConanOutput +from conans.errors import ConanException + + +class OptionsTest(unittest.TestCase): + + def setUp(self): + package_options = PackageOptions.loads("""{static: [True, False], + optimized: [2, 3, 
4], + path: ANY}""") + values = PackageOptionValues() + values.add_option("static", True) + values.add_option("optimized", 3) + values.add_option("path", "NOTDEF") + package_options.values = values + self.sut = Options(package_options) + + def undefined_value_test(self): + """ Not assigning a value to options will raise an error at validate() step + """ + package_options = PackageOptions.loads("""{ + path: ANY}""") + with self.assertRaisesRegexp(ConanException, option_undefined_msg("path")): + package_options.validate() + package_options.path = "Something" + package_options.validate() + + def undefined_value_none_test(self): + """ The value None is allowed as default, not necessary to default to it + """ + package_options = PackageOptions.loads('{path: [None, "Other"]}') + package_options.validate() + package_options = PackageOptions.loads('{path: ["None", "Other"]}') + package_options.validate() + + def items_test(self): + self.assertEqual(self.sut.items(), [("optimized", "3"), ("path", "NOTDEF"), + ("static", "True")]) + self.assertEqual(self.sut.items(), [("optimized", "3"), ("path", "NOTDEF"), + ("static", "True")]) + + def change_test(self): + self.sut.path = "C:/MyPath" + self.assertEqual(self.sut.items(), [("optimized", "3"), ("path", "C:/MyPath"), + ("static", "True")]) + self.assertEqual(self.sut.items(), [("optimized", "3"), ("path", "C:/MyPath"), + ("static", "True")]) + with self.assertRaisesRegexp(ConanException, + "'5' is not a valid 'options.optimized' value"): + self.sut.optimized = 5 + + def boolean_test(self): + self.sut.static = False + self.assertFalse(self.sut.static) + self.assertTrue(not self.sut.static) + self.assertTrue(self.sut.static == False) + self.assertFalse(self.sut.static == True) + self.assertFalse(self.sut.static != False) + self.assertTrue(self.sut.static != True) + self.assertTrue(self.sut.static == "False") + self.assertTrue(self.sut.static != "True") + + def basic_test(self): + boost_values = PackageOptionValues() + 
boost_values.add_option("static", False) + boost_values.add_option("thread", True) + boost_values.add_option("thread.multi", "off") + poco_values = PackageOptionValues() + poco_values.add_option("deps_bundled", True) + hello1_values = PackageOptionValues() + hello1_values.add_option("static", False) + hello1_values.add_option("optimized", 4) + + options = {"Boost": boost_values, + "Poco": poco_values, + "Hello1": hello1_values} + down_ref = ConanFileReference.loads("Hello0/0.1@diego/testing") + own_ref = ConanFileReference.loads("Hello1/0.1@diego/testing") + output = TestBufferConanOutput() + self.sut.propagate_upstream(options, down_ref, own_ref, output) + self.assertEqual(self.sut.values.as_list(), [("optimized", "4"), + ("path", "NOTDEF"), + ("static", "False"), + ("Boost:static", "False"), + ("Boost:thread", "True"), + ("Boost:thread.multi", "off"), + ("Poco:deps_bundled", "True")]) + + boost_values = PackageOptionValues() + boost_values.add_option("static", 2) + boost_values.add_option("thread", "Any") + boost_values.add_option("thread.multi", "on") + poco_values = PackageOptionValues() + poco_values.add_option("deps_bundled", "What") + hello1_values = PackageOptionValues() + hello1_values.add_option("static", True) + hello1_values.add_option("optimized", "2") + options2 = {"Boost": boost_values, + "Poco": poco_values, + "Hello1": hello1_values} + down_ref = ConanFileReference.loads("Hello2/0.1@diego/testing") + self.sut.propagate_upstream(options2, down_ref, own_ref, output) + self.assertIn("""WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option optimized to 2 +but it was already assigned to 4 by Hello0/0.1@diego/testing +WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option static to True +but it was already assigned to False by Hello0/0.1@diego/testing +WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option Boost:static to 2 +but it was already assigned to False by 
Hello0/0.1@diego/testing +WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option Boost:thread to Any +but it was already assigned to True by Hello0/0.1@diego/testing +WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option Boost:thread.multi to on +but it was already assigned to off by Hello0/0.1@diego/testing +WARN: Hello2/0.1@diego/testing tried to change Hello1/0.1@diego/testing option Poco:deps_bundled to What +but it was already assigned to True by Hello0/0.1@diego/testing""", str(output)) + self.assertEqual(self.sut.values.dumps(), + """optimized=4 +path=NOTDEF +static=False +Boost:static=False +Boost:thread=True +Boost:thread.multi=off +Poco:deps_bundled=True""") + + +class OptionsValuesTest(unittest.TestCase): + + def setUp(self): + self.sut = OptionsValues.loads("""static=True + optimized=3 + Poco:deps_bundled=True + Boost:static=False + Boost:thread=True + Boost:thread.multi=off + """) + + def test_from_list(self): + option_values = OptionsValues(self.sut.as_list()) + self.assertEqual(option_values.dumps(), self.sut.dumps()) + + def test_dumps(self): + self.assertEqual(self.sut.dumps(), "\n".join(["optimized=3", + "static=True", + "Boost:static=False", + "Boost:thread=True", + "Boost:thread.multi=off", + "Poco:deps_bundled=True"])) + + def test_sha_constant(self): + self.assertEqual(self.sut.sha({"Boost", "Poco"}), + "2442d43f1d558621069a15ff5968535f818939b5") + self.sut.new_option = False + self.sut["Boost"].new_option = "off" + self.sut["Poco"].new_option = 0 + + self.assertEqual(self.sut.dumps(), "\n".join(["new_option=False", + "optimized=3", + "static=True", + "Boost:new_option=off", + "Boost:static=False", + "Boost:thread=True", + "Boost:thread.multi=off", + "Poco:deps_bundled=True", + "Poco:new_option=0"])) + self.assertEqual(self.sut.sha({"Boost", "Poco"}), + "2442d43f1d558621069a15ff5968535f818939b5") diff --git a/testbed/conan-io__conan/conans/test/model/other_settings_test.py 
b/testbed/conan-io__conan/conans/test/model/other_settings_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c391bfa13bccc467687126954a79efda5fb2947e --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/other_settings_test.py @@ -0,0 +1,190 @@ +import unittest +from conans.util.files import save, load +from conans.test.tools import TestClient +from conans.paths import CONANFILE, CONANINFO +from conans.model.info import ConanInfo +import os +from conans.model.settings import undefined_value, bad_value_msg + + +class SettingsTest(unittest.TestCase): + + def setUp(self): + self.client = TestClient() + + def settings_as_a_str_test(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os" +""" + self.client.save({CONANFILE: content}) + self.client.run("install -s os=Windows --build missing") + # Now read the conaninfo and verify that settings applied is only os and value is windows + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + self.assertEquals(conan_info.settings.os, "Windows") + + self.client.run("install -s os=Linux --build missing") + # Now read the conaninfo and verify that settings applied is only os and value is windows + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + self.assertEquals(conan_info.settings.os, "Linux") + + def settings_as_a_list_conanfile_test(self): + """Declare settings as a list""" + # Now with conanfile as a list + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os", "arch" +""" + self.client.save({CONANFILE: content}) + self.client.run("install -s os=Windows --build missing") + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + self.assertEquals(conan_info.settings.os, "Windows") + self.assertEquals(conan_info.settings.fields, ["arch", 
"os"]) + + def settings_as_a_dict_conanfile_test(self): + """Declare settings as a dict""" + # Now with conanfile as a dict + # XXX: this test only works on machines that default arch to "x86" or "x86_64" + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = {"os": ["Windows"], "arch": ["x86", "x86_64"]} +""" + self.client.save({CONANFILE: content}) + self.client.run("install -s os=Windows --build missing") + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + self.assertEquals(conan_info.settings.os, "Windows") + self.assertEquals(conan_info.settings.fields, ["arch", "os"]) + + def invalid_settings_test(self): + '''Test wrong values and wrong constraints''' + default_conf = load(self.client.paths.conan_conf_path) + new_conf = default_conf.replace("os=", "# os=") + save(self.client.paths.conan_conf_path, new_conf) + # MISSING VALUE FOR A SETTING + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os", "build_type" +""" + + self.client.save({CONANFILE: content}) + self.client.run("install --build missing", ignore_error=True) + self.assertIn(undefined_value("settings.os"), str(self.client.user_io.out)) + + def invalid_settings_test2(self): + # MISSING A DEFAULT VALUE BECAUSE ITS RESTRICTED TO OTHER, SO ITS REQUIRED + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = {"os": ["Windows", "Linux", "Macos", "FreeBSD", "SunOS"], "compiler": ["Visual Studio"]} +""" + + self.client.save({CONANFILE: content}) + self.client.run("install -s compiler=gcc -s compiler.version=4.8 --build missing", ignore_error=True) + self.assertIn(bad_value_msg("settings.compiler", "gcc", ["Visual Studio"]), + str(self.client.user_io.out)) + + def invalid_settings_test3(self): + # dict without options + content = """ +from conans import ConanFile + 
+class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = {"os": None, "compiler": ["Visual Studio"]} +""" + + self.client.save({CONANFILE: content}) + self.client.run("install -s compiler=gcc -s compiler.version=4.8 --build missing", ignore_error=True) + self.assertIn(bad_value_msg("settings.compiler", "gcc", ["Visual Studio"]), + str(self.client.user_io.out)) + + # Test wrong settings in conanfile + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = invalid +""" + + self.client.save({CONANFILE: content}) + self.client.run("install --build missing", ignore_error=True) + self.assertIn("invalid' is not defined", + str(self.client.user_io.out)) + + # Test wrong values in conanfile + def invalid_settings_test4(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os" +""" + + self.client.save({CONANFILE: content}) + self.client.run("install -s os=ChromeOS --build missing", ignore_error=True) + self.assertIn(bad_value_msg("settings.os", "ChromeOS", + ['Android', 'FreeBSD', 'Linux', 'Macos', 'SunOS', 'Windows', 'iOS']), + str(self.client.user_io.out)) + + # Now add new settings to config and try again + config = load(self.client.paths.settings_path) + config = config.replace("Windows,", + "Windows, ChromeOS,") + + save(self.client.paths.settings_path, config) + self.client.run("install -s os=ChromeOS --build missing") + self.assertIn('Generated conaninfo.txt', str(self.client.user_io.out)) + + # Settings is None + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = None +""" + self.client.save({CONANFILE: content}) + self.client.run("install --build missing") + self.assertIn('Generated conaninfo.txt', str(self.client.user_io.out)) + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + 
self.assertEquals(conan_info.settings.dumps(), "") + + # Settings is {} + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = {} +""" + self.client.save({CONANFILE: content}) + self.client.run("install --build missing") + self.assertIn('Generated conaninfo.txt', str(self.client.user_io.out)) + conan_info = ConanInfo.loads(load(os.path.join(self.client.current_folder, CONANINFO))) + self.assertEquals(conan_info.settings.dumps(), "") diff --git a/testbed/conan-io__conan/conans/test/model/profile_test.py b/testbed/conan-io__conan/conans/test/model/profile_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3de3cac0486530e4500fb600dc26457b656de0f7 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/profile_test.py @@ -0,0 +1,188 @@ +import unittest +from conans.model.profile import Profile + + +class ProfileTest(unittest.TestCase): + + def profile_test(self): + + # Empty profile + profile = Profile() + dump = profile.dumps() + new_profile = Profile.loads(dump) + self.assertEquals(new_profile.settings, profile.settings) + + # Settings + profile = Profile() + profile._settings["arch"] = "x86_64" + profile._settings["compiler.version"] = "12" + profile._settings["compiler"] = "Visual Studio" + + profile._env["CXX"] = "path/to/my/compiler/g++" + profile._env["CC"] = "path/to/my/compiler/gcc" + + profile.scopes["p1"]["conaning"] = "1" + profile.scopes["p2"]["testing"] = "2" + + dump = profile.dumps() + new_profile = Profile.loads(dump) + self.assertEquals(new_profile.settings, profile.settings) + self.assertEquals(new_profile._settings["arch"], "x86_64") + self.assertEquals(new_profile._settings["compiler.version"], "12") + self.assertEquals(new_profile._settings["compiler"], "Visual Studio") + + self.assertEquals(new_profile._env["CXX"], "path/to/my/compiler/g++") + self.assertEquals(new_profile._env["CC"], "path/to/my/compiler/gcc") + + 
self.assertEquals(dict(new_profile.scopes)["p1"]["conaning"], '1') + self.assertEquals(dict(new_profile.scopes)["p2"]["testing"], '2') + + def profile_settings_update_test(self): + prof = '''[settings] +os=Windows +''' + new_profile = Profile.loads(prof) + + new_profile.update_settings([("OTHER", "2")]) + self.assertEquals(new_profile.settings, [("os", "Windows"), ("OTHER", "2")]) + + new_profile.update_settings([("compiler.version", "3"), ("compiler", "2")]) + self.assertEquals(new_profile.settings, [("os", "Windows"), ("OTHER", "2"), + ("compiler", "2"), ("compiler.version", "3")]) + + def package_settings_update_test(self): + prof = '''[settings] +MyPackage:os=Windows +''' + np = Profile.loads(prof) + + np.update_package_settings({"MyPackage": [("OTHER", "2")]}) + self.assertEquals(np.package_settings, {"MyPackage": [("os", "Windows"), ("OTHER", "2")]}) + + np.update_package_settings({"MyPackage": [("compiler.version", "3"), ("compiler", "2")]}) + self.assertEquals(np.package_settings, {"MyPackage": + [("os", "Windows"), ("OTHER", "2"), + ("compiler", "2"), ("compiler.version", "3")]}) + + def profile_env_update_test(self): + prof = '''[env] +CXX_FLAGS="-DAAA=0" +[settings] +''' + new_profile = Profile.loads(prof) + + new_profile.update_env([("OTHER", "2")]) + self.assertEquals(new_profile.env, [("OTHER", "2"), ("CXX_FLAGS", "-DAAA=0")]) + + new_profile.update_env([("OTHER", "3"), ("NEW", "4")]) + self.assertEquals(new_profile.env, [("OTHER", "3"), ("NEW", "4"), ("CXX_FLAGS", "-DAAA=0")]) + + new_profile.update_env([("NEW", "4"), ("CXX_FLAGS", "A")]) + self.assertEquals(new_profile.env, [("NEW", "4"), ("CXX_FLAGS", "A"), ("OTHER", "3")]) + + def profile_package_env_update_test(self): + prof = '''[env] +MyPackage:VARIABLE=2 +[settings] +''' + new_profile = Profile.loads(prof) + + new_profile.update_packages_env({"MyPackage": [("VARIABLE", "3")]}) + self.assertEquals(new_profile.package_env["MyPackage"], [("VARIABLE", "3")]) + + 
new_profile.update_packages_env({"MyPackage": [("OTHER", "2")]}) + self.assertEquals(new_profile.package_env["MyPackage"], [("OTHER", "2"), ("VARIABLE", "3")]) + + new_profile.update_packages_env({"MyPackage": [("SOME", "VAR"), ("OTHER", "22")]}) + self.assertEquals(new_profile.package_env["MyPackage"], [("SOME", "VAR"), ("OTHER", "22"), ("VARIABLE", "3")]) + + new_profile.update_packages_env({"OtherPackage": [("ONE", "2")]}) + self.assertEquals(new_profile.package_env["MyPackage"], [("SOME", "VAR"), ("OTHER", "22"), ("VARIABLE", "3")]) + self.assertEquals(new_profile.package_env["OtherPackage"], [("ONE", "2")]) + + def profile_loads_test(self): + prof = '''[env] +CXX_FLAGS="-DAAA=0" +[settings] +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile.env, [("CXX_FLAGS", "-DAAA=0")]) + + prof = '''[env] +CXX_FLAGS="-DAAA=0" +MyPackage:VAR=1 +MyPackage:OTHER=2 +OtherPackage:ONE=ONE +[settings] +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile.env, [("CXX_FLAGS", "-DAAA=0")]) + self.assertEquals(new_profile.package_env, {"MyPackage": [("VAR", "1"), ("OTHER", "2")], + "OtherPackage": [("ONE", "ONE")]}) + + prof = '''[env] +CXX_FLAGS='-DAAA=0' +[settings] +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile.env, [("CXX_FLAGS", "-DAAA=0")]) + + prof = '''[env] +CXX_FLAGS=-DAAA=0 +[settings] +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile.env, [("CXX_FLAGS", "-DAAA=0")]) + + prof = '''[env] +CXX_FLAGS="-DAAA=0 +[settings] +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile.env, [("CXX_FLAGS", "\"-DAAA=0")]) + + prof = ''' +[settings] +zlib:compiler=gcc +compiler=Visual Studio +''' + new_profile = Profile.loads(prof) + self.assertEquals(new_profile._package_settings["zlib"], {"compiler": "gcc"}) + self.assertEquals(new_profile._settings["compiler"], "Visual Studio") + + def profile_dump_order_test(self): + # Settings + profile = Profile() + 
profile._package_settings["zlib"] = {"compiler": "gcc"} + profile._settings["compiler.version"] = "12" + profile._settings["arch"] = "x86_64" + profile._settings["compiler"] = "Visual Studio" + + self.assertEqual('[settings]\narch=x86_64\ncompiler=Visual Studio\ncompiler.version=12\nzlib:compiler=gcc\n[scopes]\n[env]', + profile.dumps()) + + def apply_test(self): + # Settings + profile = Profile() + profile._settings["compiler.version"] = "12" + profile._settings["arch"] = "x86_64" + profile._settings["compiler"] = "Visual Studio" + + profile._env["CXX"] = "path/to/my/compiler/g++" + profile._env["CC"] = "path/to/my/compiler/gcc" + + profile.scopes["p1"]["conaning"] = "True" + profile.scopes["p2"]["testing"] = "True" + + profile.update_settings({"compiler.version": "14"}) + + self.assertEqual('[settings]\narch=x86_64\ncompiler=Visual Studio\ncompiler.version=14\n' + '[scopes]\np1:conaning=True\np2:testing=True\n' + '[env]\nCC=path/to/my/compiler/gcc\nCXX=path/to/my/compiler/g++', + profile.dumps()) + + profile.update_scopes({"p1": {"new_one": 2}}) + self.assertEqual('[settings]\narch=x86_64\ncompiler=Visual Studio\ncompiler.version=14\n' + '[scopes]\np1:new_one=2\np2:testing=True\n' + '[env]\nCC=path/to/my/compiler/gcc\nCXX=path/to/my/compiler/g++', + profile.dumps()) diff --git a/testbed/conan-io__conan/conans/test/model/ref_test.py b/testbed/conan-io__conan/conans/test/model/ref_test.py new file mode 100644 index 0000000000000000000000000000000000000000..db4f4c0cbe0b49856115fec3c4a0c3828f14c66f --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/ref_test.py @@ -0,0 +1,28 @@ +import unittest +from conans.model.ref import ConanFileReference +from conans.errors import ConanException + + +class RefTest(unittest.TestCase): + def basic_test(self): + ref = ConanFileReference.loads("opencv/2.4.10 @ lasote/testing") + self.assertEqual(ref.name, "opencv") + self.assertEqual(ref.version, "2.4.10") + self.assertEqual(ref.user, "lasote") + 
self.assertEqual(ref.channel, "testing")
+        self.assertEqual(str(ref), "opencv/2.4.10@lasote/testing")
+
+        ref = ConanFileReference.loads("opencv_lite/2.4.10@phil-lewis/testing")
+        self.assertEqual(ref.name, "opencv_lite")
+        self.assertEqual(ref.version, "2.4.10")
+        self.assertEqual(ref.user, "phil-lewis")
+        self.assertEqual(ref.channel, "testing")
+        self.assertEqual(str(ref), "opencv_lite/2.4.10@phil-lewis/testing")
+
+    def errors_test(self):
+        self.assertRaises(ConanException, ConanFileReference.loads, "")
+        self.assertRaises(ConanException, ConanFileReference.loads, "opencv/2.4.10")
+        self.assertRaises(ConanException, ConanFileReference.loads, "opencv/2.4.10 @ lasote")
+        self.assertRaises(ConanException, ConanFileReference.loads, "opencv??/2.4.10@laso/testing")
+        self.assertRaises(ConanException, ConanFileReference.loads, ".opencv/2.4.10@lasote/testing")
+        self.assertRaises(ConanException, ConanFileReference.loads, "o/2.4.10 @ lasote/testing")
diff --git a/testbed/conan-io__conan/conans/test/model/scope_test.py b/testbed/conan-io__conan/conans/test/model/scope_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb0e6147fbfdb8c9e0ab1d58227ef1ba214e11a2
--- /dev/null
+++ b/testbed/conan-io__conan/conans/test/model/scope_test.py
@@ -0,0 +1,25 @@
+import unittest
+from conans.model.scope import Scopes
+
+
+class ScopeTest(unittest.TestCase):
+
+    def from_list_test(self):
+        scope = Scopes.from_list(["theroot:thescope=http://conan.io"])
+        self.assertEquals(scope["theroot"]["thescope"], "http://conan.io")
+        self.assertEquals(scope.package_scope("theroot")["thescope"], "http://conan.io")
+
+        scope = Scopes.from_list(["thescope=http://conan.io"])
+        self.assertEquals(scope["0CONAN_ROOT*"]["thescope"], "http://conan.io")
+
+        scope = Scopes.from_list(["theroot:thescope=TRUE"])
+        self.assertTrue(scope["theroot"]["thescope"])
+
+        scope = Scopes.from_list(["theroot:thescope=true"])
+        self.assertTrue(scope["theroot"]["thescope"])
+
+        scope = Scopes.from_list(["theroot:thescope=FALSE"])
+        self.assertFalse(scope["theroot"]["thescope"])
+
+        scope = Scopes.from_list(["theroot:thescope=false"])
+        self.assertFalse(scope["theroot"]["thescope"])
diff --git a/testbed/conan-io__conan/conans/test/model/settings_test.py b/testbed/conan-io__conan/conans/test/model/settings_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..be5ad2a4b52eebfaa0f96ea0880ed5be4d312152
--- /dev/null
+++ b/testbed/conan-io__conan/conans/test/model/settings_test.py
@@ -0,0 +1,332 @@
+from conans.errors import ConanException
+import unittest
+from conans.model.settings import Settings, bad_value_msg, undefined_value, undefined_field
+
+
+class SettingsTest(unittest.TestCase):
+
+    def setUp(self):
+        data = {"compiler": {
+                    "Visual Studio": {
+                        "version": ["10", "11", "12"],
+                        "runtime": ["MD", "MT"]},
+                    "gcc": {
+                        "version": ["4.8", "4.9"],
+                        "arch": {"x86": {"speed": ["A", "B"]},
+                                 "x64": {"speed": ["C", "D"]}}}
+                },
+                "os": ["Windows", "Linux"]}
+        self.sut = Settings(data)
+
+    def any_test(self):
+        data = {"target": "ANY"}
+        sut = Settings(data)
+        sut.target = "native"
+        self.assertTrue(sut.target == "native")
+
+    def remove_test(self):
+        self.sut.remove("compiler")
+        self.sut.os = "Windows"
+        self.sut.validate()
+        self.assertEqual(self.sut.values.dumps(), "os=Windows")
+
+    def remove_compiler_test(self):
+        self.sut.compiler.remove("Visual Studio")
+        with self.assertRaises(ConanException) as cm:
+            self.sut.compiler = "Visual Studio"
+        self.assertEqual(str(cm.exception),
+                         bad_value_msg("settings.compiler", "Visual Studio", ["gcc"]))
+
+    def remove_version_test(self):
+        self.sut.compiler["Visual Studio"].version.remove("12")
+        self.sut.compiler = "Visual Studio"
+        with self.assertRaises(ConanException) as cm:
+            self.sut.compiler.version = "12"
+        self.assertEqual(str(cm.exception),
+                         bad_value_msg("settings.compiler.version", "12", ["10", "11"]))
+        self.sut.compiler.version = 11
+
self.assertEqual(self.sut.compiler.version, "11") + + def remove_os_test(self): + self.sut.os.remove("Windows") + with self.assertRaises(ConanException) as cm: + self.sut.os = "Windows" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.os", "Windows", ["Linux"])) + self.sut.os = "Linux" + self.assertEqual(self.sut.os, "Linux") + + def loads_default_test(self): + settings = Settings.loads("""os: [Windows, Linux, Macos, Android, FreeBSD, SunOS] +arch: [x86, x86_64, arm] +compiler: + sun-cc: + version: ["5.10", "5.11", "5.12", "5.13", "5.14"] + gcc: + version: ["4.8", "4.9", "5.0"] + Visual Studio: + runtime: [None, MD, MT, MTd, MDd] + version: ["10", "11", "12"] + clang: + version: ["3.5", "3.6", "3.7"] + +build_type: [None, Debug, Release]""") + settings.compiler = "clang" + settings.compiler.version = "3.5" + self.assertEqual(settings.compiler, "clang") + self.assertEqual(settings.compiler.version, "3.5") + + def loads_test(self): + settings = Settings.loads(""" +compiler: + Visual Studio: + runtime: [MD, MT] + version: + '10': + arch: ["32"] + '11': + &id1 + arch: ["32", "64"] + '12': + *id1 + gcc: + arch: + x64: + speed: [C, D] + x86: + speed: [A, B] + version: ['4.8', '4.9'] +os: [Windows, Linux] +""") + settings.values_list = [('compiler', 'Visual Studio'), + ('compiler.version', '10'), + ('compiler.version.arch', '32')] + self.assertEqual(settings.values_list, + [('compiler', 'Visual Studio'), + ('compiler.version', '10'), + ('compiler.version.arch', '32')]) + + settings.compiler.version = "10" + settings.compiler.version.arch = "32" + settings.compiler.version = "11" + settings.compiler.version.arch = "64" + settings.compiler.version = "12" + settings.compiler.version.arch = "64" + + self.assertEqual(settings.values_list, + [('compiler', 'Visual Studio'), + ('compiler.version', '12'), + ('compiler.version.arch', '64')]) + + def set_value_test(self): + self.sut.values_list = [("compiler", "Visual Studio")] + self.assertEqual(self.sut.compiler, 
"Visual Studio") + self.sut.values_list = [("compiler.version", "12")] + self.assertEqual(self.sut.compiler.version, "12") + self.sut.values_list = [("compiler", "gcc")] + self.assertEqual(self.sut.compiler, "gcc") + self.sut.values_list = [("compiler.version", "4.8")] + self.assertEqual(self.sut.compiler.version, "4.8") + self.sut.values_list = [("compiler.arch", "x86")] + self.assertEqual(self.sut.compiler.arch, "x86") + self.sut.values_list = [("compiler.arch.speed", "A")] + self.assertEqual(self.sut.compiler.arch.speed, "A") + + def constraint_test(self): + s2 = {"os": None} + self.sut.constraint(s2) + with self.assertRaises(ConanException) as cm: + self.sut.compiler + self.assertEqual(str(cm.exception), undefined_field("settings", "compiler", ["os"])) + self.sut.os = "Windows" + self.sut.os = "Linux" + + def constraint2_test(self): + s2 = {"os2": None} + with self.assertRaises(ConanException) as cm: + self.sut.constraint(s2) + self.assertEqual(str(cm.exception), undefined_field("settings", "os2", ["compiler", "os"])) + + def constraint3_test(self): + s2 = {"os": ["Win"]} + with self.assertRaises(ConanException) as cm: + self.sut.constraint(s2) + self.assertEqual(str(cm.exception), + bad_value_msg("os", "Win", ["Linux", "Windows"])) + + def constraint4_test(self): + s2 = {"os": ["Windows"]} + self.sut.constraint(s2) + with self.assertRaises(ConanException) as cm: + self.sut.os = "Linux" + self.assertEqual(str(cm.exception), bad_value_msg("settings.os", "Linux", ["Windows"])) + + self.sut.os = "Windows" + + def constraint5_test(self): + s2 = {"os": None, + "compiler": {"Visual Studio": {"version2": None}}} + + with self.assertRaises(ConanException) as cm: + self.sut.constraint(s2) + self.assertEqual(str(cm.exception), undefined_field("settings.compiler", "version2", + ['runtime', 'version'])) + self.sut.os = "Windows" + + def constraint6_test(self): + s2 = {"os": None, + "compiler": {"Visual Studio": {"version": None}}} + + self.sut.constraint(s2) + 
self.sut.compiler = "Visual Studio" + with self.assertRaises(ConanException) as cm: + self.sut.compiler.arch + self.assertEqual(str(cm.exception), undefined_field("settings.compiler", "arch", + ['version'], "Visual Studio")) + self.sut.os = "Windows" + self.sut.compiler.version = "11" + self.sut.compiler.version = "12" + + def constraint7_test(self): + s2 = {"os": None, + "compiler": {"Visual Studio": {"version": ("11", "10")}, + "gcc": None}} + + self.sut.constraint(s2) + self.sut.compiler = "Visual Studio" + with self.assertRaises(ConanException) as cm: + self.sut.compiler.version = "12" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler.version", "12", ["10", "11"])) + self.sut.compiler.version = "10" + self.sut.compiler.version = "11" + self.sut.os = "Windows" + self.sut.compiler = "gcc" + + def validate_test(self): + with self.assertRaisesRegexp(ConanException, undefined_value("settings.compiler")): + self.sut.validate() + + self.sut.compiler = "gcc" + with self.assertRaisesRegexp(ConanException, undefined_value("settings.compiler.arch")): + self.sut.validate() + + self.sut.compiler.arch = "x86" + with self.assertRaisesRegexp(ConanException, + undefined_value("settings.compiler.arch.speed")): + self.sut.validate() + + self.sut.compiler.arch.speed = "A" + with self.assertRaisesRegexp(ConanException, undefined_value("settings.compiler.version")): + self.sut.validate() + + self.sut.compiler.version = "4.8" + with self.assertRaisesRegexp(ConanException, undefined_value("settings.os")): + self.sut.validate() + + self.sut.os = "Windows" + self.sut.validate() + self.assertEqual(self.sut.values_list, [("compiler", "gcc"), + ("compiler.arch", "x86"), + ("compiler.arch.speed", "A"), + ("compiler.version", "4.8"), + ("os", "Windows")]) + + def validate2_test(self): + self.sut.os = "Windows" + self.sut.compiler = "Visual Studio" + with self.assertRaisesRegexp(ConanException, undefined_value("settings.compiler.runtime")): + self.sut.validate() + + 
self.sut.compiler.runtime = "MD" + with self.assertRaisesRegexp(ConanException, undefined_value("settings.compiler.version")): + self.sut.validate() + + self.sut.compiler.version = "10" + self.sut.validate() + + self.assertEqual(self.sut.values_list, [("compiler", "Visual Studio"), + ("compiler.runtime", "MD"), + ("compiler.version", "10"), + ("os", "Windows")]) + + def basic_test(self): + s = Settings({"os": ["Windows", "Linux"]}) + s.os = "Windows" + with self.assertRaises(ConanException) as cm: + self.sut.compiler = "kk" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler", "kk", "['Visual Studio', 'gcc']")) + + def my_test(self): + self.assertEqual(self.sut.compiler, None) + + with self.assertRaises(ConanException) as cm: + self.sut.compiler = "kk" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler", "kk", "['Visual Studio', 'gcc']")) + + self.sut.compiler = "Visual Studio" + self.assertEqual(str(self.sut.compiler), "Visual Studio") + self.assertEqual(self.sut.compiler, "Visual Studio") + + with self.assertRaises(ConanException) as cm: + self.sut.compiler.kk + self.assertEqual(str(cm.exception), + undefined_field("settings.compiler", "kk", "['runtime', 'version']", + "Visual Studio")) + + self.assertEqual(self.sut.compiler.version, None) + + with self.assertRaises(ConanException) as cm: + self.sut.compiler.version = "123" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler.version", "123", ['10', '11', '12'])) + + self.sut.compiler.version = "12" + self.assertEqual(self.sut.compiler.version, "12") + self.assertEqual(str(self.sut.compiler.version), "12") + + with self.assertRaises(ConanException) as cm: + assert self.sut.compiler == "kk" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler", "kk", "['Visual Studio', 'gcc']")) + + self.assertFalse(self.sut.compiler == "gcc") + self.assertTrue(self.sut.compiler == "Visual Studio") + + self.assertTrue(self.sut.compiler.version == 
"12") + self.assertFalse(self.sut.compiler.version == "11") + + with self.assertRaises(ConanException) as cm: + assert self.sut.compiler.version == "13" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler.version", "13", ['10', '11', '12'])) + + self.sut.compiler = "gcc" + with self.assertRaises(ConanException) as cm: + self.sut.compiler.runtime + self.assertEqual(str(cm.exception), + undefined_field("settings.compiler", "runtime", "['arch', 'version']", + "gcc")) + + self.sut.compiler.arch = "x86" + self.sut.compiler.arch.speed = "A" + self.assertEqual(self.sut.compiler.arch.speed, "A") + + with self.assertRaises(ConanException) as cm: + self.sut.compiler.arch.speed = "D" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler.arch.speed", "D", ['A', 'B'])) + + self.sut.compiler.arch = "x64" + self.sut.compiler.arch.speed = "C" + self.assertEqual(self.sut.compiler.arch.speed, "C") + + with self.assertRaises(ConanException) as cm: + self.sut.compiler.arch.speed = "A" + self.assertEqual(str(cm.exception), + bad_value_msg("settings.compiler.arch.speed", "A", ['C', 'D'])) + + self.sut.compiler.arch.speed = "D" + self.assertEqual(self.sut.compiler.arch.speed, "D") diff --git a/testbed/conan-io__conan/conans/test/model/transitive_reqs_test.py b/testbed/conan-io__conan/conans/test/model/transitive_reqs_test.py new file mode 100644 index 0000000000000000000000000000000000000000..06c8b6309a421b682ca7fa445b3409f9855ce360 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/transitive_reqs_test.py @@ -0,0 +1,1782 @@ +import unittest +from conans.test.tools import TestBufferConanOutput +from conans.paths import CONANFILE +import os +from conans.client.deps_builder import DepsGraphBuilder +from conans.model.ref import ConanFileReference +from conans.model.options import OptionsValues, option_not_exist_msg, option_wrong_value_msg +from conans.client.loader import ConanFileLoader +from conans.util.files import save +from 
conans.model.settings import Settings, bad_value_msg +from conans.errors import ConanException +from conans.model.requires import Requirements +from conans.client.conf import default_settings_yml +from conans.model.values import Values +from conans.test.utils.test_files import temp_folder +from collections import namedtuple +from conans.model.scope import Scopes + + +class Retriever(object): + def __init__(self, loader, output): + self.loader = loader + self.output = output + self.folder = temp_folder() + + def root(self, content): + conan_path = os.path.join(self.folder, "root") + save(conan_path, content) + conanfile = self.loader.load_conan(conan_path, self.output, consumer=True) + return conanfile + + def conan(self, conan_ref, content): + if isinstance(conan_ref, str): + conan_ref = ConanFileReference.loads(conan_ref) + conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE) + save(conan_path, content) + + def get_recipe(self, conan_ref): + conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE) + return conan_path + +say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" +""" + +say_content2 = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.2" +""" + +hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" +""" + +chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" +""" + +bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.1@diego/testing" +""" + +bye_content2 = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.2@diego/testing" +""" + +hello_ref = 
ConanFileReference.loads("Hello/1.2@diego/testing") +say_ref = ConanFileReference.loads("Say/0.1@diego/testing") +say_ref2 = ConanFileReference.loads("Say/0.2@diego/testing") +chat_ref = ConanFileReference.loads("Chat/2.3@diego/testing") +bye_ref = ConanFileReference.loads("Bye/0.2@diego/testing") + + +def _get_nodes(graph, name): + """ return all the nodes matching a particular name. Could be >1 in case + that private requirements embed different versions + """ + return [n for n in graph.nodes if n.conanfile.name == name] + + +Edge = namedtuple("Edge", "src dst") + + +def _get_edges(graph): + + edges = set() + for n in graph.nodes: + edges.update([Edge(n, neigh) for neigh in graph.neighbors(n)]) + return edges + + +class MockRequireResolver(object): + def resolve(self, rquire, conanref): # @UnusedVariable + return + + +class ConanRequirementsTest(unittest.TestCase): + + def setUp(self): + self.output = TestBufferConanOutput() + self.loader = ConanFileLoader(None, Settings.loads(""), None, + OptionsValues.loads(""), Scopes(), + env=[], package_env={}) + self.retriever = Retriever(self.loader, self.output) + self.builder = DepsGraphBuilder(self.retriever, self.output, self.loader, MockRequireResolver()) + + def root(self, content): + root_conan = self.retriever.root(content) + deps_graph = self.builder.load(None, root_conan) + return deps_graph + + def test_basic(self): + deps_graph = self.root(say_content) + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + self._check_say(node.conanfile) + + def _check_say(self, conanfile, version="0.1", options=""): + self.assertEqual(conanfile.version, version) + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), options) + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values_list, []) + 
self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), options) + self.assertEqual(conaninfo.full_options.dumps(), options) + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + def test_transitive(self): + self.retriever.conan(say_ref, say_content) + deps_graph = self.root(hello_content) + self.assertEqual(2, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say)}) + + self.assertEqual(say.conan_ref, say_ref) + self._check_say(say.conanfile) + + def _check_hello(self, hello, say_ref): + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "%s/%s" % (say_ref.name, say_ref.version)) + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9" % str(say_ref)) + + def test_transitive_two_levels(self): + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + 
self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref) + self.assertEqual(chat.conan_ref, None) + + self._check_say(say.conanfile) + self._check_hello(hello, say_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_diamond_no_conflict(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref) + self.assertEqual(chat.conan_ref, 
None) + self.assertEqual(bye.conan_ref, bye_ref) + + self._check_say(say.conanfile) + self._check_hello(hello, say_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_simple_override(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = ("Hello/1.2@diego/testing", + ("Say/0.2@diego/testing", "override")) +""" + + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self._check_say(say.conanfile, version="0.2") + self._check_hello(hello, say_ref2) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + 
self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + (str(say_ref2), "override"))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Hello/1.2@diego/testing:9d98d1ba7893ef6602e1d629b190a1d2a1100a65\n" + "Say/0.2@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_version_requires_change(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + + def conan_info(self): + hello_require = self.info.requires["Hello"] + hello_require.version = hello_require.full_version.minor() + say_require = self.info.requires["Say"] + say_require.name = say_require.full_name + say_require.version = hello_require.full_version.major() +""" + + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self._check_say(say.conanfile, version="0.1") + self._check_hello(hello, say_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), 
"") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Hello/1.2.Z\nSay/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_version_requires2_change(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + + def conan_info(self): + self.info.requires["Hello"].full_package() + self.info.requires["Say"].semver() +""" + + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self._check_say(say.conanfile, version="0.1") + self._check_hello(hello, say_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + 
self.assertEqual(conaninfo.requires.dumps(), + "Hello/1.2/diego/testing/0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1") + self.assertEqual(conaninfo.full_requires.dumps(), + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_diamond_conflict_error(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content2) + self.output.werror_active = True + with self.assertRaisesRegexp(ConanException, "Conflict in Bye/0.2@diego/testing"): + self.root(chat_content) + + def test_diamond_conflict(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content2) + deps_graph = self.root(chat_content) + + self.assertIn("""Conflict in Bye/0.2@diego/testing + Requirement Say/0.2@diego/testing conflicts with already defined Say/0.1@diego/testing + Keeping Say/0.1@diego/testing + To change it, override it in your base requirements""", self.output) + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref) 
+ self.assertEqual(bye.conan_ref, bye_ref) + + self._check_say(say.conanfile) + self._check_hello(hello, say_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_diamond_conflict_solved(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = ("Hello/1.2@diego/testing", "Bye/0.2@diego/testing", + ("Say/0.2@diego/testing", "override")) +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content2) + deps_graph = self.root(chat_content) + + self.assertIn("Hello/1.2@diego/testing requirement Say/0.1@diego/testing overriden by " + "your conanfile to Say/0.2@diego/testing", self.output) + self.assertNotIn("Conflict", self.output) + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + 
self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref2) + self.assertEqual(bye.conan_ref, bye_ref) + + self._check_say(say.conanfile, version="0.2") + self._check_hello(hello, say_ref2) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref), + (str(say_ref2), "override"))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:9d98d1ba7893ef6602e1d629b190a1d2a1100a65\n" + "Hello/1.2@diego/testing:9d98d1ba7893ef6602e1d629b190a1d2a1100a65\n" + "Say/0.2@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_basic_option(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} + default_options = "myoption=123" +""" + deps_graph = self.root(say_content) + self.assertEqual(1, len(deps_graph.nodes)) + say = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(_get_edges(deps_graph), set()) + + self._check_say(say.conanfile, options="myoption=123") + + def test_basic_transitive_option(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} + default_options = "myoption=123" 
+""" + + def _assert_conanfile(conanfile_content): + self.retriever.conan(say_ref, say_content) + deps_graph = self.root(conanfile_content) + + self.assertEqual(2, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say)}) + + self.assertEqual(say.conan_ref, say_ref) + self._check_say(say.conanfile, options="myoption=234") + + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=234") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values_list, []) + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=234") + self.assertEqual(conaninfo.requires.dumps(), "%s/%s" % (say_ref.name, say_ref.version)) + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:48bb3c5cbdb4822ae87914437ca3cceb733c7e1d" % str(say_ref)) + + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = [("Say:myoption", "234")] # To test list definition +""" + + _assert_conanfile(hello_content) + + hello_content_tuple = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=234", # To test tuple definition +""" + _assert_conanfile(hello_content_tuple) + + def test_transitive_two_levels_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = 
{"myoption": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + default_options = "Say:myoption=234" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref) + + self._check_say(say.conanfile, options="myoption=234") + + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=234") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=234") + self.assertEqual(conaninfo.requires.dumps(), "%s/%s" % (say_ref.name, say_ref.version)) + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:48bb3c5cbdb4822ae87914437ca3cceb733c7e1d" % str(say_ref)) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=234") + 
self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=234") + self.assertEqual(conaninfo.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "%s:48bb3c5cbdb4822ae87914437ca3cceb733c7e1d" + % (str(hello_ref), str(say_ref))) + + def test_transitive_two_levels_wrong_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + default_options = "Say:myoption2=234" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + + with self.assertRaises(ConanException) as cm: + self.root(chat_content) + self.assertEqual(str(cm.exception), + "Say/0.1@diego/testing: %s" % option_not_exist_msg("myoption2", + ['myoption'])) + + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + default_options = "Say:myoption=235" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + + with self.assertRaises(ConanException) as cm: + self.root(chat_content) + self.assertEqual(str(cm.exception), "Say/0.1@diego/testing: %s" + % 
option_wrong_value_msg("myoption", "235", ["123", "234"])) + + def test_diamond_no_conflict_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=234" +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=234" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self._check_say(say.conanfile, options="myoption=234") + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=234") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + 
self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=234") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:48bb3c5cbdb4822ae87914437ca3cceb733c7e1d") + + def test_diamond_conflict_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=234" +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=123" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + + self.output.werror_active = True + with self.assertRaisesRegexp(ConanException, "tried to change"): + self.root(chat_content) + + self.output.werror_active = False + deps_graph = self.root(chat_content) + + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self._check_say(say.conanfile, options="myoption=234") + self.assertIn("Bye/0.2@diego/testing tried to change Say/0.1@diego/testing " + 
"option myoption to 123 but it was already assigned to 234 " + "by Hello/1.2@diego/testing", str(self.output).replace("\n", " ")) + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + self._check_say(say.conanfile, options="myoption=234") + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=234") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=234") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:48bb3c5cbdb4822ae87914437ca3cceb733c7e1d") + + def test_diamond_conflict_options_solved(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=234" +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + 
version = "0.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:myoption=123" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" + default_options = "Say:myoption=123" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(self.output, "") + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + self._check_say(say.conanfile, options="myoption=123") + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "Say:myoption=123") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), + str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:myoption=123") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:e736d892567343489b1360fde797ad18a2911920") + + def test_conditional(self): + zlib_content = """ +from conans import ConanFile + +class 
ZlibConan(ConanFile): + name = "Zlib" + version = "2.1" +""" + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"zip": [True, False]} + + def requirements(self): + if self.options.zip: + self.requires("Zlib/2.1@diego/testing") +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:zip=True" +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.1@diego/testing" + default_options = "Say:zip=True" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + zlib_ref = ConanFileReference.loads("Zlib/2.1@diego/testing") + self.retriever.conan(zlib_ref, zlib_content) + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + + deps_graph = self.root(chat_content) + self.assertEqual(self.output, "") + self.assertEqual(5, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + zlib = _get_nodes(deps_graph, "Zlib")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), + Edge(chat, bye), Edge(say, zlib)}) + + conanfile = say.conanfile + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "zip=True") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(zlib_ref))) + + conaninfo = conanfile.info + 
self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "zip=True") + self.assertEqual(conaninfo.full_options.dumps(), "zip=True") + self.assertEqual(conaninfo.requires.dumps(), "Zlib/2.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Zlib/2.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + chat_content2 = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" + default_options = "Say:zip=False" +""" + deps_graph = self.root(chat_content2) + self.assertEqual(self.output, "") + self.assertEqual(4, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello), + Edge(bye, say), Edge(chat, bye)}) + + conanfile = say.conanfile + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "zip=False") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "zip=False") + self.assertEqual(conaninfo.full_options.dumps(), "zip=False") + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "Say:zip=False") + self.assertEqual(conanfile.settings.fields, []) + 
self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "Say:zip=False") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_transitive_private(self): + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "0.1" + requires = ("Say/0.1@diego/testing", "private"), +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = ("Say/0.2@diego/testing", "private"), +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" +""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(5, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say_nodes = sorted(_get_nodes(deps_graph, "Say")) + say1 = say_nodes[0] + say2 = say_nodes[1] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say1), Edge(chat, hello), + Edge(bye, say2), Edge(chat, bye)}) + self.assertEqual(hello.conanfile.name, "Hello") + self.assertEqual(hello.conan_ref, 
hello_ref) + self.assertEqual(say1.conanfile.name, "Say") + self.assertEqual(say1.conanfile.version, "0.1") + self.assertEqual(say1.conan_ref, say_ref) + self.assertEqual(say2.conanfile.name, "Say") + self.assertEqual(say2.conanfile.version, "0.2") + self.assertEqual(say2.conan_ref, say_ref2) + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(bye.conanfile.name, "Bye") + self.assertEqual(bye.conan_ref, bye_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "Bye/0.2@diego/testing:9d98d1ba7893ef6602e1d629b190a1d2a1100a65\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9\n" + "Say/0.2@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_transitive_diamond_private(self): + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = ("Say/0.1@diego/testing", "private"), +""" + bye_content = """ +from conans import ConanFile + +class ByeConan(ConanFile): + name = "Bye" + version = "0.2" + requires = "Say/0.1@diego/testing" +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" 
+""" + self.retriever.conan(say_ref, say_content) + self.retriever.conan(say_ref2, say_content2) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(5, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + bye = _get_nodes(deps_graph, "Bye")[0] + say_nodes = sorted(_get_nodes(deps_graph, "Say")) + say1 = say_nodes[0] + say2 = say_nodes[1] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertTrue((_get_edges(deps_graph) == {Edge(hello, say1), Edge(chat, hello), + Edge(bye, say2), Edge(chat, bye)}) or + (_get_edges(deps_graph) == {Edge(hello, say2), Edge(chat, hello), + Edge(bye, say1), Edge(chat, bye)}) + ) + self.assertEqual(hello.conanfile.name, "Hello") + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say1.conanfile.name, "Say") + self.assertEqual(say1.conanfile.version, "0.1") + self.assertEqual(say1.conan_ref, say_ref) + self.assertEqual(say2.conanfile.name, "Say") + self.assertEqual(say2.conanfile.version, "0.1") + self.assertEqual(say2.conan_ref, say_ref) + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(bye.conanfile.name, "Bye") + self.assertEqual(bye.conan_ref, bye_ref) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref), str(bye_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "") + self.assertEqual(conaninfo.full_options.dumps(), "") + self.assertEqual(conaninfo.requires.dumps(), "Bye/0.2\nHello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + 
"Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_dep_requires_clear(self): + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + + def conan_info(self): + self.info.requires.clear() +""" + + self.retriever.conan(say_ref, say_content) + deps_graph = self.root(hello_content) + + self.assertEqual(2, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + self.assertEqual(hello.conanfile.name, "Hello") + self.assertEqual(hello.conanfile.info.requires.dumps(), "") + self.assertEqual(hello.conanfile.info.full_requires.dumps(), + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_remove_build_requires(self): + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + + def conan_info(self): + self.info.requires.remove("Say") +""" + + self.retriever.conan(say_ref, say_content) + deps_graph = self.root(hello_content) + + self.assertEqual(2, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + self.assertEqual(hello.conanfile.name, "Hello") + self.assertEqual(hello.conanfile.info.requires.dumps(), "") + self.assertEqual(hello.conanfile.info.full_requires.dumps(), + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_remove_two_build_requires(self): + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "1.2" + requires = "Hello/1.2@diego/testing", "Bye/0.2@diego/testing" + + def conan_info(self): + self.info.requires.remove("Bye", "Hello") +""" + + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + self.retriever.conan(bye_ref, 
bye_content) + deps_graph = self.root(chat_content) + + self.assertEqual(4, len(deps_graph.nodes)) + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(chat.conanfile.info.requires.dumps(), "") + self.assertEqual(chat.conanfile.info.full_requires.dumps(), + "Bye/0.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + + def test_propagate_indirect_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"shared": [True, False]} + default_options = "shared=False" +""" + + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + options = {"shared": [True, False]} + default_options = "shared=True" + + def conan_info(self): + if self.options.shared: + self.info.options["Say"] = self.info.full_options["Say"] +""" + + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + options = {"shared": [True, False]} + default_options = "shared=True" + + def conan_info(self): + if self.options.shared: + self.info.options["Hello"] = self.info.full_options["Hello"] + self.info.options["Say"].shared = self.info.full_options["Say"].shared +""" + + self.retriever.conan(say_ref, say_content) + self.retriever.conan(hello_ref, hello_content) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(chat.conanfile.info.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(chat.conanfile.info.full_requires.dumps(), + 
"Hello/1.2@diego/testing:93c0f28f41be7e2dfe12fd6fb93dac72c77cc0d9\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertEqual(chat.conanfile.info.options.dumps(), + "shared=True\nHello:shared=True\nSay:shared=False") + + # Now change the chat content + deps_graph = self.root(chat_content.replace("shared=True", "shared=False")) + + self.assertEqual(3, len(deps_graph.nodes)) + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(chat.conanfile.info.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(chat.conanfile.info.full_requires.dumps(), + "Hello/1.2@diego/testing:93c0f28f41be7e2dfe12fd6fb93dac72c77cc0d9\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertEqual(chat.conanfile.info.options.dumps(), "shared=False") + + # Now change the hello content + self.retriever.conan(hello_ref, hello_content.replace("shared=True", "shared=False")) + deps_graph = self.root(chat_content) + + self.assertEqual(3, len(deps_graph.nodes)) + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(chat.conanfile.name, "Chat") + self.assertEqual(chat.conanfile.info.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(chat.conanfile.info.full_requires.dumps(), + "Hello/1.2@diego/testing:0b09634eb446bffb8d3042a3f19d813cfc162b9d\n" + "Say/0.1@diego/testing:5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + self.assertEqual(chat.conanfile.info.options.dumps(), + "shared=True\nHello:shared=False\nSay:shared=False") + + +class CoreSettingsTest(unittest.TestCase): + + def setUp(self): + self.output = TestBufferConanOutput() + + def root(self, content, options="", settings=""): + full_settings = Settings.loads(default_settings_yml) + full_settings.values = Values.loads(settings) + options = OptionsValues.loads(options) + loader = ConanFileLoader(None, full_settings, None, options, Scopes(), + env=None, package_env=None) + retriever = Retriever(loader, self.output) + builder = 
DepsGraphBuilder(retriever, self.output, loader, MockRequireResolver()) + root_conan = retriever.root(content) + deps_graph = builder.load(None, root_conan) + return deps_graph + + def test_basic(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os" + options = {"myoption": [1, 2, 3]} + + def conan_info(self): + self.info.settings.os = "Win" + self.info.options.myoption = "1,2,3" +""" + deps_graph = self.root(content, options="myoption=2", settings="os=Windows") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + def check(conanfile, options, settings): + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), options) + self.assertEqual(conanfile.settings.fields, ["os"]) + self.assertEqual(conanfile.settings.values.dumps(), settings) + self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "os=Win") + self.assertEqual(conaninfo.full_settings.dumps(), settings) + self.assertEqual(conaninfo.options.dumps(), "myoption=1,2,3") + self.assertEqual(conaninfo.full_options.dumps(), options) + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + self.assertEqual(conaninfo.package_id(), "6a3d66035e2dcbcfd16d5541b40785c01487c2f9") + + check(conanfile, "myoption=2", "os=Windows") + + deps_graph = self.root(content, options="myoption=1", settings="os=Linux") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + + conanfile = node.conanfile + check(conanfile, "myoption=1", "os=Linux") + + def test_errors(self): + with 
self.assertRaisesRegexp(ConanException, "root: No subclass of ConanFile"): + self.root("") + + with self.assertRaisesRegexp(ConanException, "root: More than 1 conanfile in the file"): + self.root("""from conans import ConanFile +class HelloConan(ConanFile):pass +class ByeConan(ConanFile):pass""") + + def test_config(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os" + options = {"myoption": [1, 2, 3]} + + def config(self): + if self.settings.os == "Linux": + self.options.clear() +""" + deps_graph = self.root(content, options="myoption=2", settings="os=Windows") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + def check(conanfile, options, settings): + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), options) + self.assertEqual(conanfile.settings.fields, ["os"]) + self.assertEqual(conanfile.settings.values.dumps(), settings) + self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), settings) + self.assertEqual(conaninfo.full_settings.dumps(), settings) + self.assertEqual(conaninfo.options.dumps(), options) + self.assertEqual(conaninfo.full_options.dumps(), options) + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + check(conanfile, "myoption=2", "os=Windows") + + deps_graph = self.root(content, options="myoption=1", settings="os=Linux") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + + conanfile = node.conanfile + check(conanfile, "", "os=Linux") + + def test_config_remove(self): + content = """ +from conans import 
ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os", "arch" + options = {"arch_independent": [True, False]} + + def config(self): + if self.options.arch_independent: + self.settings.remove("arch") + self.settings.os.remove("Linux") +""" + deps_graph = self.root(content, options="arch_independent=True", settings="os=Windows") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + def check(conanfile, options, settings): + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), options) + self.assertEqual(conanfile.settings.fields, ["os"]) + self.assertEqual(conanfile.settings.values.dumps(), settings) + self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), settings) + self.assertEqual(conaninfo.full_settings.dumps(), settings) + self.assertEqual(conaninfo.options.dumps(), options) + self.assertEqual(conaninfo.full_options.dumps(), options) + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + check(conanfile, "arch_independent=True", "os=Windows") + + with self.assertRaises(ConanException) as cm: + self.root(content, options="arch_independent=True", settings="os=Linux") + self.assertIn(bad_value_msg("settings.os", "Linux", + ['Android', 'FreeBSD', 'Macos', 'SunOS', "Windows", "iOS"]), + str(cm.exception)) + + def test_config_remove2(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os", "arch", "compiler" + + def config(self): + del self.settings.compiler.version +""" + deps_graph = self.root(content, settings="os=Windows\n compiler=gcc\narch=x86\n" + "compiler.libcxx=libstdc++") + 
self.assertIn("WARN: config() has been deprecated. Use config_options and configure", + self.output) + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "") + self.assertEqual(conanfile.settings.fields, ["arch", "compiler", "os"]) + self.assertNotIn("compiler.version", conanfile.settings.values.dumps()) + self.assertEqual(conanfile.requires, Requirements()) + + def test_new_configure(self): + content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + settings = "os" + options = {"shared": [True, False], "header_only": [True, False],} + default_options = "shared=False", "header_only=True" + + def config_options(self): + if self.settings.os == "Windows": + del self.options.shared + + def configure(self): + if self.options.header_only: + self.settings.clear() + del self.options.shared +""" + deps_graph = self.root(content, settings="os=Linux") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "header_only=True") + self.assertNotIn(conanfile.options.values.dumps(), "shared") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.requires, Requirements()) + + # in lib mode, there is OS and shared + deps_graph = self.root(content, settings="os=Linux", options="header_only=False") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] 
+ self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "header_only=False\nshared=False") + self.assertNotIn(conanfile.options.values.dumps(), "shared") + self.assertEqual(conanfile.settings.fields, ["os"]) + self.assertEqual(conanfile.requires, Requirements()) + + # In windows there is no shared option + deps_graph = self.root(content, settings="os=Windows", options="header_only=False") + self.assertEqual(_get_edges(deps_graph), set()) + self.assertEqual(1, len(deps_graph.nodes)) + node = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(node.conan_ref, None) + conanfile = node.conanfile + + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "header_only=False") + self.assertNotIn(conanfile.options.values.dumps(), "shared") + self.assertEqual(conanfile.settings.fields, ["os"]) + self.assertEqual(conanfile.requires, Requirements()) + + def test_transitive_two_levels_options(self): + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "0.1" + options = {"myoption_say": [123, 234]} +""" + hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/0.1@diego/testing" + options = {"myoption_hello": [True, False]} +""" + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.2@diego/testing" + options = {"myoption_chat": ["on", "off"]} +""" + output = TestBufferConanOutput() + loader = ConanFileLoader(None, Settings.loads(""), None, + OptionsValues.loads("Say:myoption_say=123\n" + "Hello:myoption_hello=True\n" + "myoption_chat=on"), + Scopes(), env=None, package_env=None) + retriever = Retriever(loader, output) + builder = 
DepsGraphBuilder(retriever, output, loader, MockRequireResolver()) + retriever.conan(say_ref, say_content) + retriever.conan(hello_ref, hello_content) + + root_conan = retriever.root(chat_content) + deps_graph = builder.load(None, root_conan) + + self.assertEqual(3, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say), Edge(chat, hello)}) + + self.assertEqual(hello.conan_ref, hello_ref) + self.assertEqual(say.conan_ref, say_ref) + + conanfile = say.conanfile + self.assertEqual(conanfile.version, "0.1") + self.assertEqual(conanfile.name, "Say") + self.assertEqual(conanfile.options.values.dumps(), "myoption_say=123") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values_list, []) + self.assertEqual(conanfile.requires, Requirements()) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "myoption_say=123") + self.assertEqual(conaninfo.full_options.dumps(), "myoption_say=123") + self.assertEqual(conaninfo.requires.dumps(), "") + self.assertEqual(conaninfo.full_requires.dumps(), "") + + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + self.assertEqual(conanfile.options.values.dumps(), + "myoption_hello=True\nSay:myoption_say=123") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "myoption_hello=True") + self.assertEqual(conaninfo.full_options.dumps(), + 
"myoption_hello=True\nSay:myoption_say=123") + self.assertEqual(conaninfo.requires.dumps(), "%s/%s" % (say_ref.name, say_ref.version)) + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:751fd69d10b2a54fdd8610cdae748d6b22700841" % str(say_ref)) + + conanfile = chat.conanfile + self.assertEqual(conanfile.version, "2.3") + self.assertEqual(conanfile.name, "Chat") + self.assertEqual(conanfile.options.values.dumps(), + "myoption_chat=on\nHello:myoption_hello=True\nSay:myoption_say=123") + self.assertEqual(conanfile.settings.fields, []) + self.assertEqual(conanfile.settings.values.dumps(), "") + self.assertEqual(conanfile.requires, Requirements(str(hello_ref))) + + conaninfo = conanfile.info + self.assertEqual(conaninfo.settings.dumps(), "") + self.assertEqual(conaninfo.full_settings.dumps(), "") + self.assertEqual(conaninfo.options.dumps(), "myoption_chat=on") + self.assertEqual(conaninfo.full_options.dumps(), + "myoption_chat=on\nHello:myoption_hello=True\nSay:myoption_say=123") + self.assertEqual(conaninfo.requires.dumps(), "Hello/1.Y.Z") + self.assertEqual(conaninfo.full_requires.dumps(), + "%s:95c360996106af45b8eec11a37df19fda39a5880\n" + "%s:751fd69d10b2a54fdd8610cdae748d6b22700841" + % (str(hello_ref), str(say_ref))) diff --git a/testbed/conan-io__conan/conans/test/model/values_test.py b/testbed/conan-io__conan/conans/test/model/values_test.py new file mode 100644 index 0000000000000000000000000000000000000000..defdb6aecb2d356aafd6bc3d114d8a916bb2c959 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/values_test.py @@ -0,0 +1,26 @@ +import unittest +from conans.model.values import Values + + +class ValuesTest(unittest.TestCase): + + def simple_test(self): + v = Values() + self.assertEqual(v.compiler, None) + v.compiler = 3 + self.assertTrue(v.compiler == "3") + + self.assertEqual(v.compiler.version, None) + v.compiler.version = "asfaf" + self.assertEqual(v.compiler.version, "asfaf") + + my_list = v.as_list() + self.assertEqual(my_list, 
[('compiler', '3'), + ('compiler.version', 'asfaf')]) + + values = Values.from_list(my_list) + self.assertEqual(values.dumps(), v.dumps()) + + v.compiler = None + self.assertEqual(v.as_list(), [('compiler', 'None')]) + self.assertEqual(v.dumps(), "compiler=None") diff --git a/testbed/conan-io__conan/conans/test/model/version_ranges_test.py b/testbed/conan-io__conan/conans/test/model/version_ranges_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b1c61276ad082369e8ab6bab4d0734bc164e2b01 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/version_ranges_test.py @@ -0,0 +1,326 @@ +import unittest +from conans.test.tools import TestBufferConanOutput +from conans.paths import CONANFILE +import os +from conans.client.deps_builder import DepsGraphBuilder +from conans.model.ref import ConanFileReference +from conans.model.options import OptionsValues +from conans.client.loader import ConanFileLoader +from conans.util.files import save, list_folder_subdirs +from conans.model.settings import Settings +from conans.model.requires import Requirements +from conans.test.utils.test_files import temp_folder +from collections import namedtuple +from conans.model.scope import Scopes +from conans.client.require_resolver import RequireResolver, satisfying +import re +from nose_parameterized import parameterized + + +class BasicMaxVersionTest(unittest.TestCase): + def basic_test(self): + output = TestBufferConanOutput() + result = satisfying(["1.1", "1.2", "1.3", "2.1"], "", output) + self.assertEqual(result, "2.1") + result = satisfying(["1.1", "1.2", "1.3", "2.1"], "1", output) + self.assertEqual(result, "1.3") + result = satisfying(["1.1", "1.2", "1.3", "2.1"], "1.1", output) + self.assertEqual(result, "1.1") + result = satisfying(["1.1", "1.2", "1.3", "2.1"], ">1.1", output) + self.assertEqual(result, "2.1") + result = satisfying(["1.1", "1.2", "1.3", "2.1"], "<1.3", output) + self.assertEqual(result, "1.2") + result = satisfying(["1.1", "1.2", 
"1.3", "2.1"], "<=1.3", output) + self.assertEqual(result, "1.3") + result = satisfying(["1.1", "1.2", "1.3", "2.1"], ">1.1,<2.1", output) + self.assertEqual(result, "1.3") + result = satisfying(["1.1.1", "1.1.2", "1.2.1", "1.3", "2.1"], "<1.2", output) + self.assertEqual(result, "1.1.2") + result = satisfying(["1.1.1", "1.1.2", "1.2.1", "1.3", "2.1"], "<1.2.1", output) + self.assertEqual(result, "1.1.2") + result = satisfying(["1.1.1", "1.1.2", "1.2.1", "1.3", "2.1"], "<=1.2.1", output) + self.assertEqual(result, "1.2.1") + result = satisfying(["1.6.1"], ">1.5.0,<1.6.8", output) + self.assertEqual(result, "1.6.1") + result = satisfying(["1.1.1", "1.1.2", "1.2", "1.2.1", "1.3", "2.1"], "<=1.2", output) + self.assertEqual(result, "1.2") + result = satisfying(["1.1.1", "1.1.2", "1.2", "1.2.1", "1.3", "2.1"], "<1.3", output) + self.assertEqual(result, "1.2.1") + result = satisfying(["1.a.1", "master", "X.2", "1.2.1", "1.3", "2.1"], "1.3", output) + self.assertIn("Version 'master' is not semver", output) + self.assertEqual(result, "1.3") + result = satisfying(["1.1.1", "1.1.2", "1.2", "1.2.1", "1.3", "2.1"], "1.8||1.3", output) + self.assertEqual(result, "1.3") + + result = satisfying(["1.3", "1.3.1"], "<1.3", output) + self.assertEqual(result, None) + result = satisfying(["1.3.0", "1.3.1"], "<1.3", output) + self.assertEqual(result, None) + result = satisfying(["1.3", "1.3.1"], "<=1.3", output) + self.assertEqual(result, "1.3") + result = satisfying(["1.3.0", "1.3.1"], "<=1.3", output) + self.assertEqual(result, "1.3.0") + # >2 means >=3.0.0-0 + result = satisfying(["2.1"], ">2", output) + self.assertEqual(result, None) + result = satisfying(["2.1"], ">2.0", output) + self.assertEqual(result, "2.1") + # >2.1 means >=2.2.0-0 + result = satisfying(["2.1.1"], ">2.1", output) + self.assertEqual(result, None) + result = satisfying(["2.1.1"], ">2.1.0", output) + self.assertEqual(result, "2.1.1") + + +class Retriever(object): + def __init__(self, loader, output): + 
self.loader = loader + self.output = output + self.folder = temp_folder() + + def root(self, content): + conan_path = os.path.join(self.folder, "root") + save(conan_path, content) + conanfile = self.loader.load_conan(conan_path, self.output, consumer=True) + return conanfile + + def conan(self, conan_ref, content): + if isinstance(conan_ref, str): + conan_ref = ConanFileReference.loads(conan_ref) + conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE) + save(conan_path, content) + + def get_recipe(self, conan_ref): + conan_path = os.path.join(self.folder, "/".join(conan_ref), CONANFILE) + return conan_path + + def search(self, pattern): + from fnmatch import translate + pattern = translate(pattern) + pattern = re.compile(pattern) + + subdirs = list_folder_subdirs(basedir=self.folder, level=4) + + if not pattern: + result = [ConanFileReference(*folder.split("/")) for folder in subdirs] + else: + result = [] + for subdir in subdirs: + conan_ref = ConanFileReference(*subdir.split("/")) + if pattern: + if pattern.match(str(conan_ref)): + result.append(conan_ref) + return sorted(result) + + +hello_content = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2" + requires = "Say/[%s]@memsharded/testing" +""" + + +def _get_nodes(graph, name): + """ return all the nodes matching a particular name. 
Could be >1 in case + that private requirements embed different versions + """ + return [n for n in graph.nodes if n.conanfile.name == name] + + +Edge = namedtuple("Edge", "src dst") + + +def _get_edges(graph): + edges = set() + for n in graph.nodes: + edges.update([Edge(n, neigh) for neigh in graph.neighbors(n)]) + return edges + + +class MockSearchRemote(object): + def __init__(self, packages=None): + self.packages = packages or [] + + def search_remotes(self, pattern): # @UnusedVariable + return self.packages + + +class VersionRangesTest(unittest.TestCase): + + def setUp(self): + self.output = TestBufferConanOutput() + self.loader = ConanFileLoader(None, Settings.loads(""), None, + OptionsValues.loads(""), Scopes(), None, None) + self.retriever = Retriever(self.loader, self.output) + self.remote_search = MockSearchRemote() + self.resolver = RequireResolver(self.output, self.retriever, self.remote_search) + self.builder = DepsGraphBuilder(self.retriever, self.output, self.loader, self.resolver) + + for v in ["0.1", "0.2", "0.3", "1.1", "1.1.2", "1.2.1", "2.1", "2.2.1"]: + say_content = """ +from conans import ConanFile + +class SayConan(ConanFile): + name = "Say" + version = "%s" +""" % v + say_ref = ConanFileReference.loads("Say/%s@memsharded/testing" % v) + self.retriever.conan(say_ref, say_content) + + def root(self, content): + root_conan = self.retriever.root(content) + deps_graph = self.builder.load(None, root_conan) + return deps_graph + + def test_local_basic(self): + for expr, solution in [(">0.0", "2.2.1"), + (">0.1,<1", "0.3"), + (">0.1,<1||2.1", "2.1"), + ("", "2.2.1"), + ("~0", "0.3"), + ("~=1", "1.2.1"), + ("~1.1", "1.1.2"), + ("~=2", "2.2.1"), + ("~=2.1", "2.1"), + ]: + deps_graph = self.root(hello_content % expr) + + self.assertEqual(2, len(deps_graph.nodes)) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + self.assertEqual(_get_edges(deps_graph), {Edge(hello, say)}) + + self.assertEqual(hello.conan_ref, 
None) + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + say_ref = ConanFileReference.loads("Say/%s@memsharded/testing" % solution) + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + def test_remote_basic(self): + self.resolver._local_search = None + remote_packages = [] + for v in ["0.1", "0.2", "0.3", "1.1", "1.1.2", "1.2.1", "2.1", "2.2.1"]: + say_ref = ConanFileReference.loads("Say/%s@memsharded/testing" % v) + remote_packages.append(say_ref) + self.remote_search.packages = remote_packages + self.test_local_basic() + + @parameterized.expand([("", "0.3", None, None), + ('"Say/1.1@memsharded/testing"', "1.1", False, False), + ('"Say/0.2@memsharded/testing"', "0.2", False, True), + ('("Say/1.1@memsharded/testing", "override")', "1.1", True, False), + ('("Say/0.2@memsharded/testing", "override")', "0.2", True, True), + # ranges + ('"Say/[<=1.2]@memsharded/testing"', "1.1.2", False, False), + ('"Say/[>=0.2,<=1.0]@memsharded/testing"', "0.3", False, True), + ('("Say/[<=1.2]@memsharded/testing", "override")', "1.1.2", True, False), + ('("Say/[>=0.2,<=1.0]@memsharded/testing", "override")', "0.3", True, True), + ]) + def transitive_test(self, version_range, solution, override, valid): + hello_text = hello_content % ">0.1, <1" + hello_ref = ConanFileReference.loads("Hello/1.0@memsharded/testing") + self.retriever.conan(hello_ref, hello_text) + + chat_content = """ +from conans import ConanFile + +class ChatConan(ConanFile): + name = "Chat" + version = "2.3" + requires = "Hello/1.0@memsharded/testing", %s +""" + + deps_graph = self.root(chat_content % version_range) + hello = _get_nodes(deps_graph, "Hello")[0] + say = _get_nodes(deps_graph, "Say")[0] + chat = _get_nodes(deps_graph, "Chat")[0] + edges = {Edge(hello, say), Edge(chat, hello)} + if override is not None: + self.assertIn("override", self.output) + else: + self.assertNotIn("override", self.output) + if override is False: + 
edges = {Edge(hello, say), Edge(chat, say), Edge(chat, hello)} + + if valid is True: + self.assertIn(" valid", self.output) + elif valid is False: + self.assertIn("not valid", self.output) + self.assertEqual(3, len(deps_graph.nodes)) + + self.assertEqual(_get_edges(deps_graph), edges) + + self.assertEqual(hello.conan_ref, hello_ref) + conanfile = hello.conanfile + self.assertEqual(conanfile.version, "1.2") + self.assertEqual(conanfile.name, "Hello") + say_ref = ConanFileReference.loads("Say/%s@memsharded/testing" % solution) + self.assertEqual(conanfile.requires, Requirements(str(say_ref))) + + def duplicated_error_test(self): + content = """ +from conans import ConanFile + +class Log3cppConan(ConanFile): + name = "log4cpp" + version = "1.1.1" +""" + log4cpp_ref = ConanFileReference.loads("log4cpp/1.1.1@memsharded/testing") + self.retriever.conan(log4cpp_ref, content) + + content = """ +from conans import ConanFile + +class LoggerInterfaceConan(ConanFile): + name = "LoggerInterface" + version = "0.1.1" + + def requirements(self): + self.requires("log4cpp/[~1.1]@memsharded/testing") +""" + logiface_ref = ConanFileReference.loads("LoggerInterface/0.1.1@memsharded/testing") + self.retriever.conan(logiface_ref, content) + + content = """ +from conans import ConanFile + +class OtherConan(ConanFile): + name = "other" + version = "2.0.11549" + requires = "LoggerInterface/[~0.1]@memsharded/testing" +""" + other_ref = ConanFileReference.loads("other/2.0.11549@memsharded/testing") + self.retriever.conan(other_ref, content) + + content = """ +from conans import ConanFile + +class Project(ConanFile): + requires = "LoggerInterface/[~0.1]@memsharded/testing", "other/[~2.0]@memsharded/testing" +""" + deps_graph = self.root(content) + + log4cpp = _get_nodes(deps_graph, "log4cpp")[0] + logger_interface = _get_nodes(deps_graph, "LoggerInterface")[0] + other = _get_nodes(deps_graph, "other")[0] + + self.assertEqual(4, len(deps_graph.nodes)) + + self.assertEqual(log4cpp.conan_ref, 
log4cpp_ref) + conanfile = log4cpp.conanfile + self.assertEqual(conanfile.version, "1.1.1") + self.assertEqual(conanfile.name, "log4cpp") + + self.assertEqual(logger_interface.conan_ref, logiface_ref) + conanfile = logger_interface.conanfile + self.assertEqual(conanfile.version, "0.1.1") + self.assertEqual(conanfile.name, "LoggerInterface") + + self.assertEqual(other.conan_ref, other_ref) + conanfile = other.conanfile + self.assertEqual(conanfile.version, "2.0.11549") + self.assertEqual(conanfile.name, "other") diff --git a/testbed/conan-io__conan/conans/test/model/version_test.py b/testbed/conan-io__conan/conans/test/model/version_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2d8b22d5a1d5dfc90f87735217741e7087aa183d --- /dev/null +++ b/testbed/conan-io__conan/conans/test/model/version_test.py @@ -0,0 +1,37 @@ +import unittest +from conans.model.version import Version + + +class VersionTest(unittest.TestCase): + + def simple_test(self): + v1 = Version("1.2.3") + self.assertTrue(v1 == "1.2.3") + self.assertTrue(v1 > "1.1") + self.assertTrue(v1 > None) + self.assertTrue(v1 < "1.11") + self.assertTrue(v1 > "1.2") + self.assertTrue(v1 > "1.2.2.2") + self.assertTrue(v1 < "1.2.3.2") + self.assertEqual(v1.major(), "1.Y.Z") # 1.X.Y + self.assertEqual(v1.minor(), "1.2.Z") # 1.2.Y + self.assertTrue(v1.compatible("1.X")) + self.assertTrue(v1.compatible("1.2.Y")) + self.assertFalse(v1.compatible("0.X")) + v2 = v1.minor() + self.assertTrue(v2.compatible("1.X")) + self.assertTrue(v2.compatible("1.2.3.4")) + self.assertFalse(v2.compatible("1.3.3.4")) + + v1 = Version("1.2.rc1") + self.assertTrue(v1 < "1.2.0") + self.assertFalse(v1 < "1.1.9") + + self.assertTrue(Version("1.2.1-dev") < Version("1.2.1")) + self.assertTrue(Version("1.2.1-dev") < Version("1.2.2")) + self.assertTrue(Version("1.2.1-dev") < Version("1.3")) + self.assertTrue(Version("1.2.1-dev") < Version("1.3-alpha")) + self.assertTrue(Version("1.2.1-dev") > Version("1.2.0")) + 
self.assertTrue(Version("1.2.1-dev") > Version("1.2")) + self.assertTrue(Version("1.2.1-dev") > Version("1.2.alpha")) + self.assertTrue(Version("1.2.1-dev") > Version("1.2-alpha")) diff --git a/testbed/conan-io__conan/conans/test/options_caching_test.py b/testbed/conan-io__conan/conans/test/options_caching_test.py new file mode 100644 index 0000000000000000000000000000000000000000..bb957f7cf1968500f8141e31d490085591f8258f --- /dev/null +++ b/testbed/conan-io__conan/conans/test/options_caching_test.py @@ -0,0 +1,40 @@ +import unittest +from conans.test.tools import TestClient +from conans.paths import CONANINFO +from conans.util.files import load +import os + + +class OptionCachingTest(unittest.TestCase): + + def basic_test(self): + client = TestClient() + zlib = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "zlib" + version = "0.1" + options = {"shared": [True, False]} + default_options= "shared=False" +''' + + client.save({"conanfile.py": zlib}) + client.run("export lasote/testing") + + project = """[requires] +zlib/0.1@lasote/testing +""" + client.save({"conanfile.txt": project}, clean_first=True) + + client.run("install -o zlib:shared=True --build=missing") + self.assertIn("zlib/0.1@lasote/testing:2a623e3082a38f90cd2c3d12081161412de331b0", + client.user_io.out) + conaninfo = load(os.path.join(client.current_folder, CONANINFO)) + self.assertIn("zlib:shared=True", conaninfo) + + client.run("install --build=missing") + self.assertIn("zlib/0.1@lasote/testing:2a623e3082a38f90cd2c3d12081161412de331b0", + client.user_io.out) + conaninfo = load(os.path.join(client.current_folder, CONANINFO)) + self.assertIn("zlib:shared=True", conaninfo) diff --git a/testbed/conan-io__conan/conans/test/options_in_requirements_test.py b/testbed/conan-io__conan/conans/test/options_in_requirements_test.py new file mode 100644 index 0000000000000000000000000000000000000000..010d72993ac9575dc99a8bbd8e3391e70335c566 --- /dev/null +++ 
b/testbed/conan-io__conan/conans/test/options_in_requirements_test.py @@ -0,0 +1,51 @@ +import unittest +from conans.test.tools import TestClient +from conans.paths import CONANINFO +from conans.util.files import load +import os + + +class ChangeOptionsInRequirementsTest(unittest.TestCase): + """ This test serves to check that the requirements() method can also define + options for its dependencies, just in case they were just added + """ + + def basic_test(self): + client = TestClient() + zlib = ''' +from conans import ConanFile + +class ConanLib(ConanFile): + name = "zlib" + version = "0.1" + options = {"shared": [True, False]} + default_options= "shared=False" +''' + + files = {"conanfile.py": zlib} + client.save(files) + client.run("export lasote/testing") + + boost = """from conans import ConanFile +from conans import tools +import platform, os, sys + +class BoostConan(ConanFile): + name = "BoostDbg" + version = "1.0" + options = {"shared": [True, False]} + default_options = "shared=False" + + def requirements(self): + self.requires("zlib/0.1@lasote/testing") + self.options["zlib"].shared = self.options.shared +""" + files = {"conanfile.py": boost} + client.save(files, clean_first=True) + client.run("export lasote/testing") + + files = {"conanfile.txt": "[requires]\nBoostDbg/1.0@lasote/testing"} + client.save(files, clean_first=True) + client.run("install -o BoostDbg:shared=True --build=missing") + conaninfo = load(os.path.join(client.current_folder, CONANINFO)) + self.assertIn("zlib:shared=True", conaninfo) diff --git a/testbed/conan-io__conan/conans/test/output_test.py b/testbed/conan-io__conan/conans/test/output_test.py new file mode 100644 index 0000000000000000000000000000000000000000..69cd098f617270759eee76985d543abc713f9f80 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/output_test.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +import unittest +from conans.client.output import ConanOutput +from six import StringIO +from 
conans.client.rest.uploader_downloader import print_progress +from conans.test.utils.test_files import temp_folder +from conans import tools +import zipfile +import os +from conans.util.files import save, load +import sys +from conans.test.tools import TestClient + + +class OutputTest(unittest.TestCase): + + def simple_output_test(self): + stream = StringIO() + output = ConanOutput(stream) + output.rewrite_line("This is a very long line that has to be truncated somewhere, " + "because it is so long it doesn't fit in the output terminal") + self.assertIn("This is a very long line that ha ... esn't fit in the output terminal", + stream.getvalue()) + + def error_test(self): + client = TestClient() + conanfile = """ +# -*- coding: utf-8 -*- + +from conans import ConanFile +from conans.errors import ConanException + +class PkgConan(ConanFile): + def source(self): + self.output.info("TEXT ÑÜíóúéáàèòù абвгдежзийкл 做戏之说 ENDTEXT") +""" + client.save({"conanfile.py": conanfile}) + client.run("source") + self.assertIn("TEXT", client.user_io.out) + self.assertIn("ENDTEXT", client.user_io.out) + + def print_progress_test(self): + stream = StringIO() + output = ConanOutput(stream) + for units in range(50): + print_progress(output, units) + output_str = stream.getvalue() + self.assertNotIn("=", output_str) + self.assertNotIn("[", output_str) + self.assertNotIn("]", output_str) + + def unzip_output_test(self): + tmp_dir = temp_folder() + file_path = os.path.join(tmp_dir, "example.txt") + save(file_path, "Hello world!") + + zip_path = os.path.join(tmp_dir, 'example.zip') + zipf = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) + for root, _, files in os.walk(tmp_dir): + for f in files: + zipf.write(os.path.join(root, f), f) + zipf.close() + + output_dir = os.path.join(tmp_dir, "output_dir") + new_out = StringIO() + old_out = sys.stdout + try: + sys.stdout = new_out + tools.unzip(zip_path, output_dir) + finally: + sys.stdout = old_out + + output = new_out.getvalue() + 
self.assertRegexpMatches(output, "Unzipping [\d]+ bytes, this can take a while") + content = load(os.path.join(output_dir, "example.txt")) + self.assertEqual(content, "Hello world!") diff --git a/testbed/conan-io__conan/conans/test/package_copier_test.py b/testbed/conan-io__conan/conans/test/package_copier_test.py new file mode 100644 index 0000000000000000000000000000000000000000..ab2d874d712b068348d0fd4a50b407363369cbc8 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/package_copier_test.py @@ -0,0 +1,126 @@ +import unittest +from conans.test.utils.test_files import temp_folder +from conans.test.tools import TestBufferConanOutput +from conans.client.userio import UserIO +import sys +from conans.util.files import mkdir, save, load +from conans.model.ref import ConanFileReference, PackageReference +from conans.paths import SimplePaths +import os +from conans.client.package_copier import PackageCopier + + +class MockedBooleanUserIO(UserIO): + + def __init__(self, answer, ins=sys.stdin, out=None): + self.answer = answer + UserIO.__init__(self, ins, out) + + def request_boolean(self, msg, default_option=None): + self.out.info(msg) + return self.answer + + +class PackageCopierTest(unittest.TestCase): + + def testCopier(self): + output = TestBufferConanOutput() + userio = MockedBooleanUserIO(True, out=output) + paths = SimplePaths(temp_folder()) + copier = PackageCopier(paths, userio) + + # Create some packages to copy + reference = ConanFileReference.loads("Hello/0.1@lasote/testing") + self._create_conanfile(reference, paths) + self._create_package(reference, "0101001", paths) + self._create_package(reference, "2222222", paths) + + # Copy all to destination + copier.copy(reference, ["0101001", "2222222"], "lasote", "stable", force=False) + new_reference = ConanFileReference.loads("Hello/0.1@lasote/stable") + self._assert_conanfile_exists(new_reference, paths) + self._assert_package_exists(new_reference, "0101001", paths) + 
self._assert_package_exists(new_reference, "2222222", paths) + self.assertIn("Copied Hello/0.1@lasote/testing to Hello/0.1@lasote/stable", output) + self.assertIn("Copied 0101001 to Hello/0.1@lasote/stable", output) + self.assertIn("Copied 2222222 to Hello/0.1@lasote/stable", output) + + # Copy again, without force and answering yes + output._stream.truncate(0) # Reset output + copier.copy(reference, ["0101001", "2222222"], "lasote", "stable", force=False) + self.assertIn("Copied Hello/0.1@lasote/testing to Hello/0.1@lasote/stable", output) + self.assertIn("Copied 0101001 to Hello/0.1@lasote/stable", output) + self.assertIn("Copied 2222222 to Hello/0.1@lasote/stable", output) + self.assertIn("'Hello/0.1@lasote/stable' already exist. Override?", output) + self.assertIn("Package '2222222' already exist. Override?", output) + self.assertIn("Package '0101001' already exist. Override?", output) + + # Now alter the origin and copy again to same destination and confirm the copy + self._create_conanfile(reference, paths, "new content") + self._create_package(reference, "0101001", paths, "new lib content") + self._create_package(reference, "2222222", paths, "new lib content") + output._stream.truncate(0) # Reset output + copier.copy(reference, ["0101001", "2222222"], "lasote", "stable", force=False) + conanfile_content = load(os.path.join(paths.export(new_reference), "conanfile.py")) + self.assertEquals(conanfile_content, "new content") + package_content = load(os.path.join(paths.package(PackageReference(new_reference, "0101001")), + "package.lib")) + self.assertEquals(package_content, "new lib content") + + # Now we are going to answer always NO to override + output._stream.truncate(0) # Reset output + userio = MockedBooleanUserIO(False, out=output) + copier = PackageCopier(paths, userio) + + self._create_conanfile(reference, paths, "content22") + self._create_package(reference, "0101001", paths, "newlib22") + self._create_package(reference, "2222222", paths, "newlib22") + 
copier.copy(reference, ["0101001", "2222222"], "lasote", "stable", force=False) + conanfile_content = load(os.path.join(paths.export(new_reference), "conanfile.py")) + self.assertEquals(conanfile_content, "new content") # Not content22 + p_ref = PackageReference(new_reference, "0101001") + package_content = load(os.path.join(paths.package(p_ref), "package.lib")) + self.assertEquals(package_content, "new lib content") # Not newlib22 + # If conanfile is not override it exist + self.assertNotIn("Package '2222222' already exist. Override?", output) + self.assertNotIn("Package '0101001' already exist. Override?", output) + self.assertNotIn("Copied 0101001 to Hello/0.1@lasote/stable", output) + self.assertNotIn("Copied 2222222 to Hello/0.1@lasote/stable", output) + + # Now override + output._stream.truncate(0) # Reset output + copier.copy(reference, ["0101001", "2222222"], "lasote", "stable", force=True) + self.assertIn("Copied 0101001 to Hello/0.1@lasote/stable", output) + self.assertIn("Copied 2222222 to Hello/0.1@lasote/stable", output) + + # Now copy just one package to another user/channel + output._stream.truncate(0) # Reset output + copier.copy(reference, ["0101001"], "pepe", "mychannel", force=True) + self.assertIn("Copied 0101001 to Hello/0.1@pepe/mychannel", output) + self.assertNotIn("Copied 2222222 to Hello/0.1@pepe/mychannel", output) + new_reference = ConanFileReference.loads("Hello/0.1@pepe/mychannel") + self._assert_package_exists(new_reference, "0101001", paths) + self._assert_package_doesnt_exists(new_reference, "2222222", paths) + + def _assert_conanfile_exists(self, reference, paths): + self.assertTrue(os.path.exists(paths.conanfile(reference))) + + def _assert_package_exists(self, reference, package_id, paths): + p1 = PackageReference(reference, package_id) + self.assertTrue(os.path.exists(os.path.join(paths.package(p1), "package.lib"))) + + def _assert_package_doesnt_exists(self, reference, package_id, paths): + p1 = PackageReference(reference, 
package_id) + self.assertFalse(os.path.exists(os.path.join(paths.package(p1), "package.lib"))) + + def _create_conanfile(self, ref, paths, content="default_content"): + origin_reg = paths.export(ref) + mkdir(origin_reg) + save(os.path.join(origin_reg, "conanfile.py"), content) + + def _create_package(self, ref, package_id, paths, content="default_content"): + package1_ref = PackageReference(ref, package_id) + package1_dir = paths.package(package1_ref) + mkdir(package1_dir) + save(os.path.join(package1_dir, "package.lib"), content) + diff --git a/testbed/conan-io__conan/conans/test/path_exists_test.py b/testbed/conan-io__conan/conans/test/path_exists_test.py new file mode 100644 index 0000000000000000000000000000000000000000..ca83403ad4819b01998395df13868b2b0fa75960 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/path_exists_test.py @@ -0,0 +1,46 @@ +import unittest +from conans.util.files import mkdir, path_exists +import os +from conans.test.tools import TestServer, TestClient +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.util.files import load +from conans.test.utils.test_files import temp_folder + + +class PathExistsTest(unittest.TestCase): + + def test_paths(self): + """Unit test of path_exists""" + tmp_dir = temp_folder() + tmp_dir = os.path.join(tmp_dir, "WhatEver") + new_path = os.path.join(tmp_dir, "CapsDir") + mkdir(new_path) + self.assertTrue(path_exists(new_path, tmp_dir)) + self.assertFalse(path_exists(os.path.join(tmp_dir, "capsdir"), tmp_dir)) + + def test_conanfile_not_found(self): + """If package is OpenSSL is not openssl""" + + test_server = TestServer() + self.servers = {"default": test_server} + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + + self.client.save(files) + self.client.run("export lasote/stable") + + self.assertRaises(Exception, self.client.run, "install hello0/0.1@lasote/stable") + 
self.client.run("install Hello0/0.1@lasote/stable --build missing") + self.client.run("upload Hello0/0.1@lasote/stable") + + # Now with requirements.txt (bug in server) + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + self.client.save({"conanfile.txt": "[requires]\nHello0/0.1@lasote/stable\n[generators]\ntxt"}) + self.client.run("install --build missing ") + build_info = load(os.path.join(self.client.current_folder, "conanbuildinfo.txt")) + self.assertIn("helloHello0", build_info) + + self.client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]}) + self.client.save({"conanfile.txt": "[requires]\nhello0/0.1@lasote/stable\n[generators]\ntxt"}) + self.assertRaises(Exception, self.client.run, "install") diff --git a/testbed/conan-io__conan/conans/test/path_limit_test.py b/testbed/conan-io__conan/conans/test/path_limit_test.py new file mode 100644 index 0000000000000000000000000000000000000000..1e34251d16e5f3fbbc4393da728b9855d6fe00ab --- /dev/null +++ b/testbed/conan-io__conan/conans/test/path_limit_test.py @@ -0,0 +1,204 @@ +import unittest +from conans.test.tools import TestClient, TestServer +from conans.util.files import load +import os +from conans.model.ref import PackageReference, ConanFileReference +import platform +import time + + +base = ''' +from conans import ConanFile +from conans.util.files import load, save +import os + +class ConanLib(ConanFile): + name = "lib" + version = "0.1" + short_paths = True + + def source(self): + extra_path = "1/" * 108 + os.makedirs(extra_path) + myfile = os.path.join(extra_path, "myfile.txt") + # print("File length ", len(myfile)) + save(myfile, "Hello extra path length") + + def build(self): + extra_path = "1/" * 108 + myfile = os.path.join(extra_path, "myfile2.txt") + # print("File length ", len(myfile)) + save(myfile, "Hello2 extra path length") + + def package(self): + self.copy("*.txt", keep_path=False) +''' + + +class 
PathLengthLimitTest(unittest.TestCase): + + def upload_test(self): + test_server = TestServer([], # write permissions + users={"lasote": "mypass"}) # exported users and passwords + servers = {"default": test_server} + client = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + files = {"conanfile.py": base} + client.save(files) + client.run("export lasote/channel") + client.run("install lib/0.1@lasote/channel --build") + client.run("upload lib/0.1@lasote/channel --all") + client.run("remove lib/0.1@lasote/channel -f") + client.run("search") + self.assertIn("There are no packages", client.user_io.out) + + for download in ("", "--all"): + client2 = TestClient(servers=servers, users={"default": [("lasote", "mypass")]}) + client2.run("install lib/0.1@lasote/channel %s" % download) + reference = ConanFileReference.loads("lib/0.1@lasote/channel") + export_folder = client2.client_cache.export(reference) + export_files = os.listdir(export_folder) + self.assertNotIn('conan_export.tgz', export_files) + package_ref = PackageReference.loads("lib/0.1@lasote/channel:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_folder = client2.client_cache.package(package_ref, short_paths=None) + if platform.system() == "Windows": + original_folder = client2.client_cache.package(package_ref) + link = load(os.path.join(original_folder, ".conan_link")) + self.assertEqual(link, package_folder) + + files = os.listdir(package_folder) + self.assertIn("myfile.txt", files) + self.assertIn("myfile2.txt", files) + self.assertNotIn("conan_package.tgz", files) + + def source_test(self): + client = TestClient() + files = {"conanfile.py": base} + client.save(files) + client.run("export user/channel") + + conan_ref = ConanFileReference.loads("lib/0.1@user/channel") + client.run("source lib/0.1@user/channel") + self.assertIn("Configuring sources", client.user_io.out) + + if platform.system() == "Windows": + source_folder = client.client_cache.source(conan_ref) + link_source = 
load(os.path.join(source_folder, ".conan_link")) + self.assertTrue(os.path.exists(link_source)) + + # Nothing changes, so source is still there + client.run("export user/channel") + client.run("source lib/0.1@user/channel") + self.assertNotIn("Configuring sources", client.user_io.out) + + # But if we remove the source, it will retrieve sources again + client.run("remove lib/0.1@user/channel -s -f") + client.run("source lib/0.1@user/channel") + self.assertIn("Configuring sources", client.user_io.out) + + def package_copier_test(self): + client = TestClient() + files = {"conanfile.py": base} + client.save(files) + client.run("export lasote/channel") + client.run("install lib/0.1@lasote/channel --build") + client.run("copy lib/0.1@lasote/channel memsharded/stable") + client.run("search") + self.assertIn("lib/0.1@lasote/channel", client.user_io.out) + self.assertIn("lib/0.1@memsharded/stable", client.user_io.out) + client.run("search lib/0.1@lasote/channel") + self.assertIn("Package_ID: 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", client.user_io.out) + client.run("search lib/0.1@memsharded/stable") + self.assertIn("Package_ID: 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", client.user_io.out) + + if platform.system() == "Windows": + conan_ref = ConanFileReference.loads("lib/0.1@lasote/channel") + package_ref = PackageReference(conan_ref, "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_folder = client.client_cache.package(package_ref) + link_package = load(os.path.join(package_folder, ".conan_link")) + self.assertTrue(os.path.exists(link_package)) + + def basic_test(self): + client = TestClient() + files = {"conanfile.py": base} + client.save(files) + client.run("export user/channel") + client.run("install lib/0.1@user/channel --build") + package_ref = PackageReference.loads("lib/0.1@user/channel:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + client.run("search") + self.assertIn("lib/0.1@user/channel", client.user_io.out) + client.run("search lib/0.1@user/channel") + 
self.assertIn("Package_ID: 5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9", client.user_io.out) + + package_folder = client.client_cache.package(package_ref, short_paths=None) + file1 = load(os.path.join(package_folder, "myfile.txt")) + self.assertEqual("Hello extra path length", file1) + file2 = load(os.path.join(package_folder, "myfile2.txt")) + self.assertEqual("Hello2 extra path length", file2) + + if platform.system() == "Windows": + conan_ref = ConanFileReference.loads("lib/0.1@user/channel") + source_folder = client.client_cache.source(conan_ref) + link_source = load(os.path.join(source_folder, ".conan_link")) + self.assertTrue(os.path.exists(link_source)) + + build_folder = client.client_cache.build(package_ref) + link_build = load(os.path.join(build_folder, ".conan_link")) + self.assertTrue(os.path.exists(link_build)) + + package_folder = client.client_cache.package(package_ref) + link_package = load(os.path.join(package_folder, ".conan_link")) + self.assertTrue(os.path.exists(link_package)) + + client.run("remove lib* -f") + self.assertFalse(os.path.exists(link_source)) + self.assertFalse(os.path.exists(link_build)) + self.assertFalse(os.path.exists(link_package)) + + def failure_test(self): + + base = ''' +from conans import ConanFile +from conans.util.files import load, save +import os + +class ConanLib(ConanFile): + name = "lib" + version = "0.1" + short_paths = True + exports = "*" + generators = "cmake" + + def build(self): + self.output.info("%s/%s" % (self.conanfile_directory, self.name)) + # print os.listdir(self.conanfile_directory) + path = os.path.join(self.conanfile_directory, self.name) + # print "PATH EXISTS ", os.path.exists(path) + # print os.listdir(path) + path = os.path.join(path, "myfile.txt") + # print "PATH EXISTS ", os.path.exists(path) + + def package(self): + self.copy("*.txt", keep_path=False) +''' + + client = TestClient() + files = {"conanfile.py": base, + "lib/myfile.txt": "Hello world!"} + client.save(files) + client.run("export 
user/channel") + client.run("install lib/0.1@user/channel --build") + # print client.paths.store + package_ref = PackageReference.loads("lib/0.1@user/channel:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_folder = client.client_cache.package(package_ref, short_paths=None) + file1 = load(os.path.join(package_folder, "myfile.txt")) + self.assertEqual("Hello world!", file1) + + client.run("install lib/0.1@user/channel --build") + package_ref = PackageReference.loads("lib/0.1@user/channel:" + "5ab84d6acfe1f23c4fae0ab88f26e3a396351ac9") + package_folder = client.client_cache.package(package_ref, short_paths=None) + file1 = load(os.path.join(package_folder, "myfile.txt")) + self.assertEqual("Hello world!", file1) diff --git a/testbed/conan-io__conan/conans/test/paths_test.py b/testbed/conan-io__conan/conans/test/paths_test.py new file mode 100644 index 0000000000000000000000000000000000000000..3cd7302d96ef1f5ba589f4d133d390e41d5c86f7 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/paths_test.py @@ -0,0 +1,39 @@ +import os +import platform +import unittest +from conans.paths import (BUILD_FOLDER, PACKAGES_FOLDER, EXPORT_FOLDER, conan_expand_user, + SimplePaths) +from conans.model.ref import ConanFileReference, PackageReference +from conans.test.utils.test_files import temp_folder + + +class PathsTest(unittest.TestCase): + + def expand_user_test(self): + if platform.system() == "Windows": + old_env = dict(os.environ) + try: + os.environ["HOME"] = "%USERPROFILE%" + user_home = conan_expand_user("~") + finally: + os.environ.clear() + os.environ.update(old_env) + self.assertTrue(os.path.exists(user_home)) + + def basic_test(self): + folder = temp_folder() + paths = SimplePaths(folder) + self.assertEqual(paths._store_folder, folder) + conan_ref = ConanFileReference.loads("opencv/2.4.10 @ lasote /testing") + package_ref = PackageReference(conan_ref, "456fa678eae68") + expected_base = os.path.join(folder, os.path.sep.join(["opencv", "2.4.10", + "lasote", 
"testing"])) + self.assertEqual(paths.conan(conan_ref), + os.path.join(paths.store, expected_base)) + self.assertEqual(paths.export(conan_ref), + os.path.join(paths.store, expected_base, EXPORT_FOLDER)) + self.assertEqual(paths.build(package_ref), + os.path.join(paths.store, expected_base, BUILD_FOLDER, "456fa678eae68")) + self.assertEqual(paths.package(package_ref), + os.path.join(paths.store, expected_base, PACKAGES_FOLDER, + "456fa678eae68")) diff --git a/testbed/conan-io__conan/conans/test/performance/__init__.py b/testbed/conan-io__conan/conans/test/performance/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/performance/large_project.py b/testbed/conan-io__conan/conans/test/performance/large_project.py new file mode 100644 index 0000000000000000000000000000000000000000..fabdbf47539b77c85a897b83cf8459a33bc3195f --- /dev/null +++ b/testbed/conan-io__conan/conans/test/performance/large_project.py @@ -0,0 +1,46 @@ +import unittest +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +import time +from conans.test.tools import TestClient + + +class PerformanceTest(unittest.TestCase): + """ NOT really a test, but a helper to profile performance + FILE name is not "test" so it will not run under unit testing + """ + + def large_project_test(self): + client = TestClient() + num = 250 + deep = True # True for N ... 
-> 3 -> 2 -> 1 -> 0, False for N -> 0, 3-> 0, 2->0, 1->0 + for i in range(num): + if i == 0: + files = cpp_hello_conan_files("Hello0", "0.1", build=False) + else: + if not deep: + files = cpp_hello_conan_files("Hello%d" % i, "0.1", + ["Hello0/0.1@lasote/stable"], build=False) + else: + files = cpp_hello_conan_files("Hello%d" % i, "0.1", + ["Hello%s/0.1@lasote/stable" % (i-1)], + build=False) + + client.save(files, clean_first=True) + client.run("export lasote/stable") + + # Now lets depend on it + if deep: + files = cpp_hello_conan_files("HelloFinal", "0.1", + ["Hello%s/0.1@lasote/stable" % (num - 1)], build=False) + else: + files = cpp_hello_conan_files("HelloFinal", "0.1", + ["Hello%s/0.1@lasote/stable" % (i) for i in range(num)], + build=False) + + client.save(files, clean_first=True) + t1 = time.time() + client.run("install --build") + print("Final time with build %s" % (time.time() - t1)) + t1 = time.time() + client.run("install") + print("Final time %s" % (time.time() - t1)) diff --git a/testbed/conan-io__conan/conans/test/query_parse_test.py b/testbed/conan-io__conan/conans/test/query_parse_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c265bd82ada704d36bb7b0f12c26ba39706529b1 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/query_parse_test.py @@ -0,0 +1,46 @@ +import unittest +from conans.search.query_parse import infix_to_postfix, evaluate_postfix + + +class QueryParseTest(unittest.TestCase): + + def test_get_postfix(self): + r = infix_to_postfix("") + self.assertEquals(r, []) + + r = infix_to_postfix("a=2") + self.assertEquals(r, ["a=2"]) + + r = infix_to_postfix("a=2 OR b=3") + self.assertEquals(r, ["a=2", "b=3", "|"]) + + r = infix_to_postfix("a= OR b=") + self.assertEquals(r, ["a=", "b=", "|"]) # Equivalent to "" + + r = infix_to_postfix("(a=2 OR b=3) AND (j=34 AND j=45) OR (a=1)") + self.assertEquals(r, ["a=2", "b=3", "|", "j=34", "j=45", "&", "a=1", "&", "|"]) + + with self.assertRaisesRegexp(Exception, "Invalid 
expression: 2"): + r = infix_to_postfix("a= 2 OR b=3") + + def test_evaluate_postfix(self): + + def evaluator(expr): + return expr in ("a=2", "j=45") + + def evaluate(q): + r = infix_to_postfix(q) + return evaluate_postfix(r, evaluator) + + self.assertTrue(evaluate("a=2")) + self.assertFalse(evaluate("a=4")) + self.assertTrue(evaluate("a=2 OR a=3")) + self.assertTrue(evaluate("a=4 OR j=45")) + self.assertFalse(evaluate("a=4 AND j=45")) + self.assertTrue(evaluate("a=2 AND (f=23 OR j=45)")) + self.assertFalse(evaluate("a=2 AND (f=23 OR j=435)")) + self.assertTrue(evaluate("a=2 AND j=45 OR h=23")) + self.assertTrue(evaluate("a=2 AND j=45 OR (h=23 AND a=2)")) + self.assertTrue(evaluate("((((a=2 AND ((((f=23 OR j=45))))))))")) + self.assertFalse(evaluate("((((a=2 AND ((((f=23 OR j=42))))))))")) + diff --git a/testbed/conan-io__conan/conans/test/registry_test.py b/testbed/conan-io__conan/conans/test/registry_test.py new file mode 100644 index 0000000000000000000000000000000000000000..27311f0ae992da7d359d2bac5b2fb6b84b418e1d --- /dev/null +++ b/testbed/conan-io__conan/conans/test/registry_test.py @@ -0,0 +1,67 @@ +import unittest +import os +from conans.test.utils.test_files import temp_folder +from conans.client.remote_registry import RemoteRegistry +from conans.model.ref import ConanFileReference +from conans.errors import ConanException +from conans.test.tools import TestBufferConanOutput +from conans.util.files import save + + +class RegistryTest(unittest.TestCase): + + def retro_compatibility_test(self): + f = os.path.join(temp_folder(), "aux_file") + save(f, """conan.io https://server.conan.io +""") # Without SSL parameter + registry = RemoteRegistry(f, TestBufferConanOutput()) + self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True)]) + + def add_remove_update_test(self): + f = os.path.join(temp_folder(), "aux_file") + registry = RemoteRegistry(f, TestBufferConanOutput()) + + # Add + registry.add("local", "http://localhost:9300") + 
self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True), + ("local", "http://localhost:9300", True)]) + # Add + registry.add("new", "new_url", False) + self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True), + ("local", "http://localhost:9300", True), + ("new", "new_url", False)]) + with self.assertRaises(ConanException): + registry.add("new", "new_url") + # Update + registry.update("new", "other_url") + self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True), + ("local", "http://localhost:9300", True), + ("new", "other_url", True)]) + with self.assertRaises(ConanException): + registry.update("new2", "new_url") + + registry.update("new", "other_url", False) + self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True), + ("local", "http://localhost:9300", True), + ("new", "other_url", False)]) + + # Remove + registry.remove("local") + self.assertEqual(registry.remotes, [("conan.io", "https://server.conan.io", True), + ("new", "other_url", False)]) + with self.assertRaises(ConanException): + registry.remove("new2") + + def refs_test(self): + f = os.path.join(temp_folder(), "aux_file") + registry = RemoteRegistry(f, TestBufferConanOutput()) + ref = ConanFileReference.loads("MyLib/0.1@lasote/stable") + + remotes = registry.remotes + registry.set_ref(ref, remotes[0]) + remote = registry.get_ref(ref) + self.assertEqual(remote, remotes[0]) + + registry.set_ref(ref, remotes[0]) + remote = registry.get_ref(ref) + self.assertEqual(remote, remotes[0]) diff --git a/testbed/conan-io__conan/conans/test/remote/__init__.py b/testbed/conan-io__conan/conans/test/remote/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..678b2fae9ff1c15ad29e7493b01ed14f95ab7e47 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/remote/__init__.py @@ -0,0 +1,3 @@ +import os + +CONAN_TEST_FOLDER = os.getenv('CONAN_TEST_FOLDER', None) diff --git 
a/testbed/conan-io__conan/conans/test/remote/rest_api_test.py b/testbed/conan-io__conan/conans/test/remote/rest_api_test.py new file mode 100644 index 0000000000000000000000000000000000000000..efb6b4593923653a8b9fe08a66d590f109674c44 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/remote/rest_api_test.py @@ -0,0 +1,253 @@ +import unittest +from conans.client.rest.rest_client import RestApiClient +from conans.model.ref import ConanFileReference, PackageReference +from conans.test.utils.test_files import hello_source_files +from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO +import sys +from conans.client.output import ConanOutput, Color +from conans.model.info import ConanInfo +from conans.test.server.utils.server_launcher import TestServerLauncher +import requests +from conans.test.utils.test_files import temp_folder +from conans.model.version import Version +from conans.server.rest.bottle_plugins.version_checker import VersionCheckerPlugin +import platform +import os +from conans.util.files import md5, save +from conans.model.manifest import FileTreeManifest +from nose.plugins.attrib import attr + + +@attr('slow') +class RestApiTest(unittest.TestCase): + '''Open a real server (sockets) to test rest_api function.''' + + server = None + api = None + + @classmethod + def setUpClass(cls): + if not cls.server: + plugin = VersionCheckerPlugin(Version("0.16.0"), Version("0.16.0"), ["ImCool"]) + cls.server = TestServerLauncher(server_version=Version("0.16.0"), + min_client_compatible_version=Version("0.16.0"), + plugins=[plugin]) + cls.server.start() + + cls.api = RestApiClient(ConanOutput(sys.stdout, Color), requester=requests) + cls.api.remote_url = "http://localhost:%s" % str(cls.server.port) + + # Authenticate user + token = cls.api.authenticate("private_user", "private_pass") + cls.api.token = token + + @classmethod + def tearDownClass(cls): + cls.server.stop() + + def tearDown(self): + RestApiTest.server.clean() + + def server_info_test(self): + 
check, version, capabilities = self.api.server_info() + self.assertEquals(version, "0.16.0") + self.assertEquals(check, None) # None because we are not sending client version + self.assertEquals(capabilities, ["ImCool"]) + + def get_conan_test(self): + # Upload a conans + conan_reference = ConanFileReference.loads("conan1/1.0.0@private_user/testing") + self._upload_conan(conan_reference) + + # Get the conans + tmp_dir = temp_folder() + recipe_paths = self.api.get_recipe(conan_reference, tmp_dir) + self.assertIsNotNone(recipe_paths) + self.assertIn(CONANFILE, recipe_paths) + self.assertIn(CONAN_MANIFEST, recipe_paths) + + def get_conan_digest_test(self): + # Upload a conans + conan_reference = ConanFileReference.loads("conan2/1.0.0@private_user/testing") + self._upload_conan(conan_reference) + + # Get the conans digest + digest = self.api.get_conan_digest(conan_reference) + self.assertEquals(digest.summary_hash, "ca9373e144b546e6a716711f0e6b436b") + self.assertEquals(digest.time, 123123123) + + def get_package_test(self): + # Upload a conans + conan_reference = ConanFileReference.loads("conan3/1.0.0@private_user/testing") + self._upload_conan(conan_reference) + + # Upload an package + package_reference = PackageReference(conan_reference, "1F23223EFDA") + self._upload_package(package_reference) + + # Get the package + tmp_dir = temp_folder() + package = self.api.get_package(package_reference, tmp_dir) + self.assertIsNotNone(package) + self.assertIn("hello.cpp", package) + + def get_package_info_test(self): + # Upload a conans + conan_reference = ConanFileReference.loads("conan3/1.0.0@private_user/testing") + self._upload_conan(conan_reference) + + # Upload an package + package_reference = PackageReference(conan_reference, "1F23223EFDA") + conan_info = """[settings] + arch=x86_64 + compiler=gcc + os=Linux +[options] + 386=False +[requires] + Hello + Bye/2.9 + Say/2.1@user/testing + Chat/2.1@user/testing:SHA_ABC +""" + self._upload_package(package_reference, 
{CONANINFO: conan_info}) + + # Get the package info + info = self.api.get_package_info(package_reference) + self.assertIsInstance(info, ConanInfo) + self.assertEquals(info, ConanInfo.loads(conan_info)) + + def upload_huge_conan_test(self): + if platform.system() != "Windows": + # Upload a conans + conan_reference = ConanFileReference.loads("conanhuge/1.0.0@private_user/testing") + files = {"file%s.cpp" % name: "File conent" for name in range(1000)} + self._upload_conan(conan_reference, files) + + # Get the conans + tmp_dir = temp_folder() + pack = self.api.get_recipe(conan_reference, tmp_dir) + self.assertIsNotNone(pack) + self.assertIn("file999.cpp", pack) + + def search_test(self): + # Upload a conan1 + conan_name1 = "HelloOnly/0.10@private_user/testing" + conan_reference1 = ConanFileReference.loads(conan_name1) + self._upload_conan(conan_reference1) + + # Upload a package + conan_info = """[settings] + arch=x86_64 + compiler=gcc + os=Linux +[options] + 386=False +[requires] + Hello + Bye/2.9 + Say/2.1@user/testing + Chat/2.1@user/testing:SHA_ABC +""" + package_reference = PackageReference(conan_reference1, "1F23223EFDA") + self._upload_package(package_reference, {CONANINFO: conan_info}) + + # Upload a conan2 + conan_name2 = "helloonlyToo/2.1@private_user/stable" + conan_reference2 = ConanFileReference.loads(conan_name2) + self._upload_conan(conan_reference2) + + # Get the info about this ConanFileReference + info = self.api.search_packages(conan_reference1, None) + self.assertEqual(ConanInfo.loads(conan_info).serialize_min(), info["1F23223EFDA"]) + + # Search packages + results = self.api.search("HelloOnly*", ignorecase=False) + + self.assertEqual(results, [conan_reference1]) + + def remove_test(self): + # Upload a conans + conan_reference1 = ConanFileReference.loads("MyFirstConan/1.0.0@private_user/testing") + self._upload_conan(conan_reference1) + path1 = self.server.search_manager._paths.conan(conan_reference1) + self.assertTrue(os.path.exists(path1)) + # 
Remove conans and packages + self.api.remove_conanfile(conan_reference1) + self.assertFalse(os.path.exists(path1)) + + def remove_packages_test(self): + conan_ref = ConanFileReference.loads("MySecondConan/2.0.0@private_user/testing") + self._upload_conan(conan_ref) + + folders = {} + for sha in ["1", "2", "3", "4", "5"]: + # Upload an package + package_ref = PackageReference(conan_ref, sha) + self._upload_package(package_ref) + folder = self.server.search_manager._paths.package(package_ref) + self.assertTrue(os.path.exists(folder)) + folders[sha] = folder + + self.api.remove_packages(conan_ref, ["1"]) + self.assertTrue(os.path.exists(self.server.search_manager._paths.conan(conan_ref))) + self.assertFalse(os.path.exists(folders["1"])) + self.assertTrue(os.path.exists(folders["2"])) + self.assertTrue(os.path.exists(folders["3"])) + self.assertTrue(os.path.exists(folders["4"])) + self.assertTrue(os.path.exists(folders["5"])) + + self.api.remove_packages(conan_ref, ["2", "3"]) + self.assertTrue(os.path.exists(self.server.search_manager._paths.conan(conan_ref))) + self.assertFalse(os.path.exists(folders["1"])) + self.assertFalse(os.path.exists(folders["2"])) + self.assertFalse(os.path.exists(folders["3"])) + self.assertTrue(os.path.exists(folders["4"])) + self.assertTrue(os.path.exists(folders["5"])) + + self.api.remove_packages(conan_ref, []) + self.assertTrue(os.path.exists(self.server.search_manager._paths.conan(conan_ref))) + for sha in ["1", "2", "3", "4", "5"]: + self.assertFalse(os.path.exists(folders[sha])) + + def _upload_package(self, package_reference, base_files=None): + + files = hello_source_files(3, [1, 12]) + if base_files: + files.update(base_files) + + tmp_dir = temp_folder() + abs_paths = {} + for filename, content in files.items(): + abs_path = os.path.join(tmp_dir, filename) + save(abs_path, content) + abs_paths[filename] = abs_path + + self.api.upload_package(package_reference, abs_paths, retry=1, retry_wait=0) + + def _upload_conan(self, 
conan_reference, base_files=None, retry=1, retry_wait=0): + + files = hello_source_files(3, [1, 12]) + if base_files: + files.update(base_files) + content = """ +from conans import ConanFile + +class MyConan(ConanFile): + name = "%s" + version = "%s" + settings = arch, compiler, os +""" % (conan_reference.name, conan_reference.version) + files[CONANFILE] = content + files_md5s = {filename: md5(content) for filename, content in files.items()} + conan_digest = FileTreeManifest(123123123, files_md5s) + files[CONAN_MANIFEST] = str(conan_digest) + + tmp_dir = temp_folder() + abs_paths = {} + for filename, content in files.items(): + abs_path = os.path.join(tmp_dir, filename) + save(abs_path, content) + abs_paths[filename] = abs_path + + self.api.upload_conan(conan_reference, abs_paths, retry, retry_wait) diff --git a/testbed/conan-io__conan/conans/test/remote_manager_test.py b/testbed/conan-io__conan/conans/test/remote_manager_test.py new file mode 100644 index 0000000000000000000000000000000000000000..4e57db1cc8430d2f6f4dbab60c1febdadd03ca2d --- /dev/null +++ b/testbed/conan-io__conan/conans/test/remote_manager_test.py @@ -0,0 +1,83 @@ +import os +import tempfile +import unittest + +from mock import Mock + +from conans.client.client_cache import ClientCache +from conans.client.remote_manager import RemoteManager +from conans.client.remote_registry import Remote +from conans.errors import NotFoundException +from conans.model.ref import ConanFileReference, PackageReference +from conans.model.manifest import FileTreeManifest +from conans.paths import CONANFILE, CONAN_MANIFEST, CONANINFO +from conans.test.tools import TestBufferConanOutput, TestClient +from conans.test.utils.test_files import temp_folder +from conans.test.utils.cpp_test_files import cpp_hello_conan_files +from conans.util.files import save + + +class MockRemoteClient(object): + + def __init__(self): + self.upload_package = Mock() + self.get_conan_digest = Mock() + tmp_folder = 
tempfile.mkdtemp(suffix='conan_download') + save(os.path.join(tmp_folder, "one.txt"), "ONE") + self.get_recipe = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")}) + + tmp_folder = tempfile.mkdtemp(suffix='conan_download') + save(os.path.join(tmp_folder, "one.txt"), "ONE") + self.get_package = Mock(return_value={"one.txt": os.path.join(tmp_folder, "one.txt")}) + self.remote_url = None + + self.raise_count = 0 + + def upload_conan(self, *argc, **argv): # @UnusedVariable + if self.remote_url != "url3": + self.raise_count += 1 + raise NotFoundException(self.remote_url) + else: + return self.remote_url + + +class RemoteManagerTest(unittest.TestCase): + """Unit test""" + + def setUp(self): + self.conan_reference = ConanFileReference.loads("openssl/2.0.3@lasote/testing") + self.package_reference = PackageReference(self.conan_reference, "123123123") + self.remote_client = MockRemoteClient() + self.output = TestBufferConanOutput() + self.client_cache = ClientCache(temp_folder(), temp_folder(), self.output) + self.manager = RemoteManager(self.client_cache, self.remote_client, self.output) + + def test_no_remotes(self): + client = TestClient() + files = cpp_hello_conan_files("Hello0", "0.1") + client.save(files) + client.run("export lasote/stable") + client.run("upload Hello0/0.1@lasote/stable", ignore_error=True) + self.assertIn("ERROR: No default remote defined", client.user_io.out) + + def method_called_test(self): + + save(os.path.join(self.client_cache.package(self.package_reference), CONANINFO), "asdasd") + manifest = FileTreeManifest.create(self.client_cache.package(self.package_reference)) + save(os.path.join(self.client_cache.package(self.package_reference), CONAN_MANIFEST), str(manifest)) + + self.assertFalse(self.remote_client.upload_package.called) + self.manager.upload_package(self.package_reference, Remote("other", "url", True), 1, 0) + self.assertTrue(self.remote_client.upload_package.called) + + 
self.assertFalse(self.remote_client.get_conan_digest.called) + self.manager.get_conan_digest(self.conan_reference, Remote("other", "url", True)) + self.assertTrue(self.remote_client.get_conan_digest.called) + + self.assertFalse(self.remote_client.get_recipe.called) + self.manager.get_recipe(self.conan_reference, temp_folder(), Remote("other", "url", True)) + self.assertTrue(self.remote_client.get_recipe.called) + + self.assertFalse(self.remote_client.get_package.called) + self.manager.get_package(self.package_reference, temp_folder(), Remote("other", "url", True)) + self.assertTrue(self.remote_client.get_package.called) diff --git a/testbed/conan-io__conan/conans/test/runner_test.py b/testbed/conan-io__conan/conans/test/runner_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f51a48793fdf96776ba29fad9bb18a216643138e --- /dev/null +++ b/testbed/conan-io__conan/conans/test/runner_test.py @@ -0,0 +1,147 @@ +import unittest +from conans.test.tools import TestClient +import os +from conans.client.runner import ConanRunner + + +class RunnerTest(unittest.TestCase): + + def _install_and_build(self, conanfile_text, runner=None): + client = TestClient(runner=runner) + files = {"conanfile.py": conanfile_text} + test_folder = os.path.join(client.current_folder, "test_folder") + self.assertFalse(os.path.exists(test_folder)) + client.save(files) + client.run("install") + client.run("build") + return client + + def basic_test(self): + conanfile = ''' +from conans import ConanFile +from conans.client.runner import ConanRunner +import platform + +class ConanFileToolsTest(ConanFile): + + def build(self): + self._runner = ConanRunner() + self.run("mkdir test_folder") + ''' + client = self._install_and_build(conanfile) + test_folder = os.path.join(client.current_folder, "test_folder") + self.assertTrue(os.path.exists(test_folder)) + + def log_test(self): + conanfile = ''' +from conans import ConanFile +from conans.client.runner import ConanRunner +import 
platform + +class ConanFileToolsTest(ConanFile): + + def build(self): + self.run("cmake --version") + ''' + # A runner logging everything + runner = ConanRunner(print_commands_to_output=True, + generate_run_log_file=True, + log_run_to_output=True) + client = self._install_and_build(conanfile, runner=runner) + self.assertIn("--Running---", client.user_io.out) + self.assertIn("> cmake --version", client.user_io.out) + self.assertIn("cmake version", client.user_io.out) + self.assertIn("Logging command output to file ", client.user_io.out) + + # A runner logging everything + runner = ConanRunner(print_commands_to_output=True, + generate_run_log_file=False, + log_run_to_output=True) + client = self._install_and_build(conanfile, runner=runner) + self.assertIn("--Running---", client.user_io.out) + self.assertIn("> cmake --version", client.user_io.out) + self.assertIn("cmake version", client.user_io.out) + self.assertNotIn("Logging command output to file ", client.user_io.out) + + runner = ConanRunner(print_commands_to_output=False, + generate_run_log_file=True, + log_run_to_output=True) + client = self._install_and_build(conanfile, runner=runner) + self.assertNotIn("--Running---", client.user_io.out) + self.assertNotIn("> cmake --version", client.user_io.out) + self.assertIn("cmake version", client.user_io.out) + self.assertIn("Logging command output to file ", client.user_io.out) + + runner = ConanRunner(print_commands_to_output=False, + generate_run_log_file=False, + log_run_to_output=True) + client = self._install_and_build(conanfile, runner=runner) + self.assertNotIn("--Running---", client.user_io.out) + self.assertNotIn("> cmake --version", client.user_io.out) + self.assertIn("cmake version", client.user_io.out) + self.assertNotIn("Logging command output to file ", client.user_io.out) + + runner = ConanRunner(print_commands_to_output=False, + generate_run_log_file=False, + log_run_to_output=False) + client = self._install_and_build(conanfile, runner=runner) + 
self.assertNotIn("--Running---", client.user_io.out) + self.assertNotIn("> cmake --version", client.user_io.out) + self.assertNotIn("cmake version", client.user_io.out) + self.assertNotIn("Logging command output to file ", client.user_io.out) + + runner = ConanRunner(print_commands_to_output=False, + generate_run_log_file=True, + log_run_to_output=False) + client = self._install_and_build(conanfile, runner=runner) + self.assertNotIn("--Running---", client.user_io.out) + self.assertNotIn("> cmake --version", client.user_io.out) + self.assertNotIn("cmake version", client.user_io.out) + self.assertIn("Logging command output to file ", client.user_io.out) + + def cwd_test(self): + conanfile = ''' +from conans import ConanFile +from conans.client.runner import ConanRunner +import platform + +class ConanFileToolsTest(ConanFile): + + def build(self): + self._runner = ConanRunner() + self.run("mkdir test_folder", cwd="child_folder") + ''' + files = {"conanfile.py": conanfile} + + client = TestClient() + os.makedirs(os.path.join(client.current_folder, "child_folder")) + test_folder = os.path.join(client.current_folder, "child_folder", "test_folder") + self.assertFalse(os.path.exists(test_folder)) + client.save(files) + client.run("install") + client.run("build") + self.assertTrue(os.path.exists(test_folder)) + + def cwd_error_test(self): + conanfile = ''' +from conans import ConanFile +from conans.client.runner import ConanRunner +import platform + +class ConanFileToolsTest(ConanFile): + + def build(self): + self._runner = ConanRunner() + self.run("mkdir test_folder", cwd="non_existing_folder") + ''' + files = {"conanfile.py": conanfile} + + client = TestClient() + test_folder = os.path.join(client.current_folder, "child_folder", "test_folder") + self.assertFalse(os.path.exists(test_folder)) + client.save(files) + client.run("install") + error = client.run("build", ignore_error=True) + self.assertTrue(error) + self.assertIn("Error while executing 'mkdir test_folder'", 
client.user_io.out) + self.assertFalse(os.path.exists(test_folder)) diff --git a/testbed/conan-io__conan/conans/test/server/__init__.py b/testbed/conan-io__conan/conans/test/server/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/server/conf_test.py b/testbed/conan-io__conan/conans/test/server/conf_test.py new file mode 100644 index 0000000000000000000000000000000000000000..992857087f5a4d3c8a0276b04404b0856ac0318a --- /dev/null +++ b/testbed/conan-io__conan/conans/test/server/conf_test.py @@ -0,0 +1,71 @@ +import unittest +from conans.util.files import save +import os +from conans.server.conf import ConanServerConfigParser +from datetime import timedelta +from conans.test.utils.test_files import temp_folder +from conans.paths import conan_expand_user + + +fileconfig = ''' +[server] +jwt_secret: mysecret +jwt_expire_minutes: 121 +disk_storage_path: %s +ssl_enabled: true +port: 9220 + + +[write_permissions] +openssl/2.0.1@lasote/testing: pepe + +[read_permissions] +*/*@*/*: * +openssl/2.0.1@lasote/testing: pepe + +[users] +lasote: defaultpass +pepe: pepepass +''' + + +class ServerConfTest(unittest.TestCase): + + def setUp(self): + self.file_path = temp_folder() + server_conf = os.path.join(self.file_path, '.conan_server/server.conf') + self.storage_path = os.path.join(self.file_path, "storage") + save(server_conf, fileconfig % self.storage_path) + self.environ = {} + + def test_values(self): + config = ConanServerConfigParser(self.file_path, environment=self.environ) + self.assertEquals(config.jwt_secret, "mysecret") + self.assertEquals(config.jwt_expire_time, timedelta(minutes=121)) + self.assertEquals(config.disk_storage_path, self.storage_path) + self.assertTrue(config.ssl_enabled) + self.assertEquals(config.port, 9220) + self.assertEquals(config.write_permissions, [("openssl/2.0.1@lasote/testing", "pepe")]) + 
self.assertEquals(config.read_permissions, [("*/*@*/*", "*"), + ("openssl/2.0.1@lasote/testing", "pepe")]) + self.assertEquals(config.users, {"lasote": "defaultpass", "pepe": "pepepass"}) + + # Now check with environments + tmp_storage = temp_folder() + self.environ["CONAN_STORAGE_PATH"] = tmp_storage + self.environ["CONAN_JWT_SECRET"] = "newkey" + self.environ["CONAN_JWT_EXPIRE_MINUTES"] = "123" + self.environ["CONAN_SSL_ENABLED"] = "False" + self.environ["CONAN_SERVER_PORT"] = "1233" + self.environ["CONAN_SERVER_USERS"] = "lasote:lasotepass,pepe2:pepepass2" + + config = ConanServerConfigParser(self.file_path, environment=self.environ) + self.assertEquals(config.jwt_secret, "newkey") + self.assertEquals(config.jwt_expire_time, timedelta(minutes=123)) + self.assertEquals(config.disk_storage_path, conan_expand_user(tmp_storage)) + self.assertFalse(config.ssl_enabled) + self.assertEquals(config.port, 1233) + self.assertEquals(config.write_permissions, [("openssl/2.0.1@lasote/testing", "pepe")]) + self.assertEquals(config.read_permissions, [("*/*@*/*", "*"), + ("openssl/2.0.1@lasote/testing", "pepe")]) + self.assertEquals(config.users, {"lasote": "lasotepass", "pepe2": "pepepass2"}) diff --git a/testbed/conan-io__conan/conans/test/server/crypto/__init__.py b/testbed/conan-io__conan/conans/test/server/crypto/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/server/crypto/jwt_test.py b/testbed/conan-io__conan/conans/test/server/crypto/jwt_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f74565801fb603bf55fb4dfe9ae48bc82279a0a7 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/server/crypto/jwt_test.py @@ -0,0 +1,38 @@ +import unittest +from conans.server.crypto.jwt.jwt_credentials_manager import JWTCredentialsManager +from conans.server.crypto.jwt.jwt_manager import JWTManager +from datetime import timedelta +import 
time +import jwt +from jwt import DecodeError + + +class JwtTest(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + self.secret = "123123123qweqwe" + self.expire_time = timedelta(seconds=1) # No lower resolution available + + def jwt_manager_test(self): + # Instance the manager to generate tokens that expires in 10 ms + + manager = JWTManager(self.secret, self.expire_time) + + # Encrypt a profile + profile = {"hello": "world"} + token = manager.get_token_for(profile) + + # Decrypt the profile + decrypted_profile = manager.get_profile(token) + self.assertEquals(profile, decrypted_profile) + + # Now wait 2 seconds and check if its valid now + time.sleep(2) + self.assertRaises(jwt.ExpiredSignature, manager.get_profile, token) + + def jwt_credentials_manager_test(self): + manager = JWTCredentialsManager(self.secret, self.expire_time) + token = manager.get_token_for("lasote") + self.assertEquals(manager.get_user(token), "lasote") + self.assertRaises(DecodeError, manager.get_user, "invalid_user") diff --git a/testbed/conan-io__conan/conans/test/server/service/__init__.py b/testbed/conan-io__conan/conans/test/server/service/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/server/service/authorizer_test.py b/testbed/conan-io__conan/conans/test/server/service/authorizer_test.py new file mode 100644 index 0000000000000000000000000000000000000000..661493a405d6d8561c1510e38d229988aa117cf4 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/server/service/authorizer_test.py @@ -0,0 +1,139 @@ +import unittest +from conans.server.service.authorize import BasicAuthorizer +from conans.errors import ForbiddenException, InternalErrorException +from conans.model.ref import ConanFileReference, PackageReference + + +class AuthorizerTest(unittest.TestCase): + + def setUp(self): + unittest.TestCase.setUp(self) + self.openssl_ref = 
ConanFileReference.loads("openssl/2.0.1@lasote/testing") + self.package_reference = PackageReference(self.openssl_ref, "123123123") + self.openssl_ref2 = ConanFileReference.loads("openssl/2.0.2@lasote/testing") + self.package_reference2 = PackageReference(self.openssl_ref2, "123123123") + + def invalid_rule_test(self): + """Invalid rule input""" + read_perms = ["invalid_reference", "lasote", ("*/*@*/*", "")] + write_perms = [] + + authorizer = BasicAuthorizer(read_perms, write_perms) + self.assertRaises(InternalErrorException, + authorizer.check_read_conan, "pepe", self.openssl_ref) + + def check_wildcards_test(self): + # Only pepe can read openssl versions + read_perms = [("openssl/*@lasote/testing", "pepe"), ("*/*@*/*", "*")] + # Only pepe (and lasote because its owner) can write it and no more users can write + write_perms = [(str(self.openssl_ref), "pepe")] + + authorizer = BasicAuthorizer(read_perms, write_perms) + # Pepe can read all openssl versions + authorizer.check_read_conan("pepe", self.openssl_ref) + authorizer.check_read_conan("pepe", self.openssl_ref2) + # Other user can't + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "juan", self.openssl_ref) + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "juan", self.openssl_ref2) + + # Only pepe can read versions 2.0.1 of lasote/testing + read_perms = [("*/2.0.2@lasote/testing", "pepe"), ("*/*@*/*", "*")] + authorizer = BasicAuthorizer(read_perms, write_perms) + # Pepe can read openssl 2.0.1 version and 2.0.2 (only matches 2.0.2, so other is allowed) + authorizer.check_read_conan("pepe", self.openssl_ref2) + authorizer.check_read_conan("pepe", self.openssl_ref2) + # Other user can't read 2.0.2 + authorizer.check_read_conan("juan", self.openssl_ref) + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "juan", self.openssl_ref2) + + # Only pepe can read openssl version 2.0.1 from any owner + read_perms = [("openssl/2.0.1@*/testing", "pepe")] + # Only 
pepe (and lasote because its owner) can write it and no more users can write + write_perms = [(str(self.openssl_ref), "pepe")] + + authorizer = BasicAuthorizer(read_perms, write_perms) + # Pepe can read any openssl/2.0.1 + authorizer.check_read_conan("pepe", self.openssl_ref) + tmp_ref = ConanFileReference.loads("openssl/2.0.1@alfred/testing") + authorizer.check_read_conan("pepe", tmp_ref) + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "juan", self.openssl_ref) + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "juan", tmp_ref) + + # Only pepe can read openssl version 2.0.1 from lasote/any channel + read_perms = [("openssl/2.0.1@lasote/*", "pepe")] + # Only pepe (and lasote because its owner) can write it and no more users can write + write_perms = [(str(self.openssl_ref), "pepe")] + + authorizer = BasicAuthorizer(read_perms, write_perms) + # Pepe can read openssl/2.0.1 from any channel but only from lasote + authorizer.check_read_conan("pepe", self.openssl_ref) + tmp_ref = ConanFileReference.loads("openssl/2.0.1@alfred/testing") + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "pepe", tmp_ref) + + tmp_ref = ConanFileReference.loads("openssl/2.0.1@lasote/otherchannel") + authorizer.check_read_conan("pepe", tmp_ref) + + def permissions_test(self): + """Check that permissions logic is ok""" + # Only lasote can read it but other conans can be readed + read_perms = [(str(self.openssl_ref), "lasote"), ("*/*@*/*", "*")] + # Only pepe (and lasote because its owner) can write it and no more users can write + write_perms = [(str(self.openssl_ref), "pepe")] + + authorizer = BasicAuthorizer(read_perms, write_perms) + + # READ PERMISSIONS + + # Pepe can't read conans + self.assertRaises(ForbiddenException, + authorizer.check_read_conan, "pepe", self.openssl_ref) + + # Owner can read conans + authorizer.check_read_conan("lasote", self.openssl_ref) + + # Pepe can read other conans + 
authorizer.check_read_conan("pepe", self.openssl_ref2) + + # Pepe can't read package + self.assertRaises(ForbiddenException, + authorizer.check_read_package, "pepe", self.package_reference) + + # Owner can read package + authorizer.check_read_package("lasote", self.package_reference) + + # Pepe can read other package + authorizer.check_read_package("pepe", self.package_reference2) + + # WRITE PERMISSIONS + + # Pepe can write conans + authorizer.check_write_conan("pepe", self.openssl_ref) + + # Juan can't write conans + self.assertRaises(ForbiddenException, + authorizer.check_write_conan, "juan", self.openssl_ref) + + # Owner can write conans + authorizer.check_write_conan("lasote", self.openssl_ref) + + # Pepe can't write other conans + self.assertRaises(ForbiddenException, + authorizer.check_write_conan, "pepe", self.openssl_ref2) + + # Owner can write package + authorizer.check_write_package("lasote", self.package_reference) + + # Pepe can write package + authorizer.check_write_package("pepe", self.package_reference) + + # Pepe can't write other package + self.assertRaises(ForbiddenException, + authorizer.check_write_package, "pepe", self.package_reference2) + + diff --git a/testbed/conan-io__conan/conans/test/server/service/service_test.py b/testbed/conan-io__conan/conans/test/server/service/service_test.py new file mode 100644 index 0000000000000000000000000000000000000000..6e0fef4fa8bc937c8c3d0f33b6803574118b0b9d --- /dev/null +++ b/testbed/conan-io__conan/conans/test/server/service/service_test.py @@ -0,0 +1,249 @@ +import unittest +from conans.model.ref import ConanFileReference, PackageReference +from conans.server.service.service import ConanService, FileUploadDownloadService,\ + SearchService +from conans.paths import CONAN_MANIFEST, CONANINFO, SimplePaths +from conans.util.files import save_files, save, mkdir, load, md5sum +from conans.server.service.authorize import BasicAuthorizer +import os +from conans.errors import NotFoundException, 
RequestErrorException +from conans.test.utils.test_files import hello_source_files +from conans.server.store.file_manager import FileManager +from conans.server.crypto.jwt.jwt_updown_manager import JWTUpDownAuthManager +from datetime import timedelta +from time import sleep +from conans.model.manifest import FileTreeManifest +from conans.test.utils.test_files import temp_folder +from conans.server.store.disk_adapter import ServerDiskAdapter +from conans.search.search import DiskSearchManager, DiskSearchAdapter + + +class MockFileSaver(): + + def __init__(self, filename, content): + self.filename = filename + self.content = content + + def save(self, abspath): + save(os.path.join(abspath, self.filename), self.content) + + +class FileUploadDownloadServiceTest(unittest.TestCase): + + def setUp(self): + self.updown_auth_manager = JWTUpDownAuthManager("secret", + timedelta(seconds=1)) + + self.storage_dir = temp_folder() + self.service = FileUploadDownloadService(self.updown_auth_manager, self.storage_dir) + self.disk_path = os.path.join(self.storage_dir, "dir", "other") + self.relative_file_path = "dir/other/thefile.txt" + self.absolute_file_path = os.path.join(self.disk_path, "thefile.txt") + mkdir(self.disk_path) + self.content = "the content" + + def test_file_download(self): + save(os.path.join(self.disk_path, "thefile.txt"), self.content) + token = self.updown_auth_manager.get_token_for(self.relative_file_path, + "pepe", len(self.content)) + path_to_file = self.service.get_file_path(self.relative_file_path, token) + + self.assertEquals(path_to_file, self.absolute_file_path) + + readed_content = load(self.absolute_file_path) + self.assertEquals(readed_content, self.content) + + # Expire token + sleep(2) + self.assertRaises(NotFoundException, self.service.get_file_path, + self.relative_file_path, token) + + def test_file_upload(self): + token = self.updown_auth_manager.get_token_for(self.relative_file_path, + "pepe", len(self.content)) + + file_saver = 
MockFileSaver("thefile.txt", self.content) + self.assertFalse(os.path.exists(self.absolute_file_path)) + self.service.put_file(file_saver, self.absolute_file_path, token, len(self.content)) + + self.assertTrue(os.path.exists(self.absolute_file_path)) + + # Raises if wrong size + self.assertRaises(RequestErrorException, self.service.put_file, file_saver, + self.absolute_file_path, token, len(self.content) + 1) + + +class ConanServiceTest(unittest.TestCase): + + def setUp(self): + self.conan_reference = ConanFileReference.loads("openssl/2.0.3@lasote/testing") + self.package_reference = PackageReference(self.conan_reference, "123123123") + self.tmp_dir = temp_folder() + + read_perms = [("*/*@*/*", "*")] + write_perms = [] + authorizer = BasicAuthorizer(read_perms, write_perms) + + self.fake_url = "http://url" + updown_auth_manager = JWTUpDownAuthManager("secret", + timedelta(seconds=200)) + adapter = ServerDiskAdapter(self.fake_url, self.tmp_dir, updown_auth_manager) + self.paths = SimplePaths(self.tmp_dir) + self.file_manager = FileManager(self.paths, adapter) + + search_adapter = DiskSearchAdapter() + self.search_manager = DiskSearchManager(self.paths, search_adapter) + + self.service = ConanService(authorizer, self.file_manager, "lasote") + self.search_service = SearchService(authorizer, self.search_manager, "lasote") + + files = hello_source_files("test") + save_files(self.paths.export(self.conan_reference), files) + self.conan_digest = FileTreeManifest.create(self.paths.export(self.conan_reference)) + conan_digest_path = os.path.join(self.paths.export(self.conan_reference), CONAN_MANIFEST) + save(conan_digest_path, str(self.conan_digest)) + + files = hello_source_files("package") + save_files(self.paths.package(self.package_reference), files) + + def test_get_conanfile_snapshot(self): + snap = self.service.get_conanfile_snapshot(self.conan_reference) + base_path = self.paths.export(self.conan_reference) + + snap_expected = {'hello.cpp': 
md5sum(os.path.join(base_path, "hello.cpp")), + 'conanmanifest.txt': md5sum(os.path.join(base_path, "conanmanifest.txt")), + 'executable': md5sum(os.path.join(base_path, "executable")), + 'main.cpp': md5sum(os.path.join(base_path, "main.cpp")), + 'CMakeLists.txt': md5sum(os.path.join(base_path, "CMakeLists.txt")), + 'hellotest.h': md5sum(os.path.join(base_path, "hellotest.h"))} + + self.assertEquals(snap, snap_expected) + + def test_get_conanfile_download_urls(self): + urls = self.service.get_conanfile_download_urls(self.conan_reference) + # Remove parameters + urls = {name: url.split("?signature")[0] for name, url in urls.items()} + + def fake_url_build(filename): + return self.fake_url + "/" + "/".join(self.conan_reference) + "/export/" + filename + + expected_urls = {'CMakeLists.txt': fake_url_build('CMakeLists.txt'), + 'conanmanifest.txt': fake_url_build('conanmanifest.txt'), + 'executable': fake_url_build('executable'), + 'hello.cpp': fake_url_build('hello.cpp'), + 'hellotest.h': fake_url_build('hellotest.h'), + 'main.cpp': fake_url_build('main.cpp')} + self.assertEquals(urls, expected_urls) + + def test_get_package_download_urls(self): + urls = self.service.get_package_download_urls(self.package_reference) + # Remove parameters + urls = {name: url.split("?signature")[0] for name, url in urls.items()} + + def fake_url_build(filename): + return self.fake_url + "/" + "/".join(self.package_reference.conan) \ + + "/package/" + self.package_reference.package_id + "/" + filename + + expected_urls = {'CMakeLists.txt': fake_url_build('CMakeLists.txt'), + 'executable': fake_url_build('executable'), + 'hello.cpp': fake_url_build('hello.cpp'), + 'hellopackage.h': fake_url_build('hellopackage.h'), + 'main.cpp': fake_url_build('main.cpp')} + self.assertEquals(urls, expected_urls) + + def test_get_conanfile_upload_urls(self): + urls = self.service.get_conanfile_upload_urls(self.conan_reference, + {"conanfile.py": 23, + "conanmanifest.txt": 24}) + # Remove parameters + urls 
= {name: url.split("?signature")[0] for name, url in urls.items()} + + def fake_url_build(filename): + return self.fake_url + "/" + "/".join(self.conan_reference) + "/export/" + filename + + expected_urls = {'conanfile.py': fake_url_build('conanfile.py'), + 'conanmanifest.txt': fake_url_build('conanmanifest.txt')} + self.assertEquals(urls, expected_urls) + + def test_get_package_upload_urls(self): + urls = self.service.get_package_upload_urls(self.package_reference, {"uno.lib": 23, + "dos.dll": 24}) + # Remove parameters + urls = {name: url.split("?signature")[0] for name, url in urls.items()} + + def fake_url_build(filename): + return self.fake_url + "/" + "/".join(self.package_reference.conan) \ + + "/package/" + self.package_reference.package_id + "/" + filename + + expected_urls = {'uno.lib': fake_url_build('uno.lib'), + 'dos.dll': fake_url_build('dos.dll')} + self.assertEquals(urls, expected_urls) + + def test_search(self): + """ check the dict is returned by get_packages_info service + """ + # Creating and saving conans, packages, and conans.vars + conan_ref2 = ConanFileReference("openssl", "3.0", "lasote", "stable") + conan_ref3 = ConanFileReference("Assimp", "1.10", "fenix", "stable") + conan_ref4 = ConanFileReference("assimpFake", "0.1", "phil", "stable") + + package_ref2 = PackageReference(conan_ref2, "12345587754") + package_ref3 = PackageReference(conan_ref3, "77777777777") + + conan_vars = """ +[options] + use_Qt=%s +""" + conan_vars1 = conan_vars % "True" + conan_vars2 = conan_vars % "False" + conan_vars3 = conan_vars % "True" + + save_files(self.paths.package(self.package_reference), {CONANINFO: conan_vars1}) + save_files(self.paths.package(package_ref2), {CONANINFO: conan_vars2}) + save_files(self.paths.package(package_ref3), {CONANINFO: conan_vars3}) + save_files(self.paths.export(conan_ref4), {"dummy.txt": "//"}) + + info = self.search_service.search() + expected = [conan_ref3, conan_ref4, self.conan_reference, conan_ref2] + 
self.assertEqual(expected, info) + + info = self.search_service.search(pattern="Assimp*", ignorecase=False) + self.assertEqual(info, [conan_ref3]) + + info = self.search_service.search_packages(conan_ref2, None) + self.assertEqual(info, {'12345587754': {'full_requires': [], + 'options': {'use_Qt': 'False'}, + 'settings': {}, + 'recipe_hash': None}}) + + info = self.search_service.search_packages(conan_ref3, None) + self.assertEqual(info, {'77777777777': {'full_requires': [], + 'options': {'use_Qt': 'True'}, + 'settings': {}, + 'recipe_hash': None}}) + + def remove_test(self): + conan_ref2 = ConanFileReference("OpenCV", "3.0", "lasote", "stable") + conan_ref3 = ConanFileReference("Assimp", "1.10", "lasote", "stable") + + package_ref2 = PackageReference(conan_ref2, "12345587754") + package_ref3 = PackageReference(conan_ref3, "77777777777") + + save_files(self.paths.export(conan_ref2), {"fake.txt": "//fake"}) + save_files(self.paths.package(package_ref2), {"fake.txt": "//fake"}) + save_files(self.paths.package(package_ref3), {"fake.txt": "//fake"}) + + # Delete all the conans folder + self.service.remove_conanfile(self.conan_reference) + conan_path = self.paths.conan(self.conan_reference) + self.assertFalse(os.path.exists(conan_path)) + + # Delete one package + self.service.remove_packages(conan_ref3, ["77777777777"]) + package_folder_3 = self.paths.package(PackageReference(conan_ref3, '077777777777')) + self.assertFalse(os.path.exists(package_folder_3)) + + # Raise an exception + self.assertRaises(NotFoundException, + self.service.remove_conanfile, + ConanFileReference("Fake", "1.0", "lasote", "stable")) diff --git a/testbed/conan-io__conan/conans/test/server/utils/__init__.py b/testbed/conan-io__conan/conans/test/server/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/test/server/utils/server_launcher.py 
b/testbed/conan-io__conan/conans/test/server/utils/server_launcher.py new file mode 100644 index 0000000000000000000000000000000000000000..0f29456b429d4e25f3d0f5614d322acddd073670 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/server/utils/server_launcher.py @@ -0,0 +1,124 @@ +#!/usr/bin/python +from conans.server.service.authorize import BasicAuthorizer, BasicAuthenticator +import os +from conans.server.conf import get_file_manager +from conans.server.rest.server import ConanServer +from conans.server.crypto.jwt.jwt_credentials_manager import JWTCredentialsManager +from conans.server.crypto.jwt.jwt_updown_manager import JWTUpDownAuthManager +from conans.util.log import logger +from conans.util.files import mkdir +from conans.test.utils.test_files import temp_folder +from conans.server.migrate import migrate_and_get_server_config +from conans.search.search import DiskSearchAdapter, DiskSearchManager +from conans.paths import SimplePaths +import time +import shutil +from conans import SERVER_CAPABILITIES + + +TESTING_REMOTE_PRIVATE_USER = "private_user" +TESTING_REMOTE_PRIVATE_PASS = "private_pass" + + +class TestServerLauncher(object): + port = 0 + + def __init__(self, base_path=None, read_permissions=None, + write_permissions=None, users=None, base_url=None, plugins=None, + server_version=None, + min_client_compatible_version=None, + server_capabilities=None): + + plugins = plugins or [] + if not base_path: + base_path = temp_folder() + + if server_capabilities is None: + server_capabilities = SERVER_CAPABILITIES # Default enabled + + if not os.path.exists(base_path): + raise Exception("Base path not exist! 
%s") + + # Define storage_folder, if not, it will be readed from conf file and pointed to real user home + self.storage_folder = os.path.join(base_path, ".conan_server", "data") + mkdir(self.storage_folder) + + server_config = migrate_and_get_server_config(base_path, self.storage_folder) + + if TestServerLauncher.port == 0: + TestServerLauncher.port = server_config.port + + # Encode and Decode signature for Upload and Download service + updown_auth_manager = JWTUpDownAuthManager(server_config.updown_secret, + server_config.authorize_timeout) + self.file_manager = get_file_manager(server_config, public_url=base_url, + updown_auth_manager=updown_auth_manager) + + search_adapter = DiskSearchAdapter() + self.search_manager = DiskSearchManager(SimplePaths(server_config.disk_storage_path), search_adapter) + # Prepare some test users + if not read_permissions: + read_permissions = server_config.read_permissions + read_permissions.append(("private_library/1.0.0@private_user/testing", "*")) + read_permissions.append(("*/*@*/*", "*")) + + if not write_permissions: + write_permissions = server_config.write_permissions + + if not users: + users = dict(server_config.users) + + users[TESTING_REMOTE_PRIVATE_USER] = TESTING_REMOTE_PRIVATE_PASS + + authorizer = BasicAuthorizer(read_permissions, write_permissions) + authenticator = BasicAuthenticator(users) + credentials_manager = JWTCredentialsManager(server_config.jwt_secret, + server_config.jwt_expire_time) + + logger.debug("Storage path: %s" % self.storage_folder) + self.port = TestServerLauncher.port + + self.ra = ConanServer(self.port, credentials_manager, updown_auth_manager, + authorizer, authenticator, self.file_manager, self.search_manager, + server_version, min_client_compatible_version, + server_capabilities) + for plugin in plugins: + self.ra.api_v1.install(plugin) + + def start(self, daemon=True): + """from multiprocessing import Process + self.p1 = Process(target=ra.run, kwargs={"host": "0.0.0.0"}) + self.p1.start() + 
self.p1""" + import threading + + class StoppableThread(threading.Thread): + """Thread class with a stop() method. The thread itself has to check + regularly for the stopped() condition.""" + + def __init__(self, *args, **kwargs): + super(StoppableThread, self).__init__(*args, **kwargs) + self._stop = threading.Event() + + def stop(self): + self._stop.set() + + def stopped(self): + return self._stop.isSet() + + self.t1 = StoppableThread(target=self.ra.run, kwargs={"host": "0.0.0.0", "quiet": True}) + self.t1.daemon = daemon + self.t1.start() + time.sleep(1) + + def stop(self): + self.ra.root_app.close() + self.t1.stop() + + def clean(self): + if os.path.exists(self.storage_folder): + shutil.rmtree(self.storage_folder) + +if __name__ == "__main__": + server = TestServerLauncher() + server.start(daemon=False) diff --git a/testbed/conan-io__conan/conans/test/tgz_md5_test.py b/testbed/conan-io__conan/conans/test/tgz_md5_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e31f4f3ad25971bc5ea35524fe35b1efabeba634 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/tgz_md5_test.py @@ -0,0 +1,36 @@ +import unittest +from conans.test.utils.test_files import temp_folder +from conans.util.files import save, md5sum +from conans.paths import PACKAGE_TGZ_NAME +import os +import time +from conans.client.remote_manager import compress_files + + +class TgzMd5Test(unittest.TestCase): + """The md5 of a tgz should be the same if the files inside are the same""" + + def testMd5Name(self): + folder = temp_folder() + save(os.path.join(folder, "one_file.txt"), b"The contents") + save(os.path.join(folder, "Two_file.txt"), b"Two contents") + + files = { + "one_file.txt": os.path.join(folder, "one_file.txt"), + "Two_file.txt": os.path.join(folder, "Two_file.txt"), + } + + compress_files(files, PACKAGE_TGZ_NAME, excluded=[PACKAGE_TGZ_NAME], dest_dir=folder) + file_path = os.path.join(folder, PACKAGE_TGZ_NAME) + + md5_a = md5sum(file_path) + + time.sleep(1) # 
Timestamps change + + folder = temp_folder() + compress_files(files, PACKAGE_TGZ_NAME, excluded=[PACKAGE_TGZ_NAME], dest_dir=folder) + file_path = os.path.join(folder, PACKAGE_TGZ_NAME) + + md5_b = md5sum(file_path) + + self.assertEquals(md5_a, md5_b) diff --git a/testbed/conan-io__conan/conans/test/tools.py b/testbed/conan-io__conan/conans/test/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..f393ee86422305b8e7de1a2d5fcae64c0a0abaaa --- /dev/null +++ b/testbed/conan-io__conan/conans/test/tools.py @@ -0,0 +1,434 @@ +from conans.client.output import ConanOutput +from conans.client.command import Command, migrate_and_get_client_cache +from io import StringIO +import shlex +from conans.util.files import save_files, load, save +from conans.test.utils.runner import TestRunner +import os +from conans.client.remote_manager import RemoteManager +from conans.client.store.localdb import LocalDB +from conans.client.rest.auth_manager import ConanApiAuthManager +from conans.client.userio import UserIO +import sys +from conans.util.log import logger +import shutil +import requests +from mock import Mock +import uuid +from webtest.app import TestApp +from conans.client.rest.rest_client import RestApiClient +from six.moves.urllib.parse import urlsplit, urlunsplit +from conans.test.server.utils.server_launcher import (TESTING_REMOTE_PRIVATE_USER, + TESTING_REMOTE_PRIVATE_PASS, + TestServerLauncher) +from conans.util.env_reader import get_env +from conans import __version__ as CLIENT_VERSION +from conans.client.conf import MIN_SERVER_COMPATIBLE_VERSION +from conans.client.rest.version_checker import VersionCheckerRequester +from conans.model.version import Version +from conans.test.utils.test_files import temp_folder +from conans.client.remote_registry import RemoteRegistry +from collections import Counter +import six +from conans.client.rest.uploader_downloader import IterableToFileAdapter +from conans.client.client_cache import ClientCache +from 
conans.search.search import DiskSearchManager, DiskSearchAdapter + + +class TestingResponse(object): + """Wraps a response from TestApp external tool + to guarantee the presence of response.ok, response.content + and response.status_code, as it was a requests library object. + + Is instanced by TestRequester on each request""" + + def __init__(self, test_response): + self.test_response = test_response + + @property + def headers(self): + return self.test_response.headers + + @property + def ok(self): + return self.test_response.status_code == 200 + + @property + def content(self): + return self.test_response.body + + @property + def charset(self): + return self.test_response.charset + + @charset.setter + def charset(self, newcharset): + self.test_response.charset = newcharset + + @property + def text(self): + return self.test_response.text + + def iter_content(self, chunk_size=1): # @UnusedVariable + return [self.content] + + @property + def status_code(self): + return self.test_response.status_code + + +class TestRequester(object): + """Fake requests module calling server applications + with TestApp""" + + def __init__(self, test_servers): + self.test_servers = test_servers + + def _get_url_path(self, url): + # Remove schema from url + _, _, path, query, _ = urlsplit(url) + url = urlunsplit(("", "", path, query, "")) + return url + + def _get_wsgi_app(self, url): + for test_server in self.test_servers.values(): + if url.startswith(test_server.fake_url): + return test_server.app + + raise Exception("Testing error: Not remote found") + + def get(self, url, auth=None, headers=None, verify=None, stream=None): + headers = headers or {} + app, url = self._prepare_call(url, headers, auth) + if app: + response = app.get(url, headers=headers, expect_errors=True) + return TestingResponse(response) + else: + return requests.get(url, headers=headers) + + def put(self, url, data, headers=None, verify=None, auth=None): + headers = headers or {} + app, url = 
self._prepare_call(url, headers, auth=auth) + if app: + if isinstance(data, IterableToFileAdapter): + data_accum = b"" + for tmp in data: + data_accum += tmp + data = data_accum + response = app.put(url, data, expect_errors=True, headers=headers) + return TestingResponse(response) + else: + return requests.put(url, data=data.read()) + + def delete(self, url, auth, headers, verify=None): + headers = headers or {} + app, url = self._prepare_call(url, headers, auth) + if app: + response = app.delete(url, "", headers=headers, expect_errors=True) + return TestingResponse(response) + else: + return requests.delete(url, headers=headers) + + def post(self, url, auth=None, headers=None, verify=None, stream=None, data=None, json=None): + headers = headers or {} + app, url = self._prepare_call(url, headers, auth) + if app: + content_type = None + if json: + import json as JSON + data = JSON.dumps(json) + content_type = "application/json" + response = app.post(url, data, headers=headers, + content_type=content_type, expect_errors=True) + return TestingResponse(response) + else: + requests.post(url, data=data, json=json) + + def _prepare_call(self, url, headers, auth): + if not url.startswith("http://fake"): # Call to S3 (or external), perform a real request + return None, url + app = self._get_wsgi_app(url) + url = self._get_url_path(url) # Remove http://server.com + + self._set_auth_headers(auth, headers) + return app, url + + def _set_auth_headers(self, auth, headers): + if auth: + mock_request = Mock() + mock_request.headers = {} + auth(mock_request) + headers.update(mock_request.headers) + + +class TestServer(object): + from conans import __version__ as SERVER_VERSION + from conans.server.conf import MIN_CLIENT_COMPATIBLE_VERSION + + def __init__(self, read_permissions=None, + write_permissions=None, users=None, plugins=None, base_path=None, + server_version=Version(SERVER_VERSION), + min_client_compatible_version=Version(MIN_CLIENT_COMPATIBLE_VERSION), + 
server_capabilities=None): + """ + 'read_permissions' and 'write_permissions' is a list of: + [("opencv/2.3.4@lasote/testing", "user1, user2")] + + 'users': {username: plain-text-passwd} + """ + # Unique identifier for this server, will be used by TestRequester + # to determine where to call. Why? remote_manager just assing an url + # to the rest_client, so rest_client doesn't know about object instances, + # just urls, so testing framework performs a map between fake urls and instances + if read_permissions is None: + read_permissions = [("*/*@*/*", "*")] + if write_permissions is None: + write_permissions = [] + if users is None: + users = {"lasote": "mypass"} + + self.fake_url = "http://fake%s.com" % str(uuid.uuid4()).replace("-", "") + min_client_ver = min_client_compatible_version + self.test_server = TestServerLauncher(base_path, read_permissions, + write_permissions, users, + base_url=self.fake_url + "/v1", + plugins=plugins, + server_version=server_version, + min_client_compatible_version=min_client_ver, + server_capabilities=server_capabilities) + self.app = TestApp(self.test_server.ra.root_app) + + @property + def paths(self): + return self.test_server.file_manager.paths + + def __repr__(self): + return "TestServer @ " + self.fake_url + + def __str__(self): + return self.fake_url + + +class TestBufferConanOutput(ConanOutput): + + """ wraps the normal output of the application, captures it into an stream + and gives it operators similar to string, so it can be compared in tests + """ + + def __init__(self): + self._buffer = StringIO() + ConanOutput.__init__(self, self._buffer, color=False) + + def __repr__(self): + # FIXME: I'm sure there is a better approach. Look at six docs. 
+ if six.PY2: + return str(self._buffer.getvalue().encode("ascii", "ignore")) + else: + return self._buffer.getvalue() + + def __str__(self, *args, **kwargs): + return self.__repr__() + + def __eq__(self, value): + return self.__repr__() == value + + def __ne__(self, value): + return not self.__eq__(value) + + def __contains__(self, value): + return value in self.__repr__() + + +class MockedUserIO(UserIO): + + """ + Mock for testing. If get_username or get_password is requested will raise + an exception except we have a value to return. + """ + + def __init__(self, logins, ins=sys.stdin, out=None): + """ + logins is a dict of {remote: list(user, password)} + will return sequentially + """ + assert isinstance(logins, dict) + self.logins = logins + self.login_index = Counter() + UserIO.__init__(self, ins, out) + + def get_username(self, remote_name): + """Overridable for testing purpose""" + sub_dict = self.logins[remote_name] + index = self.login_index[remote_name] + if len(sub_dict) - 1 < index: + raise Exception("Bad user/password in testing framework, " + "provide more tuples or input the right ones") + return sub_dict[index][0] + + def get_password(self, remote_name): + """Overridable for testing purpose""" + sub_dict = self.logins[remote_name] + index = self.login_index[remote_name] + tmp = sub_dict[index][1] + self.login_index.update([remote_name]) + return tmp + + +class TestClient(object): + + """ Test wrap of the conans application to launch tests in the same way as + in command line + """ + + def __init__(self, base_folder=None, current_folder=None, + servers=None, users=None, client_version=CLIENT_VERSION, + min_server_compatible_version=MIN_SERVER_COMPATIBLE_VERSION, + requester_class=None, runner=None): + """ + storage_folder: Local storage path + current_folder: Current execution folder + servers: dict of {remote_name: TestServer} + logins is a list of (user, password) for auto input in order + if required==> [("lasote", "mypass"), ("other", 
"otherpass")] + """ + self.all_output = "" # For debugging purpose, append all the run outputs + self.users = users or {"default": + [(TESTING_REMOTE_PRIVATE_USER, TESTING_REMOTE_PRIVATE_PASS)]} + self.servers = servers or {} + + self.client_version = Version(str(client_version)) + self.min_server_compatible_version = Version(str(min_server_compatible_version)) + + self.base_folder = base_folder or temp_folder() + # Define storage_folder, if not, it will be read from conf file & pointed to real user home + self.storage_folder = os.path.join(self.base_folder, ".conan", "data") + self.client_cache = ClientCache(self.base_folder, self.storage_folder, TestBufferConanOutput()) + + search_adapter = DiskSearchAdapter() + self.search_manager = DiskSearchManager(self.client_cache, search_adapter) + + self._default_settings(get_env("CONAN_COMPILER", "gcc"), + get_env("CONAN_COMPILER_VERSION", "4.8"), + get_env("CONAN_LIBCXX", "libstdc++")) + + self.requester_class = requester_class + self.conan_runner = runner + + self.init_dynamic_vars() + + save(self.client_cache.registry, "") + registry = RemoteRegistry(self.client_cache.registry, TestBufferConanOutput()) + for name, server in self.servers.items(): + if isinstance(server, TestServer): + registry.add(name, server.fake_url) + else: + registry.add(name, server) + + logger.debug("Client storage = %s" % self.storage_folder) + self.current_folder = current_folder or temp_folder() + + @property + def paths(self): + return self.client_cache + + def _default_settings(self, compiler, compiler_version, libcxx): + """ allows to change the default settings in the file, to change compiler, version + """ + # Set default settings in global defined + self.client_cache.conan_config # For create the default file if not existing + text = load(self.client_cache.conan_conf_path) + # prevent TestClient instances with reused paths to write again the compiler + if compiler != "Visual Studio": + text = text.replace("compiler.runtime=MD", "") + if 
"compiler=" not in text: + # text = text.replace("build_type=Release", "") + + text += "\ncompiler=%s" % compiler + text += "\ncompiler.version=%s" % compiler_version + if compiler != "Visual Studio": + text += "\ncompiler.libcxx=%s" % libcxx + save(self.client_cache.conan_conf_path, text) + + @property + def default_compiler_visual_studio(self): + text = load(self.client_cache.conan_conf_path) + return "compiler=Visual Studio" in text + + def _init_collaborators(self, user_io=None): + + output = TestBufferConanOutput() + self.user_io = user_io or MockedUserIO(self.users, out=output) + + self.runner = TestRunner(output, runner=self.conan_runner) + + # Check if servers are real + real_servers = False + for server in self.servers.values(): + if isinstance(server, str): # Just URI + real_servers = True + + if real_servers: + requester = requests + else: + if self.requester_class: + requester = self.requester_class(self.servers) + else: + requester = TestRequester(self.servers) + + # Verify client version against remotes + self.requester = VersionCheckerRequester(requester, self.client_version, + self.min_server_compatible_version, output) + + self.rest_api_client = RestApiClient(output, requester=self.requester) + # To store user and token + self.localdb = LocalDB(self.client_cache.localdb) + # Wraps RestApiClient to add authentication support (same interface) + auth_manager = ConanApiAuthManager(self.rest_api_client, self.user_io, self.localdb) + # Handle remote connections + self.remote_manager = RemoteManager(self.client_cache, auth_manager, self.user_io.out) + + def init_dynamic_vars(self, user_io=None): + # Migration system + self.client_cache = migrate_and_get_client_cache(self.base_folder, TestBufferConanOutput(), + storage_folder=self.storage_folder) + + # Maybe something have changed with migrations + self._init_collaborators(user_io) + + def run(self, command_line, user_io=None, ignore_error=False): + """ run a single command as in the command line. 
+ If user or password is filled, user_io will be mocked to return this + tuple if required + """ + self.init_dynamic_vars(user_io) + + command = Command(self.client_cache, self.user_io, self.runner, self.remote_manager, self.search_manager) + args = shlex.split(command_line) + current_dir = os.getcwd() + os.chdir(self.current_folder) + + old_modules = list(sys.modules.keys()) + try: + error = command.run(args) + finally: + os.chdir(current_dir) + # Reset sys.modules to its prev state. A .copy() DOES NOT WORK + added_modules = set(sys.modules).difference(old_modules) + for added in added_modules: + sys.modules.pop(added, None) + + if not ignore_error and error: + logger.error(self.user_io.out) + raise Exception("Command failed:\n%s" % command_line) + + self.all_output += str(self.user_io.out) + return error + + def save(self, files, path=None, clean_first=False): + """ helper metod, will store files in the current folder + param files: dict{filename: filecontents} + """ + path = path or self.current_folder + if clean_first: + shutil.rmtree(self.current_folder, ignore_errors=True) + save_files(path, files) diff --git a/testbed/conan-io__conan/conans/test/tools_test.py b/testbed/conan-io__conan/conans/test/tools_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b7754a8095035e11ed0b22cd8b0bf2143565b163 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/tools_test.py @@ -0,0 +1,228 @@ +import unittest +from conans.tools import SystemPackageTool, replace_in_file +import os +from conans.test.utils.test_files import temp_folder +from conans import tools +from conans.test.utils.visual_project_files import get_vs_project_files +from conans.test.tools import TestClient, TestBufferConanOutput +from conans.paths import CONANFILE +import platform +from conans.errors import ConanException +from nose.plugins.attrib import attr + + +class RunnerMock(object): + + def __init__(self, return_ok=True): + self.command_called = None + self.return_ok = 
return_ok

    def __call__(self, command, output):
        # Record the command so tests can assert exactly what was invoked;
        # mimic a process exit code: 0 on success, 1 on failure.
        self.command_called = command
        return 0 if self.return_ok else 1


class ReplaceInFileTest(unittest.TestCase):
    """Checks replace_in_file() against files saved with non-UTF8 bytes."""

    def setUp(self):
        # Non-ASCII sample text to exercise the encoding handling
        text = u'J\xe2nis\xa7'
        self.tmp_folder = temp_folder()

        # File written with Windows-1252 encoding ("ignore" drops unmappable chars)
        self.win_file = os.path.join(self.tmp_folder, "win_encoding.txt")
        text = text.encode("Windows-1252", "ignore")
        with open(self.win_file, "wb") as handler:
            handler.write(text)

        # Same raw bytes under a different name
        self.bytes_file = os.path.join(self.tmp_folder, "bytes_encoding.txt")
        with open(self.bytes_file, "wb") as handler:
            handler.write(text)

    def test_replace_in_file(self):
        # Replacement must succeed on both files despite the non-UTF8 content
        replace_in_file(self.win_file, "nis", "nus")
        replace_in_file(self.bytes_file, "nis", "nus")

        with open(self.win_file, "rt") as handler:
            content = handler.read()
            self.assertNotIn("nis", content)
            self.assertIn("nus", content)

        with open(self.bytes_file, "rt") as handler:
            content = handler.read()
            self.assertNotIn("nis", content)
            self.assertIn("nus", content)


class ToolsTest(unittest.TestCase):

    def cpu_count_test(self):
        # cpu_count() must return a positive integer on any platform
        cpus = tools.cpu_count()
        self.assertIsInstance(cpus, int)
        self.assertGreaterEqual(cpus, 1)

    def test_environment_nested(self):
        # Each nested environment_append layer overrides the outer one, and
        # unwinding a layer must restore the values of the layer above it.
        with tools.environment_append({"A": "1", "Z": "40"}):
            with tools.environment_append({"A": "1", "B": "2"}):
                with tools.environment_append({"A": "2", "B": "2"}):
                    self.assertEquals(os.getenv("A"), "2")
                    self.assertEquals(os.getenv("B"), "2")
                    self.assertEquals(os.getenv("Z"), "40")
                self.assertEquals(os.getenv("A", None), "1")
                self.assertEquals(os.getenv("B", None), "2")
            self.assertEquals(os.getenv("A", None), "1")
            self.assertEquals(os.getenv("Z", None), "40")

        # After the outermost exit nothing may remain in the environment
        self.assertEquals(os.getenv("A", None), None)
        self.assertEquals(os.getenv("B", None), None)
        self.assertEquals(os.getenv("Z", None), None)

    def system_package_tool_fail_when_not_0_returned_test(self):
        # A runner that returns non-zero must make update() raise
        runner = RunnerMock(return_ok=False)
        spt = SystemPackageTool(runner=runner)
        if platform.system() !=
"Windows": + msg = "Command 'sudo apt-get update' failed" if platform.system() == "Linux" \ + else "Command 'brew update' failed" + with self.assertRaisesRegexp(ConanException, msg): + spt.update() + else: + spt.update() # Won't raise anything because won't do anything + + def system_package_tool_test(self): + + runner = RunnerMock() + spt = SystemPackageTool(runner=runner) + + # fake os info to linux debian, default sudo + spt._os_info.is_linux = True + spt._os_info.linux_distro = "debian" + spt.update() + self.assertEquals(runner.command_called, "sudo apt-get update") + + spt._os_info.linux_distro = "ubuntu" + spt.update() + self.assertEquals(runner.command_called, "sudo apt-get update") + + spt._os_info.linux_distro = "knoppix" + spt.update() + self.assertEquals(runner.command_called, "sudo apt-get update") + + spt._os_info.linux_distro = "fedora" + spt.update() + self.assertEquals(runner.command_called, "sudo yum check-update") + + spt._os_info.linux_distro = "redhat" + spt.install("a_package") + self.assertEquals(runner.command_called, "sudo yum install -y a_package") + + spt._os_info.linux_distro = "debian" + spt.install("a_package") + self.assertEquals(runner.command_called, "sudo apt-get install -y a_package") + + spt._os_info.is_macos = True + spt._os_info.is_linux = False + + spt.update() + self.assertEquals(runner.command_called, "brew update") + spt.install("a_package") + self.assertEquals(runner.command_called, "brew install a_package") + + os.environ["CONAN_SYSREQUIRES_SUDO"] = "False" + + spt = SystemPackageTool(runner=runner) + spt._os_info.is_linux = True + + spt._os_info.linux_distro = "redhat" + spt.install("a_package") + self.assertEquals(runner.command_called, "yum install -y a_package") + spt.update() + self.assertEquals(runner.command_called, "yum check-update") + + spt._os_info.linux_distro = "ubuntu" + spt.install("a_package") + self.assertEquals(runner.command_called, "apt-get install -y a_package") + + spt.update() + 
self.assertEquals(runner.command_called, "apt-get update") + + spt._os_info.is_macos = True + spt._os_info.is_linux = False + + spt.update() + self.assertEquals(runner.command_called, "brew update") + spt.install("a_package") + self.assertEquals(runner.command_called, "brew install a_package") + + del os.environ["CONAN_SYSREQUIRES_SUDO"] + + @attr('slow') + def build_vs_project_test(self): + if platform.system() != "Windows": + return + conan_build_vs = """ +from conans import ConanFile, tools, ConfigureEnvironment +import platform + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2.1" + exports = "*" + settings = "os", "build_type", "arch", "compiler" + + def build(self): + build_command = tools.build_sln_command(self.settings, "MyProject.sln") + env = ConfigureEnvironment(self) + command = "%s && %s" % (env.command_line_env, build_command) + self.output.warn(command) + self.run(command) + + def package(self): + self.copy(pattern="*.exe") + +""" + client = TestClient() + files = get_vs_project_files() + files[CONANFILE] = conan_build_vs + + # Try with x86_64 + client.save(files) + client.run("export lasote/stable") + client.run("install Hello/1.2.1@lasote/stable --build -s arch=x86_64") + self.assertTrue("Release|x64", client.user_io.out) + self.assertTrue("Copied 1 '.exe' files: MyProject.exe", client.user_io.out) + + # Try with x86 + client.save(files, clean_first=True) + client.run("export lasote/stable") + client.run("install Hello/1.2.1@lasote/stable --build -s arch=x86") + self.assertTrue("Release|x86", client.user_io.out) + self.assertTrue("Copied 1 '.exe' files: MyProject.exe", client.user_io.out) + + # Try with x86 debug + client.save(files, clean_first=True) + client.run("export lasote/stable") + client.run("install Hello/1.2.1@lasote/stable --build -s arch=x86 -s build_type=Debug") + self.assertTrue("Debug|x86", client.user_io.out) + self.assertTrue("Copied 1 '.exe' files: MyProject.exe", client.user_io.out) + + def 
download_retries_test(self): + out = TestBufferConanOutput() + + # Connection error + with self.assertRaisesRegexp(ConanException, "HTTPConnectionPool"): + tools.download("http://fakeurl3.es/nonexists", + os.path.join(temp_folder(), "file.txt"), out=out, + retry=3, retry_wait=0) + + # Not found error + self.assertEquals(str(out).count("Waiting 0 seconds to retry..."), 2) + with self.assertRaisesRegexp(ConanException, "Error 404 downloading file"): + tools.download("https://github.com/conan-io/conan/blob/develop/FILE_NOT_FOUND.txt", + os.path.join(temp_folder(), "README.txt"), out=out, + retry=3, retry_wait=0) + + # And OK + dest = os.path.join(temp_folder(), "README.txt") + tools.download("https://raw.githubusercontent.com/conan-io/conan/develop/README.rst", + dest, out=out, + retry=3, retry_wait=0) + + self.assertTrue(os.path.exists(dest)) diff --git a/testbed/conan-io__conan/conans/test/update_settings_yml_test.py b/testbed/conan-io__conan/conans/test/update_settings_yml_test.py new file mode 100644 index 0000000000000000000000000000000000000000..1eeefc882f5c8360093f65440a9c0e434784eb42 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/update_settings_yml_test.py @@ -0,0 +1,81 @@ +import unittest +from conans.test.tools import TestClient +from conans.util.files import load, save +from conans.client.conf import default_settings_yml +from conans.model.settings import Settings + + +class UpdateSettingsYmlTest(unittest.TestCase): + """ This test is to validate that after adding a new settings, that allows a None + value, this None value does not modify exisisting packages SHAs + """ + + def test_update_settings(self): + file_content = ''' +from conans import ConanFile + +class ConanFileToolsTest(ConanFile): + name = "test" + version = "1.9" + settings = "os", "compiler", "arch" + + def source(self): + self.output.warn("Sourcing...") + + def build(self): + self.output.warn("Building...") + ''' + prev_settings = """ +os: [Windows, Linux, Macos, Android, FreeBSD, 
SunOS] +arch: [x86, x86_64, armv6, armv7, armv7hf, armv8] +compiler: + sun-cc: + version: ["5.10", "5.11", "5.12", "5.13", "5.14"] + libcxx: [libCstd, libstdcxx libstlport, libstdc++] + gcc: + version: ["4.4", "4.5", "4.6", "4.7", "4.8", "4.9", "5.1", "5.2", "5.3", "5.4", "6.1", "6.2", "6.3"] + libcxx: [libstdc++, libstdc++11] + Visual Studio: + runtime: [None, MD, MT, MTd, MDd] + version: ["8", "9", "10", "11", "12", "14"] + clang: + version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8"] + libcxx: [libstdc++, libstdc++11, libc++] + apple-clang: + version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.1", "7.2", "7.3"] + libcxx: [libstdc++, libc++] + +""" + files = {"conanfile.py": file_content} + client = TestClient() + save(client.paths.settings_path, prev_settings) + conf = load(client.paths.conan_conf_path) + conf = conf.replace("build_type=Release", "") + self.assertNotIn("build_type", conf) + save(client.paths.conan_conf_path, conf) + + settings = Settings.loads(default_settings_yml) + client.paths.conan_config.settings_defaults(settings) + self.assertNotIn("build_type", settings.values.dumps()) + client.save(files) + client.run("export lasote/testing") + self.assertNotIn("build_type", load(client.paths.settings_path)) + self.assertNotIn("build_type", load(client.paths.conan_conf_path)) + settings = Settings.loads(default_settings_yml) + client.paths.conan_config.settings_defaults(settings) + self.assertNotIn("build_type", settings.values.dumps()) + + client.run("install test/1.9@lasote/testing --build -s arch=x86_64 -s compiler=gcc " + "-s compiler.version=4.9 -s os=Windows -s compiler.libcxx=libstdc++") + self.assertIn("390146894f59dda18c902ee25e649ef590140732", client.user_io.out) + + # Now the new one + files = {"conanfile.py": file_content.replace('"arch"', '"arch", "build_type"')} + client = TestClient() + client.save(files) + client.run("export lasote/testing") + + client.run("install test/1.9@lasote/testing --build -s arch=x86_64 -s compiler=gcc " + "-s 
compiler.version=4.9 -s os=Windows -s build_type=None -s compiler.libcxx=libstdc++") + self.assertIn("build_type", load(client.paths.settings_path)) + self.assertIn("390146894f59dda18c902ee25e649ef590140732", client.user_io.out) diff --git a/testbed/conan-io__conan/conans/test/upload_test.py b/testbed/conan-io__conan/conans/test/upload_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b1a9f62eb8e45d09f95ab7f145d295937ce4b312 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/upload_test.py @@ -0,0 +1,334 @@ +import unittest +from conans.test.tools import TestClient, TestServer, TestRequester +from conans.test.utils.test_files import hello_source_files, temp_folder,\ + hello_conan_files +from conans.client.manager import CONANFILE +import os +from conans.paths import CONAN_MANIFEST, EXPORT_TGZ_NAME, CONANINFO +import platform +import stat +from conans.util.files import save +from conans.model.ref import ConanFileReference, PackageReference +from conans.model.manifest import FileTreeManifest +from conans.test.utils.test_files import uncompress_packaged_files +from conans.tools import untargz +from requests.packages.urllib3.exceptions import ConnectionError +from conans.test.utils.cpp_test_files import cpp_hello_conan_files + +myconan1 = """ +from conans import ConanFile + +class HelloConan(ConanFile): + name = "Hello" + version = "1.2.1" +""" + + +class BadConnectionUploader(TestRequester): + fail_on = 1 + + def __init__(self, *args, **kwargs): + super(BadConnectionUploader, self).__init__(*args, **kwargs) + self.counter_fail = 0 + + def put(self, *args, **kwargs): + self.counter_fail += 1 + if self.counter_fail == self.fail_on: + raise ConnectionError("Can't connect because of the evil mock") + else: + return super(BadConnectionUploader, self).put(*args, **kwargs) + + +class TerribleConnectionUploader(BadConnectionUploader): + def put(self, *args, **kwargs): + raise ConnectionError("Can't connect because of the evil mock") + + +class 
FailPairFilesUploader(BadConnectionUploader): + + def put(self, *args, **kwargs): + self.counter_fail += 1 + if self.counter_fail % 2 == 1: + raise ConnectionError("Pair file, error!") + else: + return super(BadConnectionUploader, self).put(*args, **kwargs) + + +class UploadTest(unittest.TestCase): + + def _get_client(self, requester=None): + servers = {} + # All can write (for avoid authentication until we mock user_io) + self.test_server = TestServer([("*/*@*/*", "*")], [("*/*@*/*", "*")], + users={"lasote": "mypass"}) + servers["default"] = self.test_server + return TestClient(servers=servers, users={"default": [("lasote", "mypass")]}, + requester_class=requester) + + def setUp(self): + self.client = self._get_client() + conan_digest = FileTreeManifest('123123123', {}) + self.conan_ref = ConanFileReference.loads("Hello/1.2.1@frodo/stable") + reg_folder = self.client.paths.export(self.conan_ref) + + self.client.run('upload %s' % str(self.conan_ref), ignore_error=True) + self.assertIn("There is no local conanfile exported as %s" % str(self.conan_ref), + self.client.user_io.out) + + files = hello_source_files() + self.client.save(files, path=reg_folder) + self.client.save({CONANFILE: myconan1, + CONAN_MANIFEST: str(conan_digest), + "include/math/lib1.h": "//copy", + "my_lib/debug/libd.a": "//copy", + "my_data/readme.txt": "//copy", + "my_bin/executable": "//copy"}, path=reg_folder) + + self.package_ref = PackageReference(self.conan_ref, "myfakeid") + self.server_pack_folder = self.test_server.paths.package(self.package_ref) + + package_folder = self.client.paths.package(self.package_ref) + save(os.path.join(package_folder, "include", "lib1.h"), "//header") + save(os.path.join(package_folder, "lib", "my_lib", "libd.a"), "//lib") + save(os.path.join(package_folder, "res", "shares", "readme.txt"), + "//res") + save(os.path.join(package_folder, "bin", "my_bin", "executable"), "//bin") + save(os.path.join(package_folder, CONANINFO), "info") + 
save(os.path.join(package_folder, CONAN_MANIFEST), "manifest") + + os.chmod(os.path.join(package_folder, "bin", "my_bin", "executable"), + os.stat(os.path.join(package_folder, "bin", "my_bin", "executable")).st_mode | + stat.S_IRWXU) + + digest_path = self.client.client_cache.digestfile_package(self.package_ref) + expected_manifest = FileTreeManifest.create(os.path.dirname(digest_path)) + save(os.path.join(package_folder, CONAN_MANIFEST), str(expected_manifest)) + + self.server_reg_folder = self.test_server.paths.export(self.conan_ref) + self.assertFalse(os.path.exists(self.server_reg_folder)) + self.assertFalse(os.path.exists(self.server_pack_folder)) + + def try_upload_bad_recipe_test(self): + files = hello_conan_files("Hello0", "1.2.1") + self.client.save(files) + self.client.run("export frodo/stable") + ref = ConanFileReference.loads("Hello0/1.2.1@frodo/stable") + os.unlink(os.path.join(self.client.client_cache.export(ref), CONAN_MANIFEST)) + with self.assertRaisesRegexp(Exception, "Command failed"): + self.client.run("upload %s" % str(ref)) + + self.assertIn("Cannot upload corrupted recipe", self.client.user_io.out) + + def upload_with_pattern_test(self): + for num in range(5): + files = hello_conan_files("Hello%s" % num, "1.2.1") + self.client.save(files) + self.client.run("export frodo/stable") + + self.client.run("upload Hello* --confirm") + for num in range(5): + self.assertIn("Uploading Hello%s/1.2.1@frodo/stable" % num, self.client.user_io.out) + + self.client.run("upload Hello0* --confirm") + self.assertIn("Uploaded conan recipe 'Hello0/1.2.1@frodo/stable' to 'default'", + self.client.user_io.out) + self.assertNotIn("Uploading Hello1/1.2.1@frodo/stable", self.client.user_io.out) + + def upload_error_test(self): + """Cause an error in the transfer and see some message""" + + # This will fail in the first put file, so, as we need to + # upload 3 files (conanmanifest, conanfile and tgz) will do it with 2 retries + client = 
self._get_client(BadConnectionUploader) + files = cpp_hello_conan_files("Hello0", "1.2.1", build=False) + client.save(files) + client.run("export frodo/stable") + client.run("upload Hello* --confirm --retry_wait=0") + self.assertIn("Can't connect because of the evil mock", client.user_io.out) + self.assertIn("Waiting 0 seconds to retry...", client.user_io.out) + + # but not with 1 + client = self._get_client(BadConnectionUploader) + files = cpp_hello_conan_files("Hello0", "1.2.1", build=False) + client.save(files) + client.run("export frodo/stable") + client.run("upload Hello* --confirm --retry 1 --retry_wait=1", ignore_error=True) + self.assertNotIn("Waiting 1 seconds to retry...", client.user_io.out) + self.assertIn("ERROR: Execute upload again to retry upload the failed files: " + "conanmanifest.txt. [Remote: default]", client.user_io.out) + + # Try with broken connection even with 10 retries + client = self._get_client(TerribleConnectionUploader) + files = cpp_hello_conan_files("Hello0", "1.2.1", build=False) + client.save(files) + client.run("export frodo/stable") + client.run("upload Hello* --confirm --retry 10 --retry_wait=0", ignore_error=True) + self.assertIn("Waiting 0 seconds to retry...", client.user_io.out) + self.assertIn("ERROR: Execute upload again to retry upload the failed files", client.user_io.out) + + # For each file will fail the first time and will success in the second one + client = self._get_client(FailPairFilesUploader) + files = cpp_hello_conan_files("Hello0", "1.2.1", build=False) + client.save(files) + client.run("export frodo/stable") + client.run("install Hello0/1.2.1@frodo/stable --build") + client.run("upload Hello* --confirm --retry 3 --retry_wait=0 --all") + self.assertEquals(str(client.user_io.out).count("ERROR: Pair file, error!"), 6) + + def upload_with_pattern_and_package_error_test(self): + files = hello_conan_files("Hello1", "1.2.1") + self.client.save(files) + self.client.run("export frodo/stable") + + 
self.client.run("upload Hello* --confirm -p 234234234", ignore_error=True) + self.assertIn("-p parameter only allowed with a valid recipe reference", + self.client.user_io.out) + + def check_upload_confirm_question_test(self): + user_io = self.client.user_io + files = hello_conan_files("Hello1", "1.2.1") + self.client.save(files) + self.client.run("export frodo/stable") + + user_io.request_string = lambda x: "y" + self.client.run("upload Hello*", user_io=user_io) + self.assertIn("Uploading Hello1/1.2.1@frodo/stable", self.client.user_io.out) + + files = hello_conan_files("Hello2", "1.2.1") + self.client.save(files) + self.client.run("export frodo/stable") + + user_io.request_string = lambda x: "n" + self.client.run("upload Hello*", user_io=user_io) + self.assertNotIn("Uploading Hello2/1.2.1@frodo/stable", self.client.user_io.out) + + def upload_same_package_dont_compress_test(self): + # Create a manifest for the faked package + pack_path = self.client.paths.package(self.package_ref) + digest_path = self.client.client_cache.digestfile_package(self.package_ref) + expected_manifest = FileTreeManifest.create(os.path.dirname(digest_path)) + save(os.path.join(pack_path, CONAN_MANIFEST), str(expected_manifest)) + + self.client.run("upload %s --all" % str(self.conan_ref), ignore_error=False) + self.assertIn("Compressing exported files", self.client.user_io.out) + self.assertIn("Compressing package", str(self.client.user_io.out)) + + self.client.run("upload %s --all" % str(self.conan_ref), ignore_error=False) + self.assertNotIn("Compressing exported files", self.client.user_io.out) + self.assertNotIn("Compressing package", str(self.client.user_io.out)) + self.assertIn("Package is up to date", str(self.client.user_io.out)) + + def upload_with_no_valid_settings_test(self): + '''Check if upload is still working even if the specified setting is not valid. 
+ If this test fails, will fail in Linux/OSx''' + conanfile = """ +from conans import ConanFile +class TestConan(ConanFile): + name = "Hello" + version = "1.2" + settings = {"os": ["Windows"]} +""" + files = {CONANFILE: conanfile} + self.client.save(files) + self.client.run("export lasote/stable") + self.assertIn("WARN: Conanfile doesn't have 'license'", self.client.user_io.out) + self.client.run("upload Hello/1.2@lasote/stable", ignore_error=False) + self.assertIn("Uploading conan_export.tgz", self.client.user_io.out) + + def simple_test(self): + """ basic installation of a new conans + """ + + # Try to upload an package without upload conans first + self.client.run('upload %s -p %s' % (self.conan_ref, str(self.package_ref.package_id)), + ignore_error=True) + self.assertIn("There are no remote conanfiles like %s" % str(self.conan_ref), + self.client.user_io.out) + + # Upload conans + self.client.run('upload %s' % str(self.conan_ref)) + self.assertTrue(os.path.exists(self.server_reg_folder)) + self.assertFalse(os.path.exists(self.server_pack_folder)) + # Upload package + self.client.run('upload %s -p %s' + % (str(self.conan_ref), str(self.package_ref.package_id))) + self.assertTrue(os.path.exists(self.server_reg_folder)) + self.assertTrue(os.path.exists(self.server_pack_folder)) + + # Test the file in the downloaded conans + files = ['CMakeLists.txt', + 'my_lib/debug/libd.a', + 'hello.cpp', + 'hello0.h', + CONANFILE, + CONAN_MANIFEST, + 'main.cpp', + 'include/math/lib1.h', + 'my_data/readme.txt', + 'my_bin/executable'] + + self.assertTrue(os.path.exists(os.path.join(self.server_reg_folder, CONANFILE))) + self.assertTrue(os.path.exists(os.path.join(self.server_reg_folder, EXPORT_TGZ_NAME))) + tmp = temp_folder() + untargz(os.path.join(self.server_reg_folder, EXPORT_TGZ_NAME), tmp) + for f in files: + if f not in (CONANFILE, CONAN_MANIFEST): + self.assertTrue(os.path.exists(os.path.join(tmp, f))) + else: + self.assertFalse(os.path.exists(os.path.join(tmp, f))) + + 
folder = uncompress_packaged_files(self.test_server.paths, self.package_ref) + + self.assertTrue(os.path.exists(os.path.join(folder, + "include", + "lib1.h"))) + self.assertTrue(os.path.exists(os.path.join(folder, + "lib", + "my_lib/libd.a"))) + self.assertTrue(os.path.exists(os.path.join(folder, + "res", + "shares/readme.txt"))) + + if platform.system() != "Windows": + self.assertEqual(os.stat(os.path.join(folder, + "bin", + "my_bin/executable")).st_mode & + stat.S_IRWXU, stat.S_IRWXU) + + def upload_all_test(self): + '''Upload conans and package together''' + # Try to upload all conans and packages + self.client.run('upload %s --all' % str(self.conan_ref)) + lines = [line.strip() for line in str(self.client.user_io.out).splitlines() + if line.startswith("Uploading")] + self.assertEqual(lines, ["Uploading Hello/1.2.1@frodo/stable", + "Uploading conanmanifest.txt", + "Uploading conanfile.py", + "Uploading conan_export.tgz", + "Uploading package 1/1: myfakeid", + "Uploading conanmanifest.txt", + "Uploading conaninfo.txt", + "Uploading conan_package.tgz", + ]) + self.assertTrue(os.path.exists(self.server_reg_folder)) + self.assertTrue(os.path.exists(self.server_pack_folder)) + + def force_test(self): + '''Tries to upload a conans exported after than remote version.''' + # Upload all conans and packages + self.client.run('upload %s --all' % str(self.conan_ref)) + self.assertTrue(os.path.exists(self.server_reg_folder)) + self.assertTrue(os.path.exists(self.server_pack_folder)) + + # Fake datetime from exported date and upload again + digest_path = os.path.join(self.client.paths.export(self.conan_ref), CONAN_MANIFEST) + old_digest = self.client.paths.load_manifest(self.conan_ref) + fake_digest = FileTreeManifest(2, old_digest.file_sums) + save(digest_path, str(fake_digest)) + + self.client.run('upload %s' % str(self.conan_ref), ignore_error=True) + self.assertIn("Remote recipe is newer than local recipe", self.client.user_io.out) + + self.client.run('upload %s --force' 
class CustomEnvPath(object):
    """Context manager that builds a temporary os.environ for a test.

    Parameters:
        paths_to_add: list of directories appended to PATH while active.
        paths_to_remove: list of directories stripped from PATH while active.
        var_to_add: list of (name, value) pairs set in the environment.
        var_to_remove: list of variable names removed from the environment.
        cmds_to_remove: list of command names; every PATH directory that
            contains one of them is stripped from PATH (resolved via which()).

    On exit, the whole environment is restored to its state at construction.
    """

    def __init__(self, paths_to_add=None, paths_to_remove=None,
                 var_to_add=None, var_to_remove=None, cmds_to_remove=None):
        # Snapshot taken at construction time, restored in __exit__
        self._original_env = dict(os.environ)
        self._paths_to_add = paths_to_add
        self._paths_to_remove = paths_to_remove or []
        self._var_to_add = var_to_add
        self._var_to_remove = var_to_remove or []
        self._cmds_to_remove = cmds_to_remove

    def __enter__(self):
        if self._var_to_add:
            for name, value in self._var_to_add:
                os.environ[name] = value
        # BUGFIX: this branch used to iterate self._var_to_add (the wrong list,
        # yielding (name, value) tuples) and assign None, which raises TypeError
        # because environ values must be strings. Now it removes the variables.
        for name in self._var_to_remove:
            os.environ.pop(name, None)
        if self._paths_to_add:
            os.environ['PATH'] = "%s%s%s" % (os.environ['PATH'],
                                             os.pathsep,
                                             os.pathsep.join(self._paths_to_add))

        if self._cmds_to_remove:
            for cmd in self._cmds_to_remove:
                self._paths_to_remove.extend(which(cmd))

        if self._paths_to_remove:
            entries = os.environ['PATH'].split(os.pathsep)
            os.environ['PATH'] = os.pathsep.join(
                [p for p in entries if p not in self._paths_to_remove])

    def __exit__(self, _type, value, traceback):
        # Restore in place rather than rebinding the os.environ name, so the
        # snapshot is pushed back through putenv() and stays visible to
        # subprocesses and to code that captured a reference to os.environ.
        os.environ.clear()
        os.environ.update(self._original_env)


def which(program):
    """Return the list of PATH directories where *program* is executable.

    If *program* carries an explicit directory, only that candidate is
    checked. If it has no extension, any extension matches ("prog.*") so the
    lookup also finds Windows .exe/.bat files.
    """
    path_found = []

    def is_exe(fpath):
        return os.path.exists(fpath) and os.access(fpath, os.X_OK)

    fpath, _ = os.path.split(program)
    if fpath:
        # Explicit path given: check only that single candidate
        if is_exe(program):
            path_found.append(fpath)
    else:
        _, ext = os.path.splitext(program)
        file_to_find = program if ext else "%s.*" % program
        for path in os.environ["PATH"].split(os.pathsep):
            for _file in glob.glob(os.path.join(path, file_to_find)):
                # glob already returns joined paths; joining again is a no-op
                # for absolute PATH entries
                if is_exe(os.path.join(path, _file)):
                    path_found.append(path)

    return path_found
BUILD_INFO_CMAKE + + +conanfile_build_cmake = """ def build(self): + static_flags = "-DBUILD_SHARED_LIBS=ON" if not self.options.static else "" + lang = '-DCONAN_LANGUAGE=%s' % self.options.language + cmake = CMake(self.settings) + cmake_flags = cmake.command_line + cmd = 'cmake "%s" %s %s %s' % (self.conanfile_directory, cmake_flags, lang, static_flags) + # print "Executing command: %s" % cmd + self.run(cmd) + self.run("cmake --build . %s" % cmake.build_config)""" + +conanfile_build_env = """ + def build(self): + import os + from conans import ConfigureEnvironment + + environment = ConfigureEnvironment(self) + env = environment.command_line_env + flags = environment.compile_flags + + if self.settings.compiler == "Visual Studio": + lang = '/DCONAN_LANGUAGE=%s' % self.options.language + if self.options.static: + self.run('{} && cl /c /EHsc hello.cpp {}'.format(env, lang)) + self.run('{} && lib hello.obj -OUT:hello{}.lib'.format(env, self.name)) + else: + self.run('{} && cl /EHsc /LD hello.cpp {} {} /link /IMPLIB:hello{}.lib ' + '/link /OUT:hello{}.dll'.format(env, lang, flags, self.name, self.name)) + + command = ('{} && cl /EHsc main.cpp hello{}.lib {}'.format(env, self.name, flags)) + self.run(command) + elif self.settings.compiler == "gcc" or "clang" in str(self.settings.compiler): + # libs = " ".join("-l%s" % lib for lib in self.deps_cpp_info.libs) + lang = '-DCONAN_LANGUAGE=%s' % self.options.language + if self.options.static: + self.run("c++ -c hello.cpp {} {}".format(lang, flags)) + self.run("{} && ar rcs libhello{}.a hello.o".format(env, self.name)) + else: + if self.settings.os == "Windows": + self.run("{} && c++ -o libhello{}.dll -shared -fPIC hello.cpp {} {} " + "-Wl,--out-implib,libhello{}.a". + format(env, self.name, lang, flags, self.name)) + else: + self.run("{} && c++ -o libhello{}.so -shared -fPIC hello.cpp {} {}". + format(env, self.name, flags, lang)) + self.run('{} && c++ -o main main.cpp -L. 
-lhello{} {}'.format(env, self.name, flags)) + elif self.settings.compiler == "sun-cc": + lang = '-DCONAN_LANGUAGE=%s' % self.options.language + if self.options.static: + self.run("CC -c hello.cpp {} {}".format(lang, flags)) + self.run("{} && ar rcs libhello{}.a hello.o".format(env, self.name)) + else: + self.run("{} && CC -o libhello{}.so -G -Kpic hello.cpp {} {}". + format(env, self.name, flags, lang)) + self.run('{} && CC -o main main.cpp -L. -lhello{} {}'.format(env, self.name, flags)) + + + try: + os.makedirs("bin") + except: + pass + + try: + if self.settings.os == "Windows": + os.rename("main.exe", "bin/say_hello.exe") + else: + os.rename("main", "bin/say_hello") + if not self.options.static: + os.rename("libhello.so", "bin/libhello.so") + except: + pass +""" + +conanfile_template = """ +from conans import ConanFile, CMake +from conans.tools import replace_in_file +import platform + +class {name}Conan(ConanFile): + name = "{name}" + version = "{version}" + options = {{"language": [0, 1], + "static": [True, False]}} + default_options = '''language={language} + static= {static}''' + requires = ({requires}) + settings = "os", "compiler", "arch" + generators = "cmake" + exports = '*' + + def config(self): + {libcxx_remove} + for name, req in self.requires.iteritems(): + self.options[name].language = self.options.language + + def source(self): + # Try-except necessary, not all tests have all files + try: + replace_in_file("CMakeLists.txt", "projct", "project") + except: + pass + try: + replace_in_file("main.cpp", "retunr", "return") + except: + pass + +{build} + + def package(self): + self.copy(pattern="*.h", dst="include", keep_path=False) + self.copy(pattern="*.lib", dst="lib", keep_path=False) + self.copy(pattern="*lib*.a", dst="lib", keep_path=False) + self.copy(pattern="*.dll", dst="bin", keep_path=False) + self.copy(pattern="*.dylib", dst="lib", keep_path=False) + self.copy(pattern="*.so", dst="lib", keep_path=False) + + def package_info(self): + 
self.cpp_info.libs = ["hello{name}"] + + def imports(self): + self.copy(pattern="*.dylib", dst=".", src="lib") + self.copy(pattern="*.dll", dst=".", src="bin") + self.copy(pattern="*", dst="bin", src="bin") +""" + +cmake_file = """ +project(MyHello) +cmake_minimum_required(VERSION 2.8.12) + +include(${{CMAKE_BINARY_DIR}}/%s) + +add_definitions(-DCONAN_LANGUAGE=${{CONAN_LANGUAGE}}) +message("HELLO LANGUAGE " ${{CONAN_LANGUAGE}}) +conan_basic_setup() + +add_library(hello{name} hello{ext}) +target_link_libraries(hello{name} ${{CONAN_LIBS}}) +set_target_properties(hello{name} PROPERTIES POSITION_INDEPENDENT_CODE ON) +add_executable(say_hello main{ext}) +target_link_libraries(say_hello hello{name}) + + +""" % BUILD_INFO_CMAKE + +cmake_targets_file = """ +project(MyHello) +cmake_minimum_required(VERSION 2.8.12) + +include(${{CMAKE_BINARY_DIR}}/%s) + +add_definitions(-DCONAN_LANGUAGE=${{CONAN_LANGUAGE}}) +message("HELLO LANGUAGE " ${{CONAN_LANGUAGE}}) +conan_basic_setup(TARGETS) + +add_library(hello{name} hello{ext}) +target_link_libraries(hello{name} PUBLIC {targets}) +set_target_properties(hello{name} PROPERTIES POSITION_INDEPENDENT_CODE ON) +add_executable(say_hello main{ext}) +target_link_libraries(say_hello hello{name}) + + +""" % BUILD_INFO_CMAKE + +body = r"""#include "hello{name}.h" + +#include +using namespace std; + +{includes} + +void hello{name}(){{ +#if CONAN_LANGUAGE == 0 + cout<<"Hello {msg}\n"; +#elif CONAN_LANGUAGE == 1 + cout<<"Hola {msg}\n"; +#endif + {other_calls} +}} +""" + +body_c = r"""#include "hello{name}.h" + +#include + +{includes} + +void hello{name}(){{ +#if CONAN_LANGUAGE == 0 + printf("Hello {msg}\n"); +#elif CONAN_LANGUAGE == 1 + printf("Hola {msg}\n"); +#endif + {other_calls} +}} +""" +header = """ +#pragma once +{includes} +{export}void hello{name}(); +""" + +main = """ +#include "hello{name}.h" + +int main(){{ + hello{name}(); + return 0; +}} +""" + +executable = """ +""" + + +def cpp_hello_source_files(name="Hello", deps=None, 
private_includes=False, msg=None, + dll_export=False, need_patch=False, pure_c=False, cmake_targets=False): + """ + param number: integer, defining name of the conans Hello0, Hello1, HelloX + param deps: [] list of integers, defining which dependencies this conans + depends on + param private_includes: includes will exist only in cpp, then hidden from + downstream consumers + param msg: the message to append to Hello/Hola, will be equal the number + by default + param dll_export: Adds __declspec(dllexport) to the .h declaration + (to be exported to lib with a dll) + param need_patch: It will generated wrong CMakeLists and main.cpp files, + so they will need to be fixed/patched in the source() method. + Such method just have to replace_in_file in those two files to have a + correct "source" directory. This was introduced to be sure that the + source and build methods are executed using their respective folders + while packaging. + e.g. (3, [4, 7]) means that a Hello3 conans will be created, with message + "Hello 3", that depends both in Hello4 and Hello7. 
def cpp_hello_source_files(name="Hello", deps=None, private_includes=False, msg=None,
                           dll_export=False, need_patch=False, pure_c=False,
                           cmake_targets=False):
    """Build the file dict (path -> content) for one C/C++ "hello" package.

    param name: package name; files become helloNAME.h/helloNAME.cpp etc.
    param deps: list of dependency names whose headers are included and whose
        helloDEP() functions are called.
    param private_includes: when True, dependency includes stay in the .cpp
        only, hidden from downstream consumers of the header.
    param msg: message appended to Hello/Hola; defaults to *name*.
    param dll_export: add __declspec(dllexport) to the header declaration.
    param need_patch: deliberately break CMakeLists.txt ("projct") and
        main ("retunr") so tests must fix them in source().
    param pure_c: emit .c sources and a C-only project().
    param cmake_targets: link against CONAN_PKG:: targets instead of CONAN_LIBS.
    """
    assert deps is None or isinstance(deps, list)
    dep_names = list(deps or [])
    greeting = name if msg is None else msg
    ext = ".c" if pure_c else ".cpp"

    include_lines = "\n".join('#include "hello%s.h"' % d for d in dep_names)
    call_lines = "\n".join("hello%s();" % d for d in dep_names)

    files = {"main%s" % ext: main.format(name=name)}
    files["hello%s.h" % name] = header.format(
        name=name,
        export="__declspec(dllexport) " if dll_export else "",
        includes="" if private_includes else include_lines)
    source_template = body_c if pure_c else body
    files["hello%s" % ext] = source_template.format(name=name,
                                                    includes=include_lines,
                                                    other_calls=call_lines,
                                                    msg=greeting)

    # Naive approximation, NO DEPS
    if cmake_targets:
        targets = " ".join("CONAN_PKG::%s" % d for d in dep_names)
        cmake = cmake_targets_file.format(name=name, ext=ext, targets=targets)
    else:
        cmake = cmake_file.format(name=name, ext=ext)
    if pure_c:
        cmake = cmake.replace("project(MyHello)", "project(MyHello C)")
    if need_patch:
        cmake = cmake.replace("project", "projct")
        files["main%s" % ext] = files["main%s" % ext].replace("return", "retunr")
    files["CMakeLists.txt"] = cmake
    files["executable"] = executable
    return files


def cpp_hello_conan_files(name="Hello", version="0.1", deps=None, language=0, static=True,
                          private_includes=False, msg=None, dll_export=False, need_patch=False,
                          pure_c=False, config=True, build=True, collect_libs=False,
                          use_cmake=True, cmake_targets=False):
    """Generate the hello sources plus the CONANFILE that manages them.

    param deps: list of strings "Name/0.1@user/channel" or tuples of such
        strings (a tuple becomes a version-range-style requirement).
    param language: 0 = English, 1 = Spanish.
    param config/build: when False, the corresponding conanfile method is
        renamed (config2/build2) so it is effectively absent.
    """
    assert deps is None or isinstance(deps, list)

    code_deps = []
    requires = []
    for dep in deps or []:
        if isinstance(dep, str):
            requires.append('"%s"' % dep)
            code_deps.append(dep.split("/", 1)[0])
        elif isinstance(dep, tuple):
            requires.append('(%s)' % ", ".join('"%s"' % e for e in dep))
            code_deps.append(dep[0].split("/", 1)[0])
        else:
            raise Exception("Wrong input %s %s" % (dep, type(dep)))
    # Trailing empty entry leaves a dangling comma so a single require still
    # renders as a tuple inside "requires = ({requires})".
    requires.append("")
    requires_txt = ", ".join(requires)

    base_files = cpp_hello_source_files(name, code_deps, private_includes, msg=msg,
                                        dll_export=dll_export, need_patch=need_patch,
                                        pure_c=pure_c, cmake_targets=cmake_targets)

    conanfile = conanfile_template.format(
        name=name,
        version=version,
        requires=requires_txt,
        language=language,
        static=static,
        libcxx_remove="del self.settings.compiler.libcxx" if pure_c else "",
        build=conanfile_build_cmake if use_cmake else conanfile_build_env)

    if pure_c:
        conanfile = conanfile.replace("hello.cpp", "hello.c").replace("main.cpp", "main.c")
        conanfile = conanfile.replace("c++", "cc")
    if not build:
        conanfile = conanfile.replace("build(", "build2(")
    if not config:
        conanfile = conanfile.replace("config(", "config2(")
    if collect_libs:
        conanfile = conanfile.replace('["hello%s"]' % name, "self.collect_libs()")
    base_files[CONANFILE] = conanfile
    return base_files
conanfile_template = r"""
from conans import ConanFile

class {name}Conan(ConanFile):
    name = "{name}"
    version = "{version}"
    requires = ({requires})
    exports = '*'

    def imports(self):
        self.copy("*", "src")

    def build(self):
        pass

    def package(self):
        self.copy('*.go',"", "src")
"""

hello = r'''
package hello%NUMBER%

import (
    "fmt"
%INCLUDES%
)

func Hello() {
    fmt.Printf("Hello %NUMBER%\n")
%OTHER CALLS%
}
'''


main = r'''package main

import (
    "hello%NUMBER%"
)

func main() {
    hello%NUMBER%.Hello()
}
'''


def go_hello_source_files(number=0, deps=None):
    """Build the file dict (path -> content) for one Go "hello" package.

    param number: integer suffix for the package name (hello0, hello1, ...).
    param deps: list of integer suffixes this package imports and calls,
        e.g. (3, [4, 7]) yields a hello3 whose Hello() also calls
        hello4.Hello() and hello7.Hello().
    """
    assert deps is None or isinstance(deps, list)
    dep_numbers = deps or []
    suffix = str(number)

    import_lines = "\n".join('    "hello%s"' % d for d in dep_numbers)
    call_lines = "\n".join("    hello%s.Hello();" % d for d in dep_numbers)

    hello_go = (hello.replace("%NUMBER%", suffix)
                     .replace("%OTHER CALLS%", call_lines)
                     .replace("%INCLUDES%", import_lines))
    return {"src/hello%s_main/main.go" % suffix: main.replace("%NUMBER%", suffix),
            "src/hello%s/hello.go" % suffix: hello_go}


def go_hello_conan_files(conan_reference, number=0, deps=None):
    """Generate the Go hello sources plus the CONANFILE that manages them.

    param conan_reference: reference object providing .name and .version.
    param number/deps: same meaning as in go_hello_source_files.
    """
    assert deps is None or isinstance(deps, list)
    base_files = go_hello_source_files(str(number), deps)
    requires_txt = ", ".join('"hello%d/0.1@lasote/stable"' % d for d in deps or [])
    base_files[CONANFILE] = conanfile_template.format(name=conan_reference.name,
                                                      version=conan_reference.version,
                                                      requires=requires_txt)
    return base_files
def create_profile(folder, name, settings=None, scopes=None, package_settings=None, env=None,
                   package_env=None):
    """Write a conan Profile file named *name* inside *folder*.

    param settings: dict of global settings (stored on the private attr).
    param scopes: dict turned into "key=value" entries for Scopes.from_list.
    param package_settings: dict of per-package settings.
    param env: iterable of (var, value) pairs for the global environment.
    param package_env: dict {package: [(var, value), ...]}.
    """
    profile = Profile()
    profile._settings = settings or {}

    if scopes:
        scope_lines = ["%s=%s" % (key, value) for key, value in scopes.items()]
        profile.scopes = Scopes.from_list(scope_lines)

    if package_settings:
        profile._package_settings = package_settings

    for package_name, assignments in (package_env or {}).items():
        for var_name, value in assignments:
            profile._package_env[package_name][var_name] = value

    for var_name, value in env or {}:
        profile._env[var_name] = value

    save(os.path.join(folder, name), profile.dumps())
conanfile_template = r"""
from conans import ConanFile, tools
import sys

class {name}Conan(ConanFile):
    name = "{name}"
    version = "{version}"
    requires = ({requires})
    exports = '*'
    generators = "virtualenv", "env"
    build_policy = "missing"

    def build(self):
        with tools.pythonpath(self):
            pass
{build}

    def package(self):
        self.copy('*.py')

    def package_info(self):
        self.env_info.PYTHONPATH.append(self.package_folder)
"""

hello = r'''
%INCLUDES%

def hello():
    print("Hello %NUMBER%")
%OTHER CALLS%

def build_helper(conanfile):
    conanfile.output.info("Build stuff %NUMBER%")
%OTHER_BUILD_HELPERS%

'''


main = r'''
from hello%NUMBER% import hello as h%NUMBER%

if __name__ == "__main__":
    h%NUMBER%.hello()

'''


def py_hello_source_files(number=0, deps=None):
    """Build the file dict (path -> content) for one pure-Python "hello" package.

    param number: package identifier; anything after a "/" (a full reference)
        is stripped, so "Pkg/0.1@user/channel" becomes "Pkg".
    param deps: list of identifiers (same stripping rule) whose hello modules
        are imported, called from hello() and from build_helper().
    """
    assert deps is None or isinstance(deps, list)
    base = str(number).split("/", 1)[0]
    dep_names = [str(d).split("/", 1)[0] for d in deps or []]

    import_lines = "\n".join("from hello%s import hello as h%s" % (d, d) for d in dep_names)
    call_lines = "\n".join("    h%s.hello()" % d for d in dep_names)
    helper_lines = "\n".join("    h%s.build_helper(conanfile)" % d for d in dep_names)

    module = (hello.replace("%NUMBER%", base)
                   .replace("%OTHER CALLS%", call_lines)
                   .replace("%OTHER_BUILD_HELPERS%", helper_lines)
                   .replace("%INCLUDES%", import_lines))
    return {"main.py": main.replace("%NUMBER%", base),
            "hello%s/hello.py" % base: module,
            "hello%s/__init__.py" % base: ""}


def py_hello_conan_files(name, version, deps=None):
    """Generate the Python hello sources plus the CONANFILE that manages them.

    param deps: list of full requirement strings "Name/0.1@user/channel";
        the name part is reused for imports inside the generated build().
    """
    assert deps is None or isinstance(deps, list)
    base_files = py_hello_source_files(name, deps)

    requires_txt = ", ".join('"%s"' % d for d in deps or [])
    dep_names = [str(d).split("/", 1)[0] for d in deps or []]
    if deps:
        build = "\n".join("            from hello%s import hello as h%s\n"
                          "            h%s.build_helper(self)" % (d, d, d)
                          for d in dep_names)
    else:
        build = ""

    base_files[CONANFILE] = conanfile_template.format(name=name, version=version,
                                                      requires=requires_txt, build=build)
    return base_files
+ requires = ", ".join('"%s"' % r for r in requires) + deps_names = [str(n).split("/", 1)[0] for n in deps or []] + if deps: + build = "\n".join([" from hello%s import hello as h%s\n" + " h%s.build_helper(self)" % (i, i, i) for i in deps_names]) + else: + build = "" + conanfile = conanfile_template.format(name=name, version=version, requires=requires, + build=build) + base_files[CONANFILE] = conanfile + + return base_files diff --git a/testbed/conan-io__conan/conans/test/utils/runner.py b/testbed/conan-io__conan/conans/test/utils/runner.py new file mode 100644 index 0000000000000000000000000000000000000000..62538d0da3a4c56140848b19292c7fe10686f115 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/utils/runner.py @@ -0,0 +1,15 @@ +from conans.client.runner import ConanRunner + + +class TestRunner(object): + """Wraps Conan runner and allows to redirect all the ouput to an StrinIO passed + in the __init__ method""" + + def __init__(self, output, runner=None): + self._output = output + self.runner = runner or ConanRunner(print_commands_to_output=True, + generate_run_log_file=True, + log_run_to_output=True) + + def __call__(self, command, output=None, log_filepath=None, cwd=None): + return self.runner(command, output=self._output, log_filepath=log_filepath, cwd=cwd) diff --git a/testbed/conan-io__conan/conans/test/utils/test_files.py b/testbed/conan-io__conan/conans/test/utils/test_files.py new file mode 100644 index 0000000000000000000000000000000000000000..a93169eb2b794bbeb4c0b499583d00b8678edc30 --- /dev/null +++ b/testbed/conan-io__conan/conans/test/utils/test_files.py @@ -0,0 +1,90 @@ +from conans.test.utils.cpp_test_files import cpp_hello_source_files, cpp_hello_conan_files +from conans.test.utils.go_test_files import go_hello_source_files, go_hello_conan_files +import os +from conans.paths import PACKAGE_TGZ_NAME +import tempfile +from conans.test import CONAN_TEST_FOLDER +from conans.tools import untargz +from conans.errors import ConanException +import 
def wait_until_removed(folder):
    """Remove *folder*, retrying for up to ~5 seconds.

    Windows (and antivirus scanners) can keep handles open briefly after a
    test finishes, making an immediate rmtree fail; we sleep and retry.
    Raises Exception with the last underlying error if removal never succeeds.
    """
    latest_exception = None
    for _ in range(50):  # 50 attempts * 0.1s sleep == max ~5 seconds
        time.sleep(0.1)  # give the OS / other processes time to release handles
        try:
            shutil.rmtree(folder)
            return
        except Exception as e:
            latest_exception = e
    # Bug fix: the message used to read "Could remove folder", which said the
    # opposite of what happened.
    raise Exception("Could not remove folder %s: %s" % (folder, latest_exception))


def temp_folder():
    """Create a fresh temporary working folder for a test and return it.

    The returned path deliberately contains spaces (to catch quoting bugs),
    except on FreeBSD/Solaris whose default non-GNU make cannot handle them.
    """
    t = tempfile.mkdtemp(suffix='conans', dir=CONAN_TEST_FOLDER)
    # necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
    t = os.path.realpath(t)
    if platform.system() in ("FreeBSD", "SunOS"):
        path = "pathwithoutspaces"
    else:
        path = "path with spaces"
    nt = os.path.join(t, path)
    os.makedirs(nt)
    return nt


def uncompress_packaged_files(paths, package_reference):
    """Extract a stored package .tgz into a fresh temp folder and return it.

    Raises ConanException if the package tarball does not exist.
    """
    package_path = paths.package(package_reference)
    tgz_path = os.path.join(package_path, PACKAGE_TGZ_NAME)
    if not os.path.exists(tgz_path):
        raise ConanException("%s not found in %s" % (PACKAGE_TGZ_NAME, package_path))
    tmp = temp_folder()
    untargz(tgz_path, tmp)
    return tmp


def scan_folder(folder):
    """Return the sorted list of relative file paths below *folder*.

    Paths are normalized with forward slashes so results compare equal
    across platforms.
    """
    scanned_files = []
    for root, _, files in os.walk(folder):
        relative_path = os.path.relpath(root, folder)
        for f in files:
            relative_name = os.path.normpath(os.path.join(relative_path, f)).replace("\\", "/")
            scanned_files.append(relative_name)
    return sorted(scanned_files)
def hello_source_files(number=0, deps=None, lang='cpp'):
    """Dispatch to the per-language "hello" source generator.

    param number: identifier of the package (Hello0, Hello1, ... HelloX).
    param deps: list of identifiers this package depends on.
    param lang: 'cpp' or 'go'; any other value silently yields None
        (historical behavior, preserved).
    """
    builders = {'cpp': cpp_hello_source_files, 'go': go_hello_source_files}
    builder = builders.get(lang)
    if builder is not None:
        return builder(number, deps)


def hello_conan_files(conan_reference, number=0, deps=None, language=0, lang='cpp'):
    """Dispatch to the per-language generator that also emits the CONANFILE.

    param language: 0 = English, 1 = Spanish (cpp only).
    param lang: 'cpp' or 'go'; any other value silently yields None.
    """
    # NOTE(review): cpp_hello_conan_files declares (name, version, ...) as its
    # first parameters, so forwarding (conan_reference, number) here looks
    # suspicious — confirm against its callers before relying on it.
    if lang == 'cpp':
        return cpp_hello_conan_files(conan_reference, number, deps, language)
    if lang == 'go':
        return go_hello_conan_files(conan_reference, number, deps)
GlobalSection(ProjectConfigurationPlatforms) = postSolution + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|ARM.ActiveCfg = Debug|ARM + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|ARM.Build.0 = Debug|ARM + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|x64.ActiveCfg = Debug|x64 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|x64.Build.0 = Debug|x64 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|x86.ActiveCfg = Debug|Win32 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Debug|x86.Build.0 = Debug|Win32 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|ARM.ActiveCfg = Release|ARM + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|ARM.Build.0 = Release|ARM + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|x64.ActiveCfg = Release|x64 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|x64.Build.0 = Release|x64 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|x86.ActiveCfg = Release|Win32 + {143D99A7-C9F3-434F-BA39-514BB63835E8}.Release|x86.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal + + +''' + +vcxproj_file = ''' + + + + Debug + ARM + + + Debug + Win32 + + + Debug + x64 + + + Release + ARM + + + Release + Win32 + + + Release + x64 + + + + {143D99A7-C9F3-434F-BA39-514BB63835E8} + MyProject + + + + Application + true + v140 + MultiByte + + + Application + true + v140 + MultiByte + + + Application + true + v140 + MultiByte + + + Application + false + v140 + true + MultiByte + + + Application + false + v140 + true + MultiByte + + + Application + false + v140 + true + MultiByte + + + + + + + + + + + + + + + + + + + + + + + + + + + Level3 + Disabled + true + + + true + + + + + Level3 + Disabled + true + + + true + + + + + Level3 + Disabled + true + + + true + + + + + Level3 + MaxSpeed + true + true + true + + + true + true + true + + + + + Level3 + MaxSpeed + true + true + true + + + true + true + true + + + + + Level3 + MaxSpeed + true + true + true + + + true + true + true + + + + + + + 
@contextmanager
def pythonpath(conanfile):
    """Temporarily extend ``sys.path`` with the dependencies' PYTHONPATH.

    Bug fix: the original restored ``sys.path`` with a plain statement after
    ``yield``, so an exception raised inside the ``with`` body skipped the
    restore and leaked the modified path; ``try/finally`` guarantees it.
    """
    old_path = sys.path[:]
    sys.path.extend(conanfile.deps_env_info.PYTHONPATH)
    try:
        yield
    finally:
        sys.path = old_path


@contextmanager
def environment_append(env_vars):
    """Temporarily add/override the given variables in ``os.environ``.

    The whole environment is snapshotted on entry and restored verbatim on
    exit (even on exception), so variables added by the body are dropped too.
    """
    old_env = dict(os.environ)
    os.environ.update(env_vars)
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(old_env)
def build_sln_command(settings, sln_path, targets=None, upgrade_project=True):
    '''Build the devenv/msbuild command line for a Visual Studio solution.

    Use example:
        build_command = build_sln_command(self.settings, "myfile.sln", targets=["SDL2_image"])
        env = ConfigureEnvironment(self)
        command = "%s && %s" % (env.command_line_env, build_command)
        self.run(command)
    '''
    pieces = []
    if upgrade_project:
        pieces.append("devenv %s /upgrade && " % sln_path)
    pieces.append("msbuild %s /p:Configuration=%s" % (sln_path, settings.build_type))

    arch = str(settings.arch)
    if arch in ("x86_64", "x86"):
        platform_name = "x64" if settings.arch == "x86_64" else "x86"
        pieces.append(' /p:Platform="%s"' % platform_name)
    elif "ARM" in arch.upper():
        pieces.append(' /p:Platform="ARM"')

    if targets:
        pieces.append(" /target:%s" % ";".join(targets))
    return "".join(pieces)


def vcvars_command(settings):
    """Return the vcvarsall.bat invocation matching the compiler settings.

    Returns "" when the Visual environment is already active with a matching
    version; raises ConanException when the active version conflicts.
    """
    arch_param = "x86" if settings.arch == "x86" else "amd64"
    active = os.environ.get("VisualStudioVersion")
    if not active:
        return ('call "%%vs%s0comntools%%../../VC/vcvarsall.bat" %s'
                % (settings.compiler.version, arch_param))
    active_major = active.split(".")[0]
    if active_major != settings.compiler.version:
        raise ConanException("Error, Visual environment already set to %s\n"
                             "Current settings visual version: %s"
                             % (active_major, settings.compiler.version))
    return ""


def cpu_count():
    """Number of CPUs for parallel builds; 1 when the platform can't tell."""
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        print("WARN: multiprocessing.cpu_count() not implemented. Defaulting to 1 cpu")
        return 1  # Safe guess
def human_size(size_bytes):
    """Format a byte count as a human-readable size.

    bytes/KB are reported as whole numbers; MB and above gain precision,
    e.g. 1 byte, 43 bytes, 443 KB, 4.3 MB, 4.43 GB.
    """
    if size_bytes == 1:
        return "1 byte"

    suffixes_table = (('bytes', 0), ('KB', 0), ('MB', 1), ('GB', 2), ('TB', 2), ('PB', 2))
    amount = float(size_bytes)
    for suffix, precision in suffixes_table:
        if amount < 1024.0:
            break
        amount /= 1024.0

    if precision == 0:
        return "%d %s" % (amount, suffix)
    return "%s %s" % (round(amount, ndigits=precision), suffix)


def unzip(filename, destination="."):
    """Extract an archive into *destination* (relative to cwd if not absolute).

    Tarballs are delegated to untargz(); zip entries that fail to extract are
    reported and skipped rather than aborting the whole extraction.
    """
    if filename.endswith((".tar.gz", ".tgz", ".tbz2", ".tar.bz2", ".tar")):
        return untargz(filename, destination)
    import zipfile
    full_path = os.path.normpath(os.path.join(os.getcwd(), destination))
    # Progress is only printed when attached to a real terminal.
    interactive = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
    on_windows = platform.system() == "Windows"

    with zipfile.ZipFile(filename, "r") as archive:
        entries = archive.infolist()
        uncompress_size = sum(entry.file_size for entry in entries)
        print("Unzipping %s, this can take a while" % human_size(uncompress_size))
        extracted_size = 0
        for entry in entries:
            extracted_size += entry.file_size
            if interactive:
                print("Unzipping %.0f %%\r" % (extracted_size * 100.0 / uncompress_size),
                      end='')
            try:
                if on_windows and len(entry.filename) + len(full_path) >= 260:
                    # Win path limit is 260 chars
                    raise ValueError("Filename too long")
                archive.extract(entry, full_path)
            except Exception as e:
                print("Error extract %s\n%s" % (entry.filename, str(e)))
def untargz(filename, destination="."):
    """Extract any tar archive (auto-detecting compression) into *destination*."""
    import tarfile
    with tarfile.TarFile.open(filename, 'r:*') as archive:
        archive.extractall(destination)


def get(url):
    """High-level helper: download *url*, unzip it in cwd, delete the archive."""
    local_name = os.path.basename(url)
    download(url, local_name)
    unzip(local_name)
    os.unlink(local_name)


def download(url, filename, verify=True, out=None, retry=2, retry_wait=5):
    """Download *url* to *filename*, retrying on failure.

    When *verify* is True the request is checked against the bundled CA list.
    """
    if out is None:
        out = ConanOutput(sys.stdout, True)
    if verify:
        # Replace the boolean with the path of the bundled list of known CAs.
        import conans.client.rest.cacert as cacert
        verify = cacert.file_path
    downloader = Downloader(requests, out, verify=verify)
    downloader.download(url, filename, retry=retry, retry_wait=retry_wait)
    out.writeln("")


def replace_in_file(file_path, search, replace):
    """Replace every occurrence of *search* with *replace* inside *file_path*,
    rewriting the file as UTF-8."""
    new_content = load(file_path).replace(search, replace)
    with open(file_path, "wb") as handle:
        handle.write(new_content.encode("utf-8"))
+ " Computed signature: %s" % (algorithm_name, + os.path.basename(file_path), + real_signature)) + + +def check_sha1(file_path, signature): + check_with_algorithm_sum("sha1", file_path, signature) + + +def check_md5(file_path, signature): + check_with_algorithm_sum("md5", file_path, signature) + + +def check_sha256(file_path, signature): + check_with_algorithm_sum("sha256", file_path, signature) + + +def patch(base_path=None, patch_file=None, patch_string=None, strip=0): + """Applies a diff from file (patch_file) or string (patch_string) + in base_path directory or current dir if None""" + + if not patch_file and not patch_string: + return + if patch_file: + patchset = fromfile(patch_file) + else: + patchset = fromstring(patch_string.encode()) + + if not patchset.apply(root=base_path, strip=strip): + raise ConanException("Failed to apply patch: %s" % patch_file) + + +# DETECT OS, VERSION AND DISTRIBUTIONS + +class OSInfo(object): + ''' Usage: + print(os_info.is_linux) # True/False + print(os_info.is_windows) # True/False + print(os_info.is_macos) # True/False + print(os_info.is_freebsd) # True/False + print(os_info.is_solaris) # True/False + + print(os_info.linux_distro) # debian, ubuntu, fedora, centos... 
+ + print(os_info.os_version) # 5.1 + print(os_info.os_version_name) # Windows 7, El Capitan + + if os_info.os_version > "10.1": + pass + if os_info.os_version == "10.1.0": + pass + ''' + + def __init__(self): + self.os_version = None + self.os_version_name = None + self.is_linux = platform.system() == "Linux" + self.linux_distro = None + self.is_windows = platform.system() == "Windows" + self.is_macos = platform.system() == "Darwin" + self.is_freebsd = platform.system() == "FreeBSD" + self.is_solaris = platform.system() == "SunOS" + + if self.is_linux: + tmp = platform.linux_distribution(full_distribution_name=0) + self.linux_distro = None + self.linux_distro = tmp[0].lower() + self.os_version = Version(tmp[1]) + self.os_version_name = tmp[2] + if not self.os_version_name and self.linux_distro == "debian": + self.os_version_name = self.get_debian_version_name(self.os_version) + elif self.is_windows: + self.os_version = self.get_win_os_version() + self.os_version_name = self.get_win_version_name(self.os_version) + elif self.is_macos: + self.os_version = Version(platform.mac_ver()[0]) + self.os_version_name = self.get_osx_version_name(self.os_version) + elif self.is_freebsd: + self.os_version = self.get_freebsd_version() + self.os_version_name = "FreeBSD %s" % self.os_version + elif self.is_solaris: + self.os_version = Version(platform.release()) + self.os_version_name = self.get_solaris_version_name(self.os_version) + + @property + def with_apt(self): + return self.is_linux and self.linux_distro in ("debian", "ubuntu", "knoppix") + + @property + def with_yum(self): + return self.is_linux and self.linux_distro in ("centos", "redhat", "fedora") + + def get_win_os_version(self): + """ + Get's the OS major and minor versions. Returns a tuple of + (OS_MAJOR, OS_MINOR). 
+ """ + import ctypes + + class _OSVERSIONINFOEXW(ctypes.Structure): + _fields_ = [('dwOSVersionInfoSize', ctypes.c_ulong), + ('dwMajorVersion', ctypes.c_ulong), + ('dwMinorVersion', ctypes.c_ulong), + ('dwBuildNumber', ctypes.c_ulong), + ('dwPlatformId', ctypes.c_ulong), + ('szCSDVersion', ctypes.c_wchar*128), + ('wServicePackMajor', ctypes.c_ushort), + ('wServicePackMinor', ctypes.c_ushort), + ('wSuiteMask', ctypes.c_ushort), + ('wProductType', ctypes.c_byte), + ('wReserved', ctypes.c_byte)] + + os_version = _OSVERSIONINFOEXW() + os_version.dwOSVersionInfoSize = ctypes.sizeof(os_version) + retcode = ctypes.windll.Ntdll.RtlGetVersion(ctypes.byref(os_version)) + if retcode != 0: + return None + + return Version("%d.%d" % (os_version.dwMajorVersion, os_version.dwMinorVersion)) + + def get_debian_version_name(self, version): + if not version: + return None + elif version.major() == "8.Y.Z": + return "jessie" + elif version.major() == "7.Y.Z": + return "wheezy" + elif version.major() == "6.Y.Z": + return "squeeze" + elif version.major() == "5.Y.Z": + return "lenny" + elif version.major() == "4.Y.Z": + return "etch" + elif version.minor() == "3.1.Z": + return "sarge" + elif version.minor() == "3.0.Z": + return "woody" + + def get_win_version_name(self, version): + if not version: + return None + elif version.major() == "5.Y.Z": + return "Windows XP" + elif version.minor() == "6.0.Z": + return "Windows Vista" + elif version.minor() == "6.1.Z": + return "Windows 7" + elif version.minor() == "6.2.Z": + return "Windows 8" + elif version.minor() == "6.3.Z": + return "Windows 8.1" + elif version.minor() == "10.0.Z": + return "Windows 10" + + def get_osx_version_name(self, version): + if not version: + return None + elif version.minor() == "10.12.Z": + return "Sierra" + elif version.minor() == "10.11.Z": + return "El Capitan" + elif version.minor() == "10.10.Z": + return "Yosemite" + elif version.minor() == "10.9.Z": + return "Mavericks" + elif version.minor() == "10.8.Z": + 
return "Mountain Lion" + elif version.minor() == "10.7.Z": + return "Lion" + elif version.minor() == "10.6.Z": + return "Snow Leopard" + elif version.minor() == "10.5.Z": + return "Leopard" + elif version.minor() == "10.4.Z": + return "Tiger" + elif version.minor() == "10.3.Z": + return "Panther" + elif version.minor() == "10.2.Z": + return "Jaguar" + elif version.minor() == "10.1.Z": + return "Puma" + elif version.minor() == "10.0.Z": + return "Cheetha" + + def get_freebsd_version(self): + return platform.release().split("-")[0] + + def get_solaris_version_name(self, version): + if not version: + return None + elif version.minor() == "5.10": + return "Solaris 10" + elif version.minor() == "5.11": + return "Solaris 11" + +try: + os_info = OSInfo() +except Exception as exc: + logger.error(exc) + print("Error detecting os_info") + + +class SystemPackageTool(object): + + def __init__(self, runner=None): + self._runner = runner or ConanRunner() + env_sudo = os.environ.get("CONAN_SYSREQUIRES_SUDO", None) + self._sudo = (env_sudo != "False" and env_sudo != "0") + self._os_info = OSInfo() + + def update(self): + """ + Get the system package tool update command + """ + sudo_str = "sudo " if self._sudo else "" + update_command = None + if self._os_info.with_apt: + update_command = "%sapt-get update" % sudo_str + elif self._os_info.with_yum: + update_command = "%syum check-update" % sudo_str + elif self._os_info.is_macos: + update_command = "brew update" + + if update_command: + print("Running: %s" % update_command) + if self._runner(update_command, True) != 0: + raise ConanException("Command '%s' failed" % update_command) + + def install(self, package_name): + ''' + Get the system package tool install command. 
+ ''' + sudo_str = "sudo " if self._sudo else "" + install_command = None + if self._os_info.with_apt: + install_command = "%sapt-get install -y %s" % (sudo_str, package_name) + elif self._os_info.with_yum: + install_command = "%syum install -y %s" % (sudo_str, package_name) + elif self._os_info.is_macos: + install_command = "brew install %s" % package_name + + if install_command: + print("Running: %s" % install_command) + if self._runner(install_command, True) != 0: + raise ConanException("Command '%s' failed" % install_command) + else: + print("Warn: Only available for linux with apt-get or yum or OSx with brew") + return None diff --git a/testbed/conan-io__conan/conans/util/__init__.py b/testbed/conan-io__conan/conans/util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/conan-io__conan/conans/util/config_parser.py b/testbed/conan-io__conan/conans/util/config_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..534f5df381d3fe7edef82f0a5da6b4199d47ba13 --- /dev/null +++ b/testbed/conan-io__conan/conans/util/config_parser.py @@ -0,0 +1,65 @@ +import re +from conans.errors import ConanException + + +def get_bool_from_text_value(value): + """ to be deprecated + It has issues, as accepting into the registry whatever=value, as False, withoug + complaining + """ + return (value == "1" or value.lower() == "yes" or value.lower() == "y" or + value.lower() == "true") if value else True + + +def get_bool_from_text(value): + value = value.lower() + if value in ["1", "yes", "y", "true"]: + return True + if value in ["0", "no", "n", "false"]: + return False + raise ConanException("Unrecognized boolean value '%s'" % value) + + +class ConfigParser(object): + """ util class to load a file with sections as [section1] + checking the values of those sections, and returns each section + as parser.section + Currently used in ConanInfo and ConanFileTextLoader + """ + 
def __init__(self, text, allowed_fields=None, parse_lines=False): + self._sections = {} + self._allowed_fields = allowed_fields or [] + pattern = re.compile("^\[([a-z_]{2,50})\]") + current_lines = None + for line in text.splitlines(): + line = line.strip() + if not line or line[0] == '#': + continue + field = None + if line[0] == '[': + m = pattern.match(line) + if m: + field = m.group(1) + else: + raise ConanException("ConfigParser: Bad syntax '%s'" % line) + if field: + if self._allowed_fields and field not in self._allowed_fields: + raise ConanException("ConfigParser: Unrecognized field '%s'" % field) + current_lines = [] + self._sections[field] = current_lines + else: + if current_lines is None: + raise ConanException("ConfigParser: Unexpected line '%s'" % line) + if parse_lines: + line = line.split('#')[0] + line = line.strip() + current_lines.append(line) + + def __getattr__(self, name): + if name in self._sections: + return "\n".join(self._sections[name]) + else: + if self._allowed_fields and name in self._allowed_fields: + return "" + else: + raise ConanException("ConfigParser: Unrecognized field '%s'" % name) diff --git a/testbed/conan-io__conan/conans/util/env_reader.py b/testbed/conan-io__conan/conans/util/env_reader.py new file mode 100644 index 0000000000000000000000000000000000000000..e49e24819f8dd3680f43a5f3006c4ae2dd1f43b0 --- /dev/null +++ b/testbed/conan-io__conan/conans/util/env_reader.py @@ -0,0 +1,26 @@ +""" + Get variables from environment. + Automatically handle types inferring datatype from default value. 
+ + Usage: + get_env('CONAN_SSL_ENABLED', False) => Will autotransform ENV CONAN_SSL_ENABLED to boolean + +""" +import os + + +def get_env(env_key, default=None, environment=os.environ): + '''Get the env variable associated with env_key''' + env_var = environment.get(env_key, default) + if env_var != default: + if isinstance(default, str): + return env_var + elif isinstance(default, bool): + return env_var == "1" + elif isinstance(default, int): + return int(env_var) + elif isinstance(default, float): + return float(env_var) + elif isinstance(default, list): + return env_var.split(",") + return env_var diff --git a/testbed/conan-io__conan/conans/util/files.py b/testbed/conan-io__conan/conans/util/files.py new file mode 100644 index 0000000000000000000000000000000000000000..65ec63479b5ebc485a851d3833753e2a9b2db3ef --- /dev/null +++ b/testbed/conan-io__conan/conans/util/files.py @@ -0,0 +1,238 @@ +import os +import shutil +from errno import ENOENT, EEXIST +import hashlib +import sys +from os.path import abspath, realpath, join as joinpath +import platform +import re +import six +from conans.util.log import logger +import tarfile + + +def decode_text(text): + decoders = ["utf-8", "Windows-1252"] + for decoder in decoders: + try: + return text.decode(decoder) + except UnicodeDecodeError: + continue + logger.warn("can't decode %s" % str(text)) + return text.decode("utf-8", "ignore") # Ignore not compatible characters + + +def touch(fname, times=None): + os.utime(fname, times) + + +def normalize(text): + if platform.system() == "Windows": + return re.sub("\r?\n", "\r\n", text) + else: + return text + + +def md5(content): + md5alg = hashlib.md5() + if isinstance(content, bytes): + tmp = content + else: + tmp = content.encode() + md5alg.update(tmp) + return md5alg.hexdigest() + + +def md5sum(file_path): + return _generic_algorithm_sum(file_path, "md5") + + +def sha1sum(file_path): + return _generic_algorithm_sum(file_path, "sha1") + + +def _generic_algorithm_sum(file_path, 
algorithm_name): + + with open(file_path, 'rb') as fh: + m = hashlib.new(algorithm_name) + while True: + data = fh.read(8192) + if not data: + break + m.update(data) + return m.hexdigest() + + +def save(path, content, append=False): + ''' + Saves a file with given content + Params: + path: path to write file to + load: contents to save in the file + ''' + try: + os.makedirs(os.path.dirname(path)) + except: + pass + + if six.PY3: + if not isinstance(content, bytes): + content = bytes(content, "utf-8") + mode = 'wb' if not append else 'ab' + with open(path, mode) as handle: + handle.write(content) + + +def save_files(path, files): + for name, content in list(files.items()): + save(os.path.join(path, name), content) + + +def load(path, binary=False): + '''Loads a file content''' + with open(path, 'rb') as handle: + tmp = handle.read() + return tmp if binary else decode_text(tmp) + + +def relative_dirs(path): + ''' Walks a dir and return a list with the relative paths ''' + ret = [] + for dirpath, _, fnames in os.walk(path): + for filename in fnames: + tmp = os.path.join(dirpath, filename) + tmp = tmp[len(path) + 1:] + ret.append(tmp) + return ret + + +def _change_permissions(func, path, exc_info): + import stat + if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + +def rmdir(path): + '''Recursive rm of a directory. 
If the dir does not exist, + the error is silently ignored (no exception is raised)''' + try: + shutil.rmtree(path, onerror=_change_permissions) + except OSError as err: + if err.errno == ENOENT: + return + raise + + +def mkdir(path): + """Recursive mkdir, doesn't fail if already existing""" + try: + os.makedirs(path) + except OSError as err: + if err.errno != EEXIST: + raise + + +def path_exists(path, basedir): + """Case sensitive, for windows, optional + basedir to skip the caps check for tmp folders in testing for example (returned always + in lowercase for some strange reason)""" + exists = os.path.exists(path) + if not exists or sys.platform == "linux2": + return exists + + path = os.path.normpath(path) + path = os.path.relpath(path, basedir) + chunks = path.split(os.sep) + tmp = basedir + + for chunk in chunks: + if chunk and chunk not in os.listdir(tmp): + return False + tmp = os.path.normpath(tmp + os.sep + chunk) + return True + + +def gzopen_without_timestamps(name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """ !! Method overridden here to pass mtime=0 (!=None) to avoid time.time() being + set in the Gzip file, causing md5 to change. 
Not possible using the + previous tarfile open because arguments are not passed to GzipFile constructor + """ + from tarfile import CompressionError, ReadError + + if mode not in ("r", "w"): + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + try: + fileobj = gzip.GzipFile(name, mode, compresslevel, fileobj, mtime=0) + except OSError: + if fileobj is not None and mode == 'r': + raise ReadError("not a gzip file") + raise + + try: + t = tarfile.TarFile.taropen(name, mode, fileobj, **kwargs) + except IOError: + fileobj.close() + if mode == 'r': + raise ReadError("not a gzip file") + raise + except: + fileobj.close() + raise + t._extfileobj = False + return t + + +def tar_extract(fileobj, destination_dir): + '''Extract a tar file, skipping entries with absolute (escaping) paths and fixing + the path separators if the tar was created on Windows''' + def badpath(path, base): + # joinpath will ignore base if path is absolute + return not realpath(abspath(joinpath(base, path))).startswith(base) + + def safemembers(members): + base = realpath(abspath(".")) + + for finfo in members: + if badpath(finfo.name, base) or finfo.islnk(): + continue + else: + # Fixes extracting a Windows-created tar on Linux + finfo.name = finfo.name.replace("\\", "/") + yield finfo + + the_tar = tarfile.open(fileobj=fileobj) + the_tar.extractall(path=destination_dir, members=safemembers(the_tar)) + the_tar.close() + + +def list_folder_subdirs(basedir="", level=None): + ret = [] + for root, dirs, _ in os.walk(basedir): + rel_path = os.path.relpath(root, basedir) + if rel_path == ".": + continue + dir_split = rel_path.split(os.sep) + if level is not None: + if len(dir_split) == level: + ret.append("/".join(dir_split)) + dirs[:] = [] # Stop iterating into subdirs + else: + ret.append("/".join(dir_split)) + return ret + + +def exception_message_safe(exc): + try: + return str(exc) + except: + return 
decode_text(repr(exc)) diff --git a/testbed/conan-io__conan/conans/util/log.py b/testbed/conan-io__conan/conans/util/log.py new file mode 100644 index 0000000000000000000000000000000000000000..8c5efa3c55cc95246a020e957b5cb965f359aef2 --- /dev/null +++ b/testbed/conan-io__conan/conans/util/log.py @@ -0,0 +1,46 @@ +import logging +from logging import StreamHandler +import sys +from conans.util.env_reader import get_env + + +# #### LOGGER, MOVED FROM CONF BECAUSE OF MULTIPLE PROBLEM WITH CIRCULAR INCLUDES ##### +CONAN_LOGGING_LEVEL = get_env('CONAN_LOGGING_LEVEL', logging.CRITICAL) +CONAN_LOGGING_FILE = get_env('CONAN_LOGGING_FILE', None) # None is stdout + + +class MultiLineFormatter(logging.Formatter): + def format(self, record): + str_ = logging.Formatter.format(self, record) + separator = record.message if record.message else None + if separator is None: + return separator + tmp = str_.split(separator) + if len(tmp) == 2: + header, _ = tmp + else: + header = tmp + str_ = str_.replace('\n', '\n' + ' ' * len(header)) + return str_ + +logger = logging.getLogger('conans') +if CONAN_LOGGING_FILE is not None: + hdlr = logging.FileHandler(CONAN_LOGGING_FILE) +else: + hdlr = StreamHandler(sys.stderr) + +formatter = MultiLineFormatter('%(levelname)-6s:%(filename)-15s[%(lineno)d]: ' + '%(message)s [%(asctime)s]') +hdlr.setFormatter(formatter) +logger.addHandler(hdlr) +logger.setLevel(CONAN_LOGGING_LEVEL) + + +# CRITICAL = 50 +# FATAL = CRITICAL +# ERROR = 40 +# WARNING = 30 +# WARN = WARNING +# INFO = 20 +# DEBUG = 10 +# NOTSET = 0 diff --git a/testbed/conan-io__conan/conans/util/sha.py b/testbed/conan-io__conan/conans/util/sha.py new file mode 100644 index 0000000000000000000000000000000000000000..1e50aff2c80d76612055794be6e51968441159ce --- /dev/null +++ b/testbed/conan-io__conan/conans/util/sha.py @@ -0,0 +1,9 @@ +import hashlib + + +def sha1(value): + if value is None: + return None + md = hashlib.sha1() + md.update(value) + return md.hexdigest() diff --git 
a/testbed/conan-io__conan/conans/util/tracer.py b/testbed/conan-io__conan/conans/util/tracer.py new file mode 100644 index 0000000000000000000000000000000000000000..5f674e29106a0f369ec0fe916ff7a7b64be16171 --- /dev/null +++ b/testbed/conan-io__conan/conans/util/tracer.py @@ -0,0 +1,131 @@ +import os +from conans.errors import ConanException +import fasteners +from conans.util.log import logger +import json +from conans.model.ref import PackageReference, ConanFileReference +import time +from os.path import isdir +import copy + +TRACER_ACTIONS = ["UPLOADED_RECIPE", "UPLOADED_PACKAGE", + "DOWNLOADED_RECIPE", "DOWNLOADED_PACKAGE", + "PACKAGE_BUILT_FROM_SOURCES", + "GOT_RECIPE_FROM_LOCAL_CACHE", "GOT_PACKAGE_FROM_LOCAL_CACHE", + "REST_API_CALL", "COMMAND", + "EXCEPTION", + "DOWNLOAD"] + +MASKED_FIELD = "**********" + + +def _validate_action(action_name): + if action_name not in TRACER_ACTIONS: + raise ConanException("Unknown action %s" % action_name) + +tracer_file = None + + +def _get_tracer_file(): + ''' + If CONAN_TRACE_FILE is a file in an existing dir will log to it creating the file if needed + Otherwise won't log anything + ''' + global tracer_file + if tracer_file is None: + trace_path = os.environ.get("CONAN_TRACE_FILE", None) + if trace_path is not None: + if not os.path.exists(os.path.dirname(trace_path)): + raise ConanException("The specified path doesn't exist: '%s'" % trace_path) + if isdir(trace_path): + raise ConanException("CONAN_TRACE_FILE is a directory. 
Please, specify a file path") + tracer_file = trace_path + return tracer_file + + +def _append_to_log(obj): + """Add a new line to the log file locking the file to protect concurrent access""" + if _get_tracer_file(): + filepath = _get_tracer_file() + with fasteners.InterProcessLock(filepath + ".lock", logger=logger): + with open(filepath, "a") as logfile: + logfile.write(json.dumps(obj, sort_keys=True) + "\n") + + +def _append_action(action_name, props): + """Validate the action_name and append to logs""" + _validate_action(action_name) + props["_action"] = action_name + props["time"] = time.time() + _append_to_log(props) + + +# ############## LOG METHODS ###################### + +def log_recipe_upload(conan_reference, duration, files_uploaded): + assert(isinstance(conan_reference, ConanFileReference)) + _append_action("UPLOADED_RECIPE", {"_id": str(conan_reference), + "duration": duration, + "files": files_uploaded}) + + +def log_package_upload(package_ref, duration, files_uploaded): + '''files_uploaded is a dict with relative path as keys and abs path as values''' + assert(isinstance(package_ref, PackageReference)) + _append_action("UPLOADED_PACKAGE", {"_id": str(package_ref), + "duration": duration, + "files": files_uploaded}) + + +def log_recipe_download(conan_reference, duration, remote, files_downloaded): + assert(isinstance(conan_reference, ConanFileReference)) + _append_action("DOWNLOADED_RECIPE", {"_id": str(conan_reference), + "duration": duration, + "remote": remote.name, + "files": files_downloaded}) + + +def log_package_download(package_ref, duration, remote, files_downloaded): + assert(isinstance(package_ref, PackageReference)) + _append_action("DOWNLOADED_PACKAGE", {"_id": str(package_ref), + "duration": duration, + "remote": remote.name, + "files": files_downloaded}) + + +def log_recipe_got_from_local_cache(conan_reference): + assert(isinstance(conan_reference, ConanFileReference)) + _append_action("GOT_RECIPE_FROM_LOCAL_CACHE", {"_id": 
str(conan_reference)}) + + +def log_package_got_from_local_cache(package_ref): + assert(isinstance(package_ref, PackageReference)) + _append_action("GOT_PACKAGE_FROM_LOCAL_CACHE", {"_id": str(package_ref)}) + + +def log_package_built(package_ref, duration, log_run=None): + assert(isinstance(package_ref, PackageReference)) + _append_action("PACKAGE_BUILT_FROM_SOURCES", {"_id": str(package_ref), "duration": duration, "log": log_run}) + + +def log_client_rest_api_call(url, method, duration, headers): + headers = copy.copy(headers) + headers["Authorization"] = MASKED_FIELD + headers["X-Client-Anonymous-Id"] = MASKED_FIELD + _append_action("REST_API_CALL", {"method": method, "url": url, + "duration": duration, "headers": headers}) + + +def log_command(name, parameters): + if name == "user" and "password" in parameters: + parameters = copy.copy(parameters) # Ensure we don't alter any app object like args + parameters["password"] = MASKED_FIELD + _append_action("COMMAND", {"name": name, "parameters": parameters}) + + +def log_exception(exc, message): + _append_action("EXCEPTION", {"class": str(exc.__class__.__name__), "message": message}) + + +def log_download(url, duration): + _append_action("DOWNLOAD", {"url": url, "duration": duration}) diff --git a/testbed/conan-io__conan/contributors.txt b/testbed/conan-io__conan/contributors.txt new file mode 100644 index 0000000000000000000000000000000000000000..c4133f52ffdb24a6264ac32585b30b2a1a417481 --- /dev/null +++ b/testbed/conan-io__conan/contributors.txt @@ -0,0 +1,19 @@ +Contributors +------------- + +This is the list of contributors to this project source code, in alphabetical order. +Many thanks to all of them! 
+ +- Bocanegra Algarra, Raul (raul.bocanegra.algarra@gmail.com) +- Dauphin, Loïc (astralien3000@yahoo.fr, @astralien3000) +- Díaz Más, Luis (piponazo@gmail, @pipotux) +- Dragly, Svenn-Arne (dragly.org) +- Hieta, Tobias (tobias@plex.tv, @tobiashieta) +- Hochstedler, Reid +- Ivek, Tomislav (tomislav.ivek@gmail.com, @tomivek) +- Kourkoulis, Dimitri (@dimi309) +- Lee, Jeongseok (jslee02@gmail.com, @jslee02) +- Márki, Róbert (gsmiko@gmail.com, @robertmrk) +- Ray, Chris (chris@xaltotun.com) +- Sechet, Olivier (osechet@gmail.com) +- Ford, Andrew (andrewford55139@gmail.com) diff --git a/testbed/conan-io__conan/pyinstaller.py b/testbed/conan-io__conan/pyinstaller.py new file mode 100644 index 0000000000000000000000000000000000000000..ab14ba8710dee704564bf173e1fe90ea1dae5dc2 --- /dev/null +++ b/testbed/conan-io__conan/pyinstaller.py @@ -0,0 +1,65 @@ +from __future__ import print_function +import os +import platform +import subprocess +import shutil +from distutils import dir_util + + +def _install_pyintaller(pyinstaller_path): + # try to install pyinstaller if not installed + if not os.path.exists(pyinstaller_path): + subprocess.call('git clone https://github.com/pyinstaller/pyinstaller.git', + cwd=os.path.curdir, shell=True) + subprocess.call('git checkout v3.1.1', cwd=pyinstaller_path, shell=True) + + +def _run_bin(pyinstaller_path): + # run the binary to test if working + conan_bin = os.path.join(pyinstaller_path, 'conan', 'dist', 'conan', 'conan') + if platform.system() == 'Windows': + conan_bin += '.exe' + retcode = os.system(conan_bin) + if retcode != 0: + raise Exception("Binary not working") + + +def pyinstall(source_folder): + pyinstaller_path = os.path.join(os.path.curdir, 'pyinstaller') + _install_pyintaller(pyinstaller_path) + + try: + shutil.rmtree(os.path.join(pyinstaller_path, 'conan')) + except Exception as e: + print("Unable to remove old folder", e) + try: + shutil.rmtree(os.path.join(pyinstaller_path, 'conan_server')) + except Exception as e: + 
print("Unable to remove old server folder", e) + + conan_path = os.path.join(source_folder, 'conans', 'conan.py') + conan_server_path = os.path.join(source_folder, 'conans', 'conan_server.py') + hidden = "--hidden-import=glob" + subprocess.call('python pyinstaller.py -y -p %s --console %s %s' + % (source_folder, conan_path, hidden), + cwd=pyinstaller_path, shell=True) + _run_bin(pyinstaller_path) + + subprocess.call('python pyinstaller.py -y -p %s --console %s' + % (source_folder, conan_server_path), + cwd=pyinstaller_path, shell=True) + + conan_bin = os.path.join(pyinstaller_path, 'conan', 'dist', 'conan') + conan_server_folder = os.path.join(pyinstaller_path, 'conan_server', 'dist', 'conan_server') + dir_util.copy_tree(conan_server_folder, conan_bin) + _run_bin(pyinstaller_path) + + return os.path.abspath(os.path.join(pyinstaller_path, 'conan', 'dist', 'conan')) + + +if __name__ == "__main__": + source_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) + output_folder = pyinstall(source_folder) + print("\n**************Conan binaries created!******************\n \ + \nAppend this folder to your system PATH: '%s'\nFeel free to move the whole folder to another location." % output_folder) + diff --git a/testbed/conan-io__conan/pylint.cnf b/testbed/conan-io__conan/pylint.cnf new file mode 100644 index 0000000000000000000000000000000000000000..9749c13666cba1b6faae40c142f51c8c8cc010fc --- /dev/null +++ b/testbed/conan-io__conan/pylint.cnf @@ -0,0 +1,266 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +# ignore=CVS +# ignore=resources +# ignore=ply_parsers + +# Pickle collected data for later comparisons. 
+persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=W0105 + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=html + +# Include message's id in output +# include-ids=no + +# Include symbolic ids of messages in output +# symbols=no + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. 
This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=no + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{0,30}$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_,ID,db,ui,io,cl + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match functions or classes name which do +# not require a docstring +no-docstring-rgx=__.*__|serialize|deserialize|.*Exception + + +[FORMAT] + +# Maximum number of characters on a single line. 
+max-line-length=100 + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO,ToDo + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject,AnsiCodes,BiiType,IDEType + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). +dummy-variables-rgx=_|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. 
+ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branchs=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=25 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,string,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/testbed/conan-io__conan/setup.py b/testbed/conan-io__conan/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..e366f0e2d5b01aecd0f54207e6954f921915d2c8 --- /dev/null +++ b/testbed/conan-io__conan/setup.py @@ -0,0 +1,131 @@ +"""A setuptools based setup module. +See: +https://packaging.python.org/en/latest/distributing.html +https://github.com/pypa/sampleproject +""" + +# Always prefer setuptools over distutils +from setuptools import setup, find_packages +# To use a consistent encoding +from codecs import open +from os import path +import os +import re +import platform + + +here = path.abspath(path.dirname(__file__)) + + +def get_requires(filename): + requirements = [] + with open(filename, "rt") as req_file: + for line in req_file.read().splitlines(): + if not line.strip().startswith("#"): + requirements.append(line) + return requirements + +project_requirements = get_requires("conans/requirements.txt") +if platform.system() == "Darwin": + project_requirements.extend(get_requires("conans/requirements_osx.txt")) +project_requirements.extend(get_requires("conans/requirements_server.txt")) +dev_requirements = get_requires("conans/requirements_dev.txt") + + +def load_version(): + '''Loads a file content''' + filename = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), + "conans", "__init__.py")) + with open(filename, "rt") as version_file: + conan_init = version_file.read() + version = re.search("__version__ = 
'([0-9a-z.-]+)'", conan_init).group(1) + return version + + +# def generate_long_description_file(): +# import pypandoc +# +# output = pypandoc.convert('README.md', 'rst') +# return output + +setup( + name='conan', + # Versions should comply with PEP440. For a discussion on single-sourcing + # the version across setup.py and the project code, see + # https://packaging.python.org/en/latest/single_source_version.html + version=load_version(), # + ".rc5", + + description='Conan C/C++ package manager', + # long_description="An open source, decentralized package manager, to automate building and sharing of packages", + # long_description=generate_long_description_file(), + + # The project's main homepage. + url='https://conan.io', + + # Author details + author='JFrog LTD', + author_email='luism@jfrog.com', + + # Choose your license + license='MIT', + + # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'Topic :: Software Development :: Build Tools', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + ], + + # What does your project relate to? + keywords=['C/C++', 'package', 'libraries', 'developer', 'manager', + 'dependency', 'tool', 'c', 'c++', 'cpp'], + + # You can just specify the packages manually here if your project is + # simple. Or you can use find_packages(). + packages=find_packages(), + + # Alternatively, if you want to distribute just a my_module.py, uncomment + # this: + # py_modules=["my_module"], + + # List run-time dependencies here. These will be installed by pip when + # your project is installed. For an analysis of "install_requires" vs pip's + # requirements files see: + # https://packaging.python.org/en/latest/requirements.html + install_requires=project_requirements, + + # List additional groups of dependencies here (e.g. development + # dependencies). 
You can install these using the following syntax, + # for example: + # $ pip install -e .[dev,test] + extras_require={ + 'dev': dev_requirements, + 'test': dev_requirements, + }, + + # If there are data files included in your packages that need to be + # installed, specify them here. If using Python 2.6 or less, then these + # have to be included in MANIFEST.in as well. + package_data={ + 'conans': ['*.txt'], + }, + + # Although 'package_data' is the preferred approach, in some case you may + # need to place data files outside of your packages. See: + # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa + # In this case, 'data_file' will be installed into '/my_data' + # data_files=[('my_data', ['data/data_file'])], + + # To provide executable scripts, use entry points in preference to the + # "scripts" keyword. Entry points provide cross-platform support and allow + # pip to create the appropriate form of executable for the target platform. + entry_points={ + 'console_scripts': [ + 'conan=conans.conan:run', + 'conan_server=conans.conan_server:run', + ], + }, +) diff --git a/testbed/deepset-ai__haystack/.github/CODEOWNERS b/testbed/deepset-ai__haystack/.github/CODEOWNERS new file mode 100644 index 0000000000000000000000000000000000000000..c559d08fd74807d204882392d9456f0f847e076a --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# See https://help.github.com/articles/about-codeowners/ for syntax + +# Core Engineering will be the default owners for everything +# in the repo. Unless a later match takes precedence, +# @deepset-ai/core-engineering will be requested for review +# when someone opens a pull request. 
+* @deepset-ai/open-source-engineering + +# Documentation +*.md @deepset-ai/documentation @deepset-ai/open-source-engineering +releasenotes/notes/* @deepset-ai/documentation @deepset-ai/open-source-engineering diff --git a/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/breaking-change-proposal.md b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/breaking-change-proposal.md new file mode 100644 index 0000000000000000000000000000000000000000..312243cb4072436e300d279e81d84cb36c8c3b1b --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/breaking-change-proposal.md @@ -0,0 +1,27 @@ +--- +name: Breaking change proposal +about: Track a breaking change in Haystack +title: '' +labels: breaking change +assignees: '' + +--- + +## Summary and motivation + +Briefly explain how the change is breaking and why it is needed. + +## Checklist + +```[tasklist] +### Tasks +- [ ] The changes are merged in the `main` branch (Code + Docstrings) +- [ ] Release notes have documented the breaking change +- [ ] A new version of `haystack-ai` has been released on PyPI +- [ ] Docs at https://docs.haystack.deepset.ai/ were updated +- [ ] Integrations on [haystack-core-integrations](https://github.com/deepset-ai/haystack-core-integrations) were updated (if needed) - This step might require a [Breaking change proposal](https://github.com/deepset-ai/haystack-core-integrations/issues/new?assignees=&labels=breaking+change&projects=&template=breaking-change-proposal.md&title=) on the repo +- [ ] Notebooks on https://github.com/deepset-ai/haystack-cookbook were updated (if needed) +- [ ] Tutorials on https://github.com/deepset-ai/haystack-tutorials were updated (if needed) +- [ ] Articles on https://github.com/deepset-ai/haystack-home/tree/main/content were updated (if needed) +- [ ] Integration tile on https://github.com/deepset-ai/haystack-integrations was updated (if needed) +``` diff --git a/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/bug_report.md 
b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000000000000000000000000000000000..ecfae37a685ca1c90e578d5997009ec3aa81320f --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,34 @@ +--- +name: Bug report +about: Errors you encountered +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**Error message** +Error that was thrown (if available) + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Additional context** +Add any other context about the problem here, like document types / preprocessing steps / settings of reader etc. + +**To Reproduce** +Steps to reproduce the behavior + +**FAQ Check** +- [ ] Have you had a look at [our new FAQ page](https://docs.haystack.deepset.ai/docs/faq)? + +**System:** + - OS: + - GPU/CPU: + - Haystack version (commit or version number): + - DocumentStore: + - Reader: + - Retriever: diff --git a/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/config.yml b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..9d36944740c974d365593b74e693f13341514bc1 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: true +contact_links: + - name: Something unclear? 
Just ask :) + url: https://github.com/deepset-ai/haystack/discussions/new + about: Start a Github discussion with your question diff --git a/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/feature_request.md b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000000000000000000000000000000000..bbcbbe7d61558adde3cbfd0c7a63a67c27ed6d30 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/testbed/deepset-ai__haystack/.github/actionlint.yml b/testbed/deepset-ai__haystack/.github/actionlint.yml new file mode 100644 index 0000000000000000000000000000000000000000..6064695225d48d4bd1c51d6e64441e2e22a9a80b --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/actionlint.yml @@ -0,0 +1,3 @@ +self-hosted-runner: + # Labels of self-hosted runner in array of string + labels: ["cml", "ubuntu-latest-4-cores"] diff --git a/testbed/deepset-ai__haystack/.github/dependabot.yml b/testbed/deepset-ai__haystack/.github/dependabot.yml new file mode 100644 index 0000000000000000000000000000000000000000..6778b0493a160f3c54eac25861965bad98965496 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: 'github-actions' + directory: '/' + schedule: + interval: 'daily' diff --git a/testbed/deepset-ai__haystack/.github/labeler.yml b/testbed/deepset-ai__haystack/.github/labeler.yml new file mode 100644 index 0000000000000000000000000000000000000000..6763e9af6e6f82cb394855ef9962510fa7949e36 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/labeler.yml @@ -0,0 +1,37 @@ +# Release lines +1.x: + - base-branch: 'v1.x' + +# Proposals +proposal: +- changed-files: + - any-glob-to-any-file: proposals/text/* + +# Topics +topic:tests: +- changed-files: + - any-glob-to-any-file: ['test/**/*','test/*'] + +topic:docker: +- changed-files: + - any-glob-to-any-file: docker/* + +topic:CI: +- changed-files: + - any-glob-to-any-file: ['.github/*','.github/**/*'] + +topic:DX: +- changed-files: + - any-glob-to-any-file: ["CONTRIBUTING.md", ".pre-commit-config.yaml",".gitignore"] + +topic:build/distribution: +- changed-files: + - any-glob-to-any-file: pyproject.toml + +topic:security: +- changed-files: + - any-glob-to-any-file: SECURITY.md + +topic:core: +- changed-files: + - any-glob-to-any-file: haystack/core/**/* diff --git a/testbed/deepset-ai__haystack/.github/pull_request_template.md 
def calculate_new_unstable(version: str):
    """Return the unstable tag for the minor release after *version*.

    *version* must be formatted like ``<major>.<minor>``,
    e.g. ``"1.9"`` -> ``"1.10-unstable"``.
    """
    major_part, minor_part = version.split(".")
    next_minor = int(minor_part) + 1
    return "{}.{}-unstable".format(major_part, next_minor)
1.9).", required=True + ) + args = parser.parse_args() + + if VERSION_VALIDATOR.match(args.new_version) is None: + sys.exit("Version must be formatted like so .") + + # This two are the version that we must have published in the end + new_stable = f"{args.new_version}" + new_unstable = calculate_new_unstable(args.new_version) + + versions = get_versions() + new_stable_is_published = new_stable in versions + new_unstable_is_published = new_unstable in versions + + if new_stable_is_published and new_unstable_is_published: + # If both versions are published there's nothing to do. + # We fail gracefully. + print(f"Both new version {new_stable} and {new_unstable} are already published.") + sys.exit(0) + elif new_stable_is_published or new_unstable_is_published: + # Either new stable or unstable is already published, it's to risky to + # proceed so we abort the publishing process. + sys.exit(f"Either version {new_stable} or {new_unstable} are already published. Too risky to proceed.") + + # This version must exist since it's the one we're trying to promote + # to stable. + current_unstable = f"{new_stable}-unstable" + + if current_unstable not in versions: + sys.exit(f"Can't find version {current_unstable} to promote to {new_stable}") + + # Create create new unstable from the currently existing one. 
def readme_token():
    """Return the HTTP Basic-auth token built from the README_API_KEY env var.

    Raises:
        Exception: if README_API_KEY is not set.
    """
    api_key = os.getenv("README_API_KEY", None)
    if not api_key:
        raise Exception("README_API_KEY env var is not set")

    # Readme.io uses the API key as the username with an empty password.
    api_key = f"{api_key}:"
    return base64.b64encode(api_key.encode("utf-8")).decode("utf-8")


def create_headers(version: str):
    """Build the auth and version headers required by every Readme.io request."""
    return {"authorization": f"Basic {readme_token()}", "x-readme-version": version}


def get_docs_in_category(category_slug: str, version: str) -> List[str]:
    """
    Returns the slugs of all documents in a category for the specific version.
    """
    url = f"https://dash.readme.com/api/v1/categories/{category_slug}/docs"
    headers = create_headers(version)
    res = requests.get(url, headers=headers, timeout=10)
    # Fail loudly on HTTP errors instead of crashing later inside res.json(),
    # matching the error handling already used by delete_doc().
    res.raise_for_status()
    return [doc["slug"] for doc in res.json()]


def delete_doc(slug: str, version: str):
    """Delete the document identified by *slug* for *version* on Readme.io."""
    url = f"https://dash.readme.com/api/v1/docs/{slug}"
    headers = create_headers(version)
    res = requests.delete(url, headers=headers, timeout=10)
    res.raise_for_status()
def docstrings_checksum(python_files: Iterator[Path]):
    """Return an MD5 hex digest over all docstrings found in *python_files*.

    Docstrings are collected from modules, classes, functions and async
    functions, sorted, then hashed, so the checksum is stable regardless
    of traversal order.
    """
    docstrings = []
    for source_file in python_files:
        tree = ast.parse(source_file.read_text())
        for node in ast.walk(tree):
            # Only these node types can carry a docstring; skipping the
            # rest prevents ast.get_docstring() from raising.
            if isinstance(node, (ast.AsyncFunctionDef, ast.FunctionDef, ast.ClassDef, ast.Module)):
                docstring = ast.get_docstring(node)
                if docstring:
                    docstrings.append(docstring)

    # ast.walk() yields nodes in no specified order, so sort for stability.
    # See https://docs.python.org/3/library/ast.html#ast.walk
    docstrings.sort()

    return hashlib.md5(str(docstrings).encode("utf-8")).hexdigest()
https://docs.readme.com/main/docs/openapi-extensions) +specs.update({"x-readme": {"proxy-enabled": False, "samples-languages": ["curl", "python"]}}) + +# Dump the specs into a JSON file +with open("openapi.json", "w") as f: + json.dump(specs, f, indent=4) + f.write("\n") # We need to add a newline, otherwise there will be a conflict with end-of-file-fixer pre-commit hook diff --git a/testbed/deepset-ai__haystack/.github/utils/promote_unstable_docs.py b/testbed/deepset-ai__haystack/.github/utils/promote_unstable_docs.py new file mode 100644 index 0000000000000000000000000000000000000000..52df7940515fb90160129ee735d02a8df89ea804 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/utils/promote_unstable_docs.py @@ -0,0 +1,29 @@ +import re +import sys +import argparse + +from readme_api import get_versions, promote_unstable_to_stable + +VERSION_VALIDATOR = re.compile(r"^[0-9]+\.[0-9]+$") + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-v", "--version", help="The version to promote to stable (e.g. 
2.1).", required=True + ) + args = parser.parse_args() + + if VERSION_VALIDATOR.match(args.version) is None: + sys.exit("Version must be formatted like so .") + + unstable_version = f"{args.version}-unstable" + stable_version = args.version + + versions = get_versions() + if stable_version in versions: + sys.exit(f"Version {stable_version} is already published.") + + if unstable_version not in versions: + sys.exit(f"Can't find version {unstable_version} to promote to {stable_version}") + + promote_unstable_to_stable(unstable_version, stable_version) diff --git a/testbed/deepset-ai__haystack/.github/utils/pydoc-markdown.sh b/testbed/deepset-ai__haystack/.github/utils/pydoc-markdown.sh new file mode 100644 index 0000000000000000000000000000000000000000..670bd09697f7e693afc611e025875de50aca1ad8 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/utils/pydoc-markdown.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -e # Fails on any error in the following loop +cd docs/pydoc +rm -rf temp && mkdir temp +cd temp +for file in ../config/* ; do + echo "Converting $file..." 
# Matches self-references to optional dependencies, e.g. "farm-haystack[a,b]".
matcher = re.compile(r"farm-haystack\[(.+)\]")


def resolve(target: str, extras: dict, results: set):
    """Recursively expand *target* through *extras* into concrete requirements.

    Names that are not keys of *extras* are treated as concrete
    dependencies and added to *results* (mutated in place).
    """
    if target not in extras:
        # Not an extra: it's a plain requirement.
        results.add(target)
        return

    for dependency in extras[target]:
        match = matcher.match(dependency)
        if match is None:
            resolve(dependency, extras, results)
        else:
            # A farm-haystack[x,y] self-reference: expand each listed extra.
            for extra_name in match.group(1).split(","):
                resolve(extra_name, extras, results)
def readme_token():
    """Return the HTTP Basic-auth token built from the RDME_API_KEY env var.

    Raises:
        Exception: if RDME_API_KEY is not set.
    """
    api_key = os.getenv("RDME_API_KEY", None)
    if not api_key:
        raise Exception("RDME_API_KEY env var is not set")

    # Readme.io expects "<api key>:" (empty password) as the Basic-auth user.
    raw = f"{api_key}:".encode("utf-8")
    return base64.b64encode(raw).decode("utf-8")


def get_versions():
    """
    Return all versions currently published in Readme.io.
    """
    res = requests.get("https://dash.readme.com/api/v1/version", auth=ReadmeAuth(), timeout=30)
    res.raise_for_status()
    return [entry["version"] for entry in res.json()]


def create_new_unstable(current: str, new: str):
    """
    Create new version by copying current.

    :param current: Existing current unstable version
    :param new: Non existing new unstable version
    """
    payload = {
        "is_beta": False,
        "version": new,
        "from": current,
        "is_hidden": False,
        "is_stable": False,
    }
    res = requests.post("https://dash.readme.com/api/v1/version/", json=payload, auth=ReadmeAuth(), timeout=30)
    res.raise_for_status()


def promote_unstable_to_stable(unstable: str, stable: str):
    """
    Rename the current unstable to stable and set it as stable.

    :param unstable: Existing unstable version
    :param stable: Non existing new stable version
    """
    payload = {
        "is_beta": False,
        "version": stable,
        "from": unstable,
        "is_hidden": False,
        "is_stable": True,
    }
    res = requests.put(f"https://dash.readme.com/api/v1/version/{unstable}", json=payload, auth=ReadmeAuth(), timeout=30)
    res.raise_for_status()
username: ${{ secrets.DOCKER_HUB_USER }} + password: ${{ secrets.DOCKER_HUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: $DOCKER_REPO_NAME + + - name: Build base images + uses: docker/bake-action@v5 + env: + IMAGE_TAG_SUFFIX: ${{ steps.meta.outputs.version }} + HAYSTACK_VERSION: ${{ steps.meta.outputs.version }} + with: + workdir: docker + targets: base + push: true + + - name: Test base image + run: | + EXPECTED_VERSION=$(cat VERSION.txt) + if [[ $EXPECTED_VERSION == *"-"* ]]; then + EXPECTED_VERSION=$(cut -d '-' -f 1 < VERSION.txt)$(cut -d '-' -f 2 < VERSION.txt) + fi + TAG="base-${{ steps.meta.outputs.version }}" + + PLATFORM="linux/amd64" + VERSION=$(docker run --platform "$PLATFORM" --rm "deepset/haystack:$TAG" python -c"from haystack.version import __version__; print(__version__)") + [[ "$VERSION" = "$EXPECTED_VERSION" ]] || echo "::error 'Haystack version in deepset/haystack:$TAG image for $PLATFORM is different from expected'" + + PLATFORM="linux/arm64" + VERSION=$(docker run --platform "$PLATFORM" --rm "deepset/haystack:$TAG" python -c"from haystack.version import __version__; print(__version__)") + [[ "$VERSION" = "$EXPECTED_VERSION" ]] || echo "::error 'Haystack version in deepset/haystack:$TAG image for $PLATFORM is different from expected'" + + # Remove image after test to avoid filling the GitHub runner and prevent its failure + docker rmi "deepset/haystack:$TAG" diff --git a/testbed/deepset-ai__haystack/.github/workflows/docstring_labeler.yml b/testbed/deepset-ai__haystack/.github/workflows/docstring_labeler.yml new file mode 100644 index 0000000000000000000000000000000000000000..1dc5ddd9dc394c78f7a7243b042029d6d4270492 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/docstring_labeler.yml @@ -0,0 +1,59 @@ +name: Add label on docstrings edit + +on: + pull_request_target: + paths: + - "haystack/**/*.py" + +env: + PYTHON_VERSION: "3.11" + +jobs: + label: + runs-on: ubuntu-latest + + 
steps: + - name: Checkout base commit + uses: actions/checkout@v4 + with: + ref: ${{ github.base_ref }} + + - name: Copy file + # We copy our script after base ref checkout so we keep executing + # the same version even after checking out the HEAD ref. + # This is done to prevent executing malicious code in forks' PRs. + run: cp .github/utils/docstrings_checksum.py "${{ runner.temp }}/docstrings_checksum.py" + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Get docstrings + id: base-docstrings + run: | + CHECKSUM=$(python "${{ runner.temp }}/docstrings_checksum.py" --root "${{ github.workspace }}") + echo "checksum=$CHECKSUM" >> "$GITHUB_OUTPUT" + + - name: Checkout HEAD commit + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.ref }} + # This must be set to correctly checkout a fork + repository: ${{ github.event.pull_request.head.repo.full_name }} + + - name: Get docstrings + id: head-docstrings + run: | + CHECKSUM=$(python "${{ runner.temp }}/docstrings_checksum.py" --root "${{ github.workspace }}") + echo "checksum=$CHECKSUM" >> "$GITHUB_OUTPUT" + + - name: Check if we should label + id: run-check + run: echo "should_run=${{ steps.base-docstrings.outputs.checksum != steps.head-docstrings.outputs.checksum }}" >> "$GITHUB_OUTPUT" + + - name: Add label + if: ${{ steps.run-check.outputs.should_run == 'true' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: gh pr edit ${{ github.event.pull_request.html_url }} --add-label "type:documentation" diff --git a/testbed/deepset-ai__haystack/.github/workflows/docstrings_linting.yml b/testbed/deepset-ai__haystack/.github/workflows/docstrings_linting.yml new file mode 100644 index 0000000000000000000000000000000000000000..beb18facc71465370dc99dbbf36c56c7508aaa0d --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/docstrings_linting.yml @@ -0,0 +1,28 @@ +name: run docstrings linting + +on: + 
pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + paths: + - "**.py" + +env: + HATCH_VERSION: "1.13.0" + +jobs: + docstrings-linting: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install Hatch + run: pip install hatch==${{ env.HATCH_VERSION }} + + - name: ruff docstrings linting + run: hatch run ruff check haystack diff --git a/testbed/deepset-ai__haystack/.github/workflows/e2e.yml b/testbed/deepset-ai__haystack/.github/workflows/e2e.yml new file mode 100644 index 0000000000000000000000000000000000000000..7e817f2039f0e545b6be1ec1a26133c2c2a3e79a --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/e2e.yml @@ -0,0 +1,58 @@ +# If you change this name also do it in ci_metrics.yml +name: end-to-end + +on: + workflow_dispatch: # Activate this workflow manually + schedule: + - cron: "0 0 * * *" + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + paths: + - "e2e/**/*.py" + - ".github/workflows/e2e.yml" + +env: + PYTHON_VERSION: "3.9" + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + HATCH_VERSION: "1.13.0" + +jobs: + run: + timeout-minutes: 60 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + run: pip install hatch==${{ env.HATCH_VERSION }} + + - name: Run tests + run: hatch run test:e2e + + - name: Send event to Datadog + if: failure() && github.event_name == 'schedule' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "error" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - 
"workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" diff --git a/testbed/deepset-ai__haystack/.github/workflows/github_release.yml b/testbed/deepset-ai__haystack/.github/workflows/github_release.yml new file mode 100644 index 0000000000000000000000000000000000000000..4a6a00eb01acc8221ddb17b5140a4f48ae151f45 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/github_release.yml @@ -0,0 +1,74 @@ +name: Project release on Github + +on: + workflow_dispatch: # this is useful to re-generate the release page without a new tag being pushed + push: + tags: + - "v2.[0-9]+.[0-9]+*" + # Ignore release versions tagged with -rc0 suffix + - "!v2.[0-9]+.[0-9]-rc0" + +jobs: + generate-notes: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-tags: true + fetch-depth: 0 # slow but needed by reno + + - name: Parse version + id: version + run: | + echo "current_release=$(awk -F \\- '{print $1}' < VERSION.txt)" >> "$GITHUB_OUTPUT" + echo "current_pre_release=$(awk -F \\- '{print $2}' < VERSION.txt)" >> "$GITHUB_OUTPUT" + + - name: Install reno + run: | + python -m pip install --upgrade pip + pip install "reno<5" + + - name: Generate release notes for release candidates - minor releases + if: steps.version.outputs.current_pre_release != '' && endsWith(steps.version.outputs.current_release, '.0') + env: + # When generating notes for release candidates of minor versions, pick every vX.Y.Z-rcN but + # stop when encounter vX.Y.Z-rc0. The -rc0 tag is added automatically when + # we create the release branch, so we can assume it's always there. 
+ EARLIEST_VERSION: v${{ steps.version.outputs.current_release }}-rc0 + run: | + reno report --no-show-source --ignore-cache --earliest-version "$EARLIEST_VERSION" -o relnotes.rst + + - name: Generate release notes for release candidates - bugfix releases + if: steps.version.outputs.current_pre_release != '' && !endsWith(steps.version.outputs.current_release, '.0') + env: + # When generating notes for release candidates of bugfix releases, pick every vX.Y.Z-rcN but + # stop when we encounter vX.Y.Z-rc1. + # In this case, we don't have the -rc0 tag, because we don't need to go through commits on main, + # as we cherry-pick them into the release branch. + EARLIEST_VERSION: v${{ steps.version.outputs.current_release }}-rc1 + run: | + reno report --no-show-source --ignore-cache --earliest-version "$EARLIEST_VERSION" -o relnotes.rst + + - name: Generate release notes for the final release + if: steps.version.outputs.current_pre_release == '' + # When generating notes for the final release vX.Y.Z, we just pass --version and reno + # will automatically collapse all the vX.Y.Z-rcN. 
+ run: | + reno report --no-show-source --ignore-cache --version v${{ steps.version.outputs.current_release }} -o relnotes.rst + + - name: Convert to Markdown + uses: docker://pandoc/core:3.1 + with: + args: "--from rst --to markdown_github --no-highlight relnotes.rst -o relnotes.md --wrap=none" + + - name: Debug + run: | + cat relnotes.md + + - uses: ncipollo/release-action@v1 + with: + bodyFile: "relnotes.md" + prerelease: ${{ steps.version.outputs.current_pre_release }} + allowUpdates: true diff --git a/testbed/deepset-ai__haystack/.github/workflows/labeler.yml b/testbed/deepset-ai__haystack/.github/workflows/labeler.yml new file mode 100644 index 0000000000000000000000000000000000000000..2af558297ef107bb37d632577729e510cb1dc964 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/labeler.yml @@ -0,0 +1,15 @@ +name: "Labeler" +on: +- pull_request_target + +permissions: + contents: read + pull-requests: write + +jobs: + triage: + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v5 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/testbed/deepset-ai__haystack/.github/workflows/license_compliance.yml b/testbed/deepset-ai__haystack/.github/workflows/license_compliance.yml new file mode 100644 index 0000000000000000000000000000000000000000..1bf55ee536b107dc9fc7df32e45b1e3b2343f110 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/license_compliance.yml @@ -0,0 +1,92 @@ +name: License Compliance + +on: + pull_request: + paths: + - "**/pyproject.toml" + # Since we test PRs, there is no need to run the workflow at each + # merge on `main`. Let's use a cron job instead. 
+ schedule: + - cron: "0 0 * * *" # every day at midnight + +env: + CORE_DATADOG_API_KEY: ${{ secrets.CORE_DATADOG_API_KEY }} + PYTHON_VERSION: "3.10" + +jobs: + license_check_direct: + name: Direct dependencies only + env: + REQUIREMENTS_FILE: requirements_direct.txt + runs-on: ubuntu-latest + steps: + - name: Checkout the code + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Get direct dependencies + run: | + pip install toml + python .github/utils/pyproject_to_requirements.py pyproject.toml > ${{ env.REQUIREMENTS_FILE }} + + - name: Check Licenses + id: license_check_report + uses: pilosus/action-pip-license-checker@v2 + with: + github-token: ${{ secrets.GH_ACCESS_TOKEN }} + requirements: ${{ env.REQUIREMENTS_FILE }} + fail: "Copyleft,Other,Error" + # Exclusions in the vanilla distribution must be explicitly motivated + # + # - tqdm is MPL but there are no better alternatives + exclude: "(?i)^(tqdm).*" + + # We keep the license inventory on FOSSA + - name: Send license report to Fossa + uses: fossas/fossa-action@v1.4.0 + continue-on-error: true # not critical + with: + api-key: ${{ secrets.FOSSA_LICENSE_SCAN_TOKEN }} + + - name: Print report + if: ${{ always() }} + run: echo "${{ steps.license_check_report.outputs.report }}" + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + # This step would fail when running in PRs opened from forks since + # secrets are not accessible. + # To prevent showing bogus failures in those PRs we skip the step. + # The workflow will fail in any case if the actual check fails in the previous steps. 
+ if: (success() || failure()) && env.CORE_DATADOG_API_KEY != '' + uses: masci/datadog@v1 + with: + api-key: ${{ env.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.job }} in ${{ github.workflow }} workflow" + text: "License compliance check: direct dependencies only." + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" diff --git a/testbed/deepset-ai__haystack/.github/workflows/minor_version_release.yml b/testbed/deepset-ai__haystack/.github/workflows/minor_version_release.yml new file mode 100644 index 0000000000000000000000000000000000000000..20979ea91dbcc44720978ab2a35040ce9d10a22e --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/minor_version_release.yml @@ -0,0 +1,79 @@ +name: Minor Version Release + +on: + workflow_dispatch: + +env: + PYTHON_VERSION: "3.9" + +jobs: + sync: + runs-on: ubuntu-latest + steps: + - name: Checkout this repo + uses: actions/checkout@v4 + with: + ref: main + + - name: Define all versions + id: versions + shell: bash + # We only need `major.minor` in Readme so we cut the full version string to the first two tokens + run: | + echo "current_release_minor=$(cut -d "." 
-f 1,2 < VERSION.txt)" >> "$GITHUB_OUTPUT" + + - name: Bump version on main + shell: bash + env: + # We use the HAYSTACK_BOT_TOKEN here so the PR created by the step will + # trigger required workflows and can be merged by anyone + GITHUB_TOKEN: ${{ secrets.HAYSTACK_BOT_TOKEN }} + run: | + git config --global user.name "github-actions[bot]" + git config --global user.email "github-actions[bot]@users.noreply.github.com" + + git checkout main + + # Create the release branch from the current unstable + git checkout -b v${{ steps.versions.outputs.current_release_minor }}.x + git push -u origin v${{ steps.versions.outputs.current_release_minor }}.x + + # Tag the base with X.Y.Z-rc0. + # At this point VERSION.txt still contains the previous version and not + # the one specified by the tag. + # This is good though as we just need this to make reno work properly. + NEW_VERSION=$(awk -F. '/[0-9]+\./{$2++;print}' OFS=. < VERSION.txt) + echo "$NEW_VERSION" > VERSION.txt + VERSION_TAG="v$NEW_VERSION" + git tag "$VERSION_TAG" -m"$VERSION_TAG" + git push --tags + + # Create the branch that bumps the version on the dev branch + cat VERSION.txt + git checkout -b bump-version + git add . + git commit -m "Update unstable version to $NEW_VERSION" + git push -u origin bump-version + + # Create the PR + gh pr create -B main \ + -H bump-version \ + --title "Bump unstable version" \ + --body "This PR bumps the unstable version for \`v2.x\`. \ + The release branch \`v${{ steps.versions.outputs.current_release_minor }}.x\` has been correctly created. \ + Verify documentation on Readme has been correctly updated before approving and merging this PR." 
\ + --label "ignore-for-release-notes" + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install create_unstable_docs.py dependencies + run: pip install requests + + - name: Release Readme version + env: + RDME_API_KEY: ${{ secrets.README_API_KEY }} + run: | + git checkout main + python ./.github/utils/create_unstable_docs.py --new-version ${{ steps.versions.outputs.current_release_minor }} diff --git a/testbed/deepset-ai__haystack/.github/workflows/project.yml b/testbed/deepset-ai__haystack/.github/workflows/project.yml new file mode 100644 index 0000000000000000000000000000000000000000..45a8ca5f3b2cb531dedb26477f61e10ea658eff5 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/project.yml @@ -0,0 +1,16 @@ +name: Track issues with Github project + +on: + issues: + types: + - opened + +jobs: + add-to-project: + name: Add new issues to project for triage + runs-on: ubuntu-latest + steps: + - uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/deepset-ai/projects/5 + github-token: ${{ secrets.GH_PROJECT_PAT }} diff --git a/testbed/deepset-ai__haystack/.github/workflows/promote_unstable_docs.yml b/testbed/deepset-ai__haystack/.github/workflows/promote_unstable_docs.yml new file mode 100644 index 0000000000000000000000000000000000000000..a898ad5c285f2a106108e161402ee22008158df1 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/promote_unstable_docs.yml @@ -0,0 +1,38 @@ +name: Release new minor version docs + +on: + push: + tags: + # Trigger this only for the first patch release of the new minor + - "v[0-9]+.[0-9]+.0" + # Exclude 1.x tags + - "!v1.[0-9]+.[0-9]+" +env: + PYTHON_VERSION: "3.9" + +jobs: + promote: + runs-on: ubuntu-latest + steps: + - name: Checkout this repo + uses: actions/checkout@v4 + + - name: Get version to release + id: version + shell: bash + # We only need `major.minor` in Readme so we cut the full version string to the first two 
tokens + run: | + echo "version=$(cut -d "." -f 1,2 < VERSION.txt)" >> "$GITHUB_OUTPUT" + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install promote_unstable_docs.py dependencies + run: pip install requests + + - name: Release Readme version + env: + RDME_API_KEY: ${{ secrets.README_API_KEY }} + run: | + python ./.github/utils/promote_unstable_docs.py --version ${{ steps.version.outputs.version }} diff --git a/testbed/deepset-ai__haystack/.github/workflows/pypi_release.yml b/testbed/deepset-ai__haystack/.github/workflows/pypi_release.yml new file mode 100644 index 0000000000000000000000000000000000000000..807adff22625ff5c5b824751e0aa5c62474ef8e2 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/pypi_release.yml @@ -0,0 +1,31 @@ +name: Project release on PyPi + +on: + push: + tags: + - "v[0-9]+.[0-9]+.[0-9]+*" + # We must not release versions tagged with -rc0 suffix + - "!v[0-9]+.[0-9]+.[0-9]-rc0" + +env: + HATCH_VERSION: "1.13.0" + +jobs: + release-on-pypi: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install Hatch + run: pip install hatch==${{ env.HATCH_VERSION }} + + - name: Build Haystack + run: hatch build + + - name: Publish on PyPi + env: + HATCH_INDEX_USER: __token__ + HATCH_INDEX_AUTH: ${{ secrets.HAYSTACK_AI_PYPI_TOKEN }} + run: hatch publish -y diff --git a/testbed/deepset-ai__haystack/.github/workflows/readme_sync.yml b/testbed/deepset-ai__haystack/.github/workflows/readme_sync.yml new file mode 100644 index 0000000000000000000000000000000000000000..b387dbb6220738759ee562edc5dc0c0500c2f607 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/readme_sync.yml @@ -0,0 +1,65 @@ +name: Sync docs with Readme + +on: + pull_request: + paths: + - "docs/pydoc/**" + push: + branches: + - main + # release branches have the form v1.9.x + - "v[0-9]+.[0-9]+.x" + # Exclude 1.x release branches, there's another workflow handling those 
+ - "!v1.[0-9]+.x" + +env: + HATCH_VERSION: "1.13.0" + PYTHON_VERSION: "3.10" + +jobs: + sync: + runs-on: ubuntu-latest + steps: + - name: Checkout this repo + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + run: pip install hatch==${{ env.HATCH_VERSION }} + + - name: Generate API docs + env: + # This is necessary to fetch the documentation categories + # from Readme.io as we need them to associate the slug + # in config files with their id. + README_API_KEY: ${{ secrets.README_API_KEY }} + # The command is a bit misleading, we're not actually syncing anything here, + # we're just generating the markdown files from the yaml configs. + run: hatch run readme:sync + + - name: Get version + id: version-getter + run: | + VERSION="$(hatch version | cut -d '.' -f 1,2)" + CURRENT_BRANCH="${{ github.ref_name }}" + # If we're on `main` branch we should push docs to the unstable version + if [ "$CURRENT_BRANCH" = "main" ]; then + VERSION="$VERSION-unstable" + fi + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + + - name: Sync docs + if: github.event_name == 'push' + uses: readmeio/rdme@v8 + with: + rdme: docs ./docs/pydoc/temp --key=${{ secrets.README_API_KEY }} --version=${{ steps.version-getter.outputs.version }} + + - name: Delete outdated + if: github.event_name == 'push' + env: + README_API_KEY: ${{ secrets.README_API_KEY }} + run: hatch run readme:delete-outdated --version="${{ steps.version-getter.outputs.version }}" --config-path ./docs/pydoc/config diff --git a/testbed/deepset-ai__haystack/.github/workflows/release_notes.yml b/testbed/deepset-ai__haystack/.github/workflows/release_notes.yml new file mode 100644 index 0000000000000000000000000000000000000000..5203c0837c6d980ddea8d624f50bec0c0ca6b433 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/release_notes.yml @@ -0,0 +1,48 @@ +name: Check Release Notes + +on: + pull_request: + 
types: + - opened + - reopened + - synchronize + - ready_for_review + - labeled + - unlabeled + paths: + - "**.py" + - "pyproject.toml" + - "!.github/**/*.py" + +jobs: + reno: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # With the default value of 1, there are corner cases where tj-actions/changed-files + # fails with a `no merge base` error + fetch-depth: 0 + + - name: Get release note files + id: changed-files + uses: tj-actions/changed-files@v45 + with: + files: releasenotes/notes/*.yaml + + - name: Check release notes + if: steps.changed-files.outputs.any_changed == 'false' && !contains( github.event.pull_request.labels.*.name, 'ignore-for-release-notes') + run: | + # Check if any of the commit messages contain tags ci/docs/test + if git log --pretty=%s origin/main..HEAD | grep -E '^(ci:|docs:|test:)' > /dev/null; then + echo "Skipping release note check for commits with 'ci:', 'docs:', or 'test:' tags." + else + echo "::error::The release notes file is missing, please add one or attach the label 'ignore-for-release-notes' to this PR." 
+ exit 1 + fi + + - name: Verify release notes formatting + if: steps.changed-files.outputs.any_changed == 'true' && !contains( github.event.pull_request.labels.*.name, 'ignore-for-release-notes') + run: | + yamllint -d "{extends: default, rules: {line-length: {max: 1200}}}" ${{ steps.changed-files.outputs.all_changed_files }} diff --git a/testbed/deepset-ai__haystack/.github/workflows/release_notes_skipper.yml b/testbed/deepset-ai__haystack/.github/workflows/release_notes_skipper.yml new file mode 100644 index 0000000000000000000000000000000000000000..41c9be1e81e24556655fd40a94b1d72a93827998 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/release_notes_skipper.yml @@ -0,0 +1,22 @@ +name: Check Release Notes + +on: + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + - labeled + - unlabeled + paths-ignore: + - "**.py" + - "pyproject.toml" + - "!.github/**/*.py" + +jobs: + reno: + runs-on: ubuntu-latest + steps: + - name: Skip mandatory job + run: echo "Skipped!" diff --git a/testbed/deepset-ai__haystack/.github/workflows/stale.yml b/testbed/deepset-ai__haystack/.github/workflows/stale.yml new file mode 100644 index 0000000000000000000000000000000000000000..4ac6c92fcd05bfc58043f60d7a22231f6dfb248b --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/stale.yml @@ -0,0 +1,15 @@ +name: 'Stalebot' +on: + schedule: + - cron: '30 1 * * *' + +jobs: + makestale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + any-of-labels: 'proposal,community-triage' + stale-pr-message: 'This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 10 days.' 
+ days-before-stale: 30 + days-before-close: 10 diff --git a/testbed/deepset-ai__haystack/.github/workflows/tests.yml b/testbed/deepset-ai__haystack/.github/workflows/tests.yml new file mode 100644 index 0000000000000000000000000000000000000000..5e2af839ac0543975bc9fe788403ab21ba8c386f --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/tests.yml @@ -0,0 +1,442 @@ +# If you change this name also do it in tests_skipper.yml and ci_metrics.yml +name: Tests + +on: + workflow_dispatch: # Activate this workflow manually + push: + branches: + - main + # release branches have the form v1.9.x + - "v[0-9].*[0-9].x" + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + paths: + # Keep the list in sync with the paths defined in the `tests_skipper.yml` workflow + - "haystack/**/*.py" + - "haystack/core/pipeline/predefined/*" + - "test/**/*.py" + - "pyproject.toml" + +env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + CORE_AZURE_CS_ENDPOINT: ${{ secrets.CORE_AZURE_CS_ENDPOINT }} + CORE_AZURE_CS_API_KEY: ${{ secrets.CORE_AZURE_CS_API_KEY }} + AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} + AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + HF_API_TOKEN: ${{ secrets.HUGGINGFACE_API_KEY }} + PYTHON_VERSION: "3.9" + HATCH_VERSION: "1.13.0" + +jobs: + format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + run: pip install hatch==${{ env.HATCH_VERSION }} + + - name: Check file format + run: hatch run format-check + + - name: Check linting + run: hatch run check + + - name: Check presence of license header + run: docker run --rm -v "$(pwd):/github/workspace" ghcr.io/korandoru/hawkeye check + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && github.ref_name == 'main' + run: | + if [ "${{ 
job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + unit-tests: + name: Unit / ${{ matrix.os }} + needs: format + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - windows-latest + - macos-latest + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + id: hatch + shell: bash + run: | + pip install hatch==${{ env.HATCH_VERSION }} + echo "env=$(hatch env find test)" >> "$GITHUB_OUTPUT" + + - name: Run + run: hatch run test:unit + + - uses: actions/cache/save@v4 + id: cache + with: + path: ${{ steps.hatch.outputs.env }} + key: ${{ runner.os }}-${{ github.sha }} + + - name: Coveralls + # We upload only coverage for ubuntu as handling both os + # complicates the workflow too much for little to no gain + if: matrix.os == 'ubuntu-latest' + uses: coverallsapp/github-action@v2 + with: + path-to-lcov: coverage.xml + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && github.ref_name == 'main' + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> 
"$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + lint: + needs: unit-tests + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # With the default value of 1, there are corner cases where tj-actions/changed-files + # fails with a `no merge base` error + fetch-depth: 0 + + - name: Get changed files + id: files + uses: tj-actions/changed-files@v45 + with: + files: | + **/*.py + files_ignore: | + test/** + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + id: hatch + run: | + pip install hatch==${{ env.HATCH_VERSION }} + echo "env=$(hatch env find test)" >> "$GITHUB_OUTPUT" + + - uses: actions/cache/restore@v4 + id: cache + with: + path: ${{ steps.hatch.outputs.env }} + key: ${{ runner.os }}-${{ github.sha }} + + - name: Mypy + if: steps.files.outputs.any_changed == 'true' + run: | + mkdir .mypy_cache + hatch run test:types ${{ steps.files.outputs.all_changed_files }} + + - name: Pylint + if: steps.files.outputs.any_changed == 'true' + run: | + hatch run test:lint ${{ steps.files.outputs.all_changed_files }} + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && 
github.ref_name == 'main' + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + integration-tests-linux: + name: Integration / ubuntu-latest + needs: unit-tests + runs-on: ubuntu-latest + services: + tika: + image: apache/tika:2.9.0.0 + ports: + - 9998:9998 + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + id: hatch + shell: bash + run: | + pip install hatch==${{ env.HATCH_VERSION }} + echo "env=$(hatch env find test)" >> "$GITHUB_OUTPUT" + + - uses: actions/cache/restore@v4 + id: cache + with: + path: ${{ steps.hatch.outputs.env }} + key: ${{ runner.os }}-${{ github.sha }} + + - name: Install dependencies + run: | + sudo apt update + sudo apt install ffmpeg # for local Whisper tests + + - name: Run + run: hatch run test:integration + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && github.ref_name == 'main' + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send 
event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + integration-tests-macos: + name: Integration / macos-latest + needs: unit-tests + runs-on: macos-latest + env: + HAYSTACK_MPS_ENABLED: false + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + id: hatch + shell: bash + run: | + pip install hatch==${{ env.HATCH_VERSION }} + echo "env=$(hatch env find test)" >> "$GITHUB_OUTPUT" + + - uses: actions/cache/restore@v4 + id: cache + with: + path: ${{ steps.hatch.outputs.env }} + key: ${{ runner.os }}-${{ github.sha }} + + - name: Install dependencies + run: | + brew install ffmpeg # for local Whisper tests + + - name: Run + run: hatch run test:integration-mac + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && github.ref_name == 'main' + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: 
"Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - "url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + integration-tests-windows: + name: Integration / windows-latest + needs: unit-tests + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: Install Hatch + id: hatch + shell: bash + run: | + pip install hatch==${{ env.HATCH_VERSION }} + echo "env=$(hatch env find test)" >> "$GITHUB_OUTPUT" + + - uses: actions/cache/restore@v4 + id: cache + with: + path: ${{ steps.hatch.outputs.env }} + key: ${{ runner.os }}-${{ github.sha }} + + - name: Run + run: hatch run test:integration-windows + + - name: Calculate alert data + id: calculator + shell: bash + if: (success() || failure()) && github.ref_name == 'main' + run: | + if [ "${{ job.status }}" = "success" ]; then + echo "alert_type=success" >> "$GITHUB_OUTPUT"; + else + echo "alert_type=error" >> "$GITHUB_OUTPUT"; + fi + + - name: Send event to Datadog + if: (success() || failure()) && github.ref_name == 'main' + uses: masci/datadog@v1 + with: + api-key: ${{ secrets.CORE_DATADOG_API_KEY }} + api-url: https://api.datadoghq.eu + events: | + - title: "${{ github.workflow }} workflow" + text: "Job ${{ github.job }} in branch ${{ github.ref_name }}" + alert_type: "${{ steps.calculator.outputs.alert_type }}" + source_type_name: "Github" + host: ${{ github.repository_owner }} + tags: + - "project:${{ github.repository }}" + - "job:${{ github.job }}" + - "run_id:${{ github.run_id }}" + - "workflow:${{ github.workflow }}" + - "branch:${{ github.ref_name }}" + - 
"url:https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + trigger-catch-all: + name: Tests completed + # This job will be executed only after all the other tests + # are successful. + # This way we'll be able to mark only this test as required + # and skip it accordingly. + needs: + - integration-tests-linux + - integration-tests-macos + - integration-tests-windows + uses: ./.github/workflows/tests_skipper_workflow.yml + with: + tests_were_skipped: false diff --git a/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_trigger.yml b/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_trigger.yml new file mode 100644 index 0000000000000000000000000000000000000000..b2974539768bb70d2b31fb44c1a05b34486ae2db --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_trigger.yml @@ -0,0 +1,48 @@ +# If you change this name also do it in tests.yml and ci_metrics.yml +name: Tests + +on: + pull_request: + types: + - opened + - reopened + - synchronize + - ready_for_review + paths-ignore: + # we skip the tests unless the code changes. The problem is that GitHub will run the check anyway if any other + # file outside the code changed (e.g. the release notes). Hence, we need a second filter down below. 
+ # keep the list in sync with the paths defined in the `tests.yml` workflow + - "haystack/**/*.py" + - "haystack/core/pipeline/predefined/*" + - "test/**/*.py" + +jobs: + check_if_changed: + name: Check if changed + runs-on: ubuntu-latest + permissions: + pull-requests: read + outputs: + code_changes: ${{ steps.changes.outputs.code_changes }} + steps: + - uses: actions/checkout@v4 + - name: Check for changed code + id: changes + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 + with: + # keep the list in sync with the paths defined in the `tests.yml` workflow + filters: | + code_changes: + - haystack/**/*.py + - "haystack/templates/predefined/*" + - test/**/*.py + - "pyproject.toml" + + trigger-catch-all: + name: Tests completed + # Don't run this check if the PR contains both code and non-code changes (e.g. release notes) + needs: check_if_changed + if: needs.check_if_changed.outputs.code_changes == 'false' + uses: ./.github/workflows/tests_skipper_workflow.yml + with: + tests_were_skipped: true diff --git a/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_workflow.yml b/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_workflow.yml new file mode 100644 index 0000000000000000000000000000000000000000..4554c8275f34cbaa2ec5c6502a45af214e0ca1c5 --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/tests_skipper_workflow.yml @@ -0,0 +1,24 @@ +# If you change this name also do it in tests.yml and ci_metrics.yml +# We use a separate workflow to skip the tests if the PR contains both code and non-code changes (e.g. release notes). +# Skipping the job unfortunately doesn't work because GitHub will treat these jobs as successful even if they are +# skipped. Hence, we need to revert to a separate workflow. +name: Tests +on: + workflow_call: + inputs: + tests_were_skipped: + type: boolean + required: true + +jobs: + catch-all: + # Don't run this check if the PR contains both code and non-code changes (e.g. 
release notes) + name: Mark tests as completed + runs-on: ubuntu-latest + steps: + - name: Skip tests + if: ${{ github.event.inputs.tests_were_skipped }} + run: echo "Skipped!" + - name: Tests completed successfully + if: ${{ !github.event.inputs.tests_were_skipped }} + run: echo "Tests completed!" diff --git a/testbed/deepset-ai__haystack/.github/workflows/workflows_linting.yml b/testbed/deepset-ai__haystack/.github/workflows/workflows_linting.yml new file mode 100644 index 0000000000000000000000000000000000000000..7b5c021184c198e4fe52aad85ee6b989f5f10d6d --- /dev/null +++ b/testbed/deepset-ai__haystack/.github/workflows/workflows_linting.yml @@ -0,0 +1,23 @@ +name: Github workflows linter + +on: + pull_request: + paths: + - ".github/workflows/**" + +jobs: + lint-workflows: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - uses: actions/setup-go@v5 + + - name: Install actionlint + run: go install github.com/rhysd/actionlint/cmd/actionlint@latest + + - name: Run actionlint + env: + SHELLCHECK_OPTS: --exclude=SC2102 + run: actionlint diff --git a/testbed/deepset-ai__haystack/docker/Dockerfile.base b/testbed/deepset-ai__haystack/docker/Dockerfile.base new file mode 100644 index 0000000000000000000000000000000000000000..c5c4c76f14386d1a46c62d158bfa8124f83c1a5f --- /dev/null +++ b/testbed/deepset-ai__haystack/docker/Dockerfile.base @@ -0,0 +1,35 @@ +ARG build_image +ARG base_image + +FROM $build_image AS build-image + +ARG DEBIAN_FRONTEND=noninteractive +ARG haystack_version + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + build-essential \ + git + +# Shallow clone Haystack repo, we'll install from the local sources +RUN git clone --depth=1 --branch=${haystack_version} https://github.com/deepset-ai/haystack.git /opt/haystack +WORKDIR /opt/haystack + +# Use a virtualenv we can copy over the next build stage +RUN python3 -m venv --system-site-packages /opt/venv +ENV PATH="/opt/venv/bin:$PATH" + +# Upgrade 
setuptools due to https://nvd.nist.gov/vuln/detail/CVE-2022-40897 +RUN pip install --upgrade pip && \ + pip install --no-cache-dir -U setuptools && \ + pip install --no-cache-dir . + +FROM $base_image AS final + +COPY --from=build-image /opt/venv /opt/venv +COPY --from=deepset/xpdf:latest /opt/pdftotext /usr/local/bin + +# pdftotext requires fontconfig runtime +RUN apt-get update && apt-get install -y libfontconfig && rm -rf /var/lib/apt/lists/* + +ENV PATH="/opt/venv/bin:$PATH" diff --git a/testbed/deepset-ai__haystack/docker/README.md b/testbed/deepset-ai__haystack/docker/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d133d445037872bcdab037a4a106e8442ff04e9b --- /dev/null +++ b/testbed/deepset-ai__haystack/docker/README.md @@ -0,0 +1,67 @@ +

+ Haystack by deepset +

+ +[Haystack](https://github.com/deepset-ai/haystack) is an end-to-end LLM framework that allows you to build applications powered by LLMs, Transformer models, vector search and more. Whether you want to perform retrieval-augmented generation (RAG), document search, question answering or answer generation, Haystack can orchestrate state-of-the-art embedding models and LLMs into pipelines to build end-to-end NLP applications and solve your use case. + +## Haystack 2.0 + +For the latest version of Haystack there's only one image available: + +- `haystack:base-` contains a working Python environment with Haystack preinstalled. This image is expected to + be derived `FROM`. + +## Haystack 1.x image variants + +The Docker image for Haystack 1.x comes in six variants: +- `haystack:gpu-` contains Haystack dependencies as well as what's needed to run the REST API and UI. It comes with the CUDA runtime and is capable of running on GPUs. +- `haystack:cpu-remote-inference-` is a slimmed down version of the CPU image with the REST API and UI. It is specifically designed for PromptNode inferencing using remotely hosted models, such as Hugging Face Inference, OpenAI, Cohere, Anthropic, and similar. +- `haystack:cpu-` contains Haystack dependencies as well as what's needed to run the REST API and UI. It has no support for GPU so must be run on CPU. +- `haystack:base-gpu-` only contains the Haystack dependencies. It comes with the CUDA runtime and can run on GPUs. +- `haystack:base-cpu-remote-inference-` is a slimmed down version of the CPU image, specifically designed for PromptNode inferencing using remotely hosted models, such as Hugging Face Inference, OpenAI, Cohere, Anthropic, and similar. +- `haystack:base-cpu-` only contains the Haystack dependencies. It has no support for GPU so must be run on CPU. + +## Image Development + +Images are built with BuildKit and we use `bake` to orchestrate the process. 
+You can build a specific image by running: +```sh +docker buildx bake gpu +``` + +You can override any `variable` defined in the `docker-bake.hcl` file and build custom +images, for example if you want to use a branch from the Haystack repo, run: +```sh +HAYSTACK_VERSION=mybranch_or_tag BASE_IMAGE_TAG_SUFFIX=latest docker buildx bake gpu --no-cache +``` + +### Multi-Platform Builds + +Haystack images support multiple architectures. But depending on your operating system and Docker +environment, you might not be able to build all of them locally. + +You may encounter the following error when trying to build the image: + +``` +multiple platforms feature is currently not supported for docker driver. Please switch to a different driver +(eg. “docker buildx create --use”) +``` + +To get around this, you need to override the `platform` option and limit local builds to the same architecture as +your computer's. For example, on an Apple M1 you can limit the builds to ARM only by invoking `bake` like this: + +```sh +docker buildx bake base-cpu --set "*.platform=linux/arm64" +``` + +# License + +View [license information](https://github.com/deepset-ai/haystack/blob/main/LICENSE) for +the software contained in this image. + +As with all Docker images, these likely also contain other software which may be under +other licenses (such as Bash, etc from the base distribution, along with any direct or +indirect dependencies of the primary software being contained). + +As for any pre-built image usage, it is the image user's responsibility to ensure that any +use of this image complies with any relevant licenses for all software contained within. 
diff --git a/testbed/deepset-ai__haystack/docker/docker-bake.hcl b/testbed/deepset-ai__haystack/docker/docker-bake.hcl new file mode 100644 index 0000000000000000000000000000000000000000..72cdd3393d0570850f4a07b6f638a21c37753294 --- /dev/null +++ b/testbed/deepset-ai__haystack/docker/docker-bake.hcl @@ -0,0 +1,34 @@ +variable "HAYSTACK_VERSION" { + default = "main" +} + +variable "GITHUB_REF" { + default = "" +} + +variable "IMAGE_NAME" { + default = "deepset/haystack" +} + +variable "IMAGE_TAG_SUFFIX" { + default = "local" +} + +variable "BASE_IMAGE_TAG_SUFFIX" { + default = "local" +} + +variable "HAYSTACK_EXTRAS" { + default = "" +} + +target "base" { + dockerfile = "Dockerfile.base" + tags = ["${IMAGE_NAME}:base-${IMAGE_TAG_SUFFIX}"] + args = { + build_image = "python:3.12-slim" + base_image = "python:3.12-slim" + haystack_version = "${HAYSTACK_VERSION}" + } + platforms = ["linux/amd64", "linux/arm64"] +} diff --git a/testbed/deepset-ai__haystack/e2e/__init__.py b/testbed/deepset-ai__haystack/e2e/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c1764a6e039233b694403c434fa97c13e847f6ba --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/testbed/deepset-ai__haystack/e2e/conftest.py b/testbed/deepset-ai__haystack/e2e/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..f355bb492fe37db0bf7756bc3ffade871c03891c --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/conftest.py @@ -0,0 +1,16 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from pathlib import Path + +import pytest + +from haystack.testing.test_utils import set_all_seeds + +set_all_seeds(0) + + +@pytest.fixture +def samples_path(): + return Path(__file__).parent / "samples" diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_dense_doc_search.py 
b/testbed/deepset-ai__haystack/e2e/pipelines/test_dense_doc_search.py new file mode 100644 index 0000000000000000000000000000000000000000..39a587a10624049544039a46fd252596fc04d238 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_dense_doc_search.py @@ -0,0 +1,85 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json + +from haystack import Pipeline +from haystack.components.converters import PyPDFToDocument, TextFileToDocument +from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder +from haystack.components.joiners import DocumentJoiner +from haystack.components.preprocessors import DocumentCleaner, DocumentSplitter +from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever +from haystack.components.routers import FileTypeRouter +from haystack.components.writers import DocumentWriter +from haystack.document_stores.in_memory import InMemoryDocumentStore + + +def test_dense_doc_search_pipeline(tmp_path, samples_path): + # Create the indexing pipeline + indexing_pipeline = Pipeline() + indexing_pipeline.add_component( + instance=FileTypeRouter(mime_types=["text/plain", "application/pdf"]), name="file_type_router" + ) + indexing_pipeline.add_component(instance=TextFileToDocument(), name="text_file_converter") + indexing_pipeline.add_component(instance=PyPDFToDocument(), name="pdf_file_converter") + indexing_pipeline.add_component(instance=DocumentJoiner(), name="joiner") + indexing_pipeline.add_component(instance=DocumentCleaner(), name="cleaner") + indexing_pipeline.add_component( + instance=DocumentSplitter(split_by="sentence", split_length=250, split_overlap=30), name="splitter" + ) + indexing_pipeline.add_component( + instance=SentenceTransformersDocumentEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), name="embedder" + ) + 
indexing_pipeline.add_component(instance=DocumentWriter(document_store=InMemoryDocumentStore()), name="writer") + + indexing_pipeline.connect("file_type_router.text/plain", "text_file_converter.sources") + indexing_pipeline.connect("file_type_router.application/pdf", "pdf_file_converter.sources") + indexing_pipeline.connect("text_file_converter.documents", "joiner.documents") + indexing_pipeline.connect("pdf_file_converter.documents", "joiner.documents") + indexing_pipeline.connect("joiner.documents", "cleaner.documents") + indexing_pipeline.connect("cleaner.documents", "splitter.documents") + indexing_pipeline.connect("splitter.documents", "embedder.documents") + indexing_pipeline.connect("embedder.documents", "writer.documents") + + # Draw the indexing pipeline + indexing_pipeline.draw(tmp_path / "test_dense_doc_search_indexing_pipeline.png") + + # Serialize the indexing pipeline to YAML. + with open(tmp_path / "test_dense_doc_search_indexing_pipeline.yaml", "w") as f: + indexing_pipeline.dump(f) + + # Load the indexing pipeline back + with open(tmp_path / "test_dense_doc_search_indexing_pipeline.yaml", "r") as f: + indexing_pipeline = Pipeline.load(f) + + indexing_result = indexing_pipeline.run({"file_type_router": {"sources": list(samples_path.iterdir())}}) + filled_document_store = indexing_pipeline.get_component("writer").document_store + + assert indexing_result["writer"]["documents_written"] == 2 + assert filled_document_store.count_documents() == 2 + + # Create the querying pipeline + query_pipeline = Pipeline() + query_pipeline.add_component( + instance=SentenceTransformersTextEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), name="text_embedder" + ) + query_pipeline.add_component( + instance=InMemoryEmbeddingRetriever(document_store=filled_document_store, top_k=20), name="embedding_retriever" + ) + query_pipeline.connect("text_embedder", "embedding_retriever") + + querying_result = query_pipeline.run({"text_embedder": {"text": "Who lives in 
Rome?"}}) + assert querying_result["embedding_retriever"]["documents"][0].content == "My name is Giorgio and I live in Rome." + + # Draw the querying pipeline + query_pipeline.draw(tmp_path / "test_dense_doc_search_query_pipeline.png") + + # Serialize the querying pipeline to JSON + with open(tmp_path / "test_dense_doc_search_query_pipeline.json", "w") as f: + print(json.dumps(query_pipeline.to_dict(), indent=4)) + json.dump(query_pipeline.to_dict(), f) + + # Load the querying pipeline back + with open(tmp_path / "test_dense_doc_search_query_pipeline.json", "r") as f: + query_pipeline = Pipeline.from_dict(json.load(f)) diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_evaluation_pipeline.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_evaluation_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..1cd41fa4d8b7b0cc9caa4b92dca013ba4b12c8b4 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_evaluation_pipeline.py @@ -0,0 +1,292 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import List + +import pytest + +from haystack import Document, Pipeline +from haystack.components.builders import AnswerBuilder, PromptBuilder +from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder +from haystack.components.evaluators import ( + ContextRelevanceEvaluator, + DocumentMAPEvaluator, + DocumentMRREvaluator, + DocumentRecallEvaluator, + FaithfulnessEvaluator, + SASEvaluator, +) +from haystack.components.evaluators.document_recall import RecallMode +from haystack.components.generators import OpenAIGenerator +from haystack.components.retrievers import InMemoryEmbeddingRetriever +from haystack.components.writers import DocumentWriter +from haystack.document_stores.in_memory import InMemoryDocumentStore +from haystack.document_stores.types import DuplicatePolicy +from haystack.evaluation import 
EvaluationRunResult + +EMBEDDINGS_MODEL = "sentence-transformers/all-MiniLM-L6-v2" + + +def indexing_pipeline(documents: List[Document]): + """Indexing the documents""" + document_store = InMemoryDocumentStore() + doc_writer = DocumentWriter(document_store=document_store, policy=DuplicatePolicy.SKIP) + doc_embedder = SentenceTransformersDocumentEmbedder(model=EMBEDDINGS_MODEL, progress_bar=False) + ingestion_pipe = Pipeline() + ingestion_pipe.add_component(instance=doc_embedder, name="doc_embedder") # type: ignore + ingestion_pipe.add_component(instance=doc_writer, name="doc_writer") # type: ignore + ingestion_pipe.connect("doc_embedder.documents", "doc_writer.documents") + ingestion_pipe.run({"doc_embedder": {"documents": documents}}) + return document_store + + +def rag_pipeline(document_store: InMemoryDocumentStore, top_k: int): # type: ignore + """RAG pipeline""" + template = """ + You have to answer the following question based on the given context information only. + + Context: + {% for document in documents %} + {{ document.content }} + {% endfor %} + + Question: {{question}} + Answer: + """ + rag = Pipeline() + rag.add_component("embedder", SentenceTransformersTextEmbedder(model=EMBEDDINGS_MODEL, progress_bar=False)) # type: ignore + rag.add_component("retriever", InMemoryEmbeddingRetriever(document_store, top_k=top_k)) # type: ignore + rag.add_component("prompt_builder", PromptBuilder(template=template)) # type: ignore + rag.add_component("generator", OpenAIGenerator(model="gpt-4o-mini")) # type: ignore + rag.add_component("answer_builder", AnswerBuilder()) # type: ignore + rag.connect("embedder", "retriever.query_embedding") + rag.connect("retriever", "prompt_builder.documents") + rag.connect("prompt_builder", "generator") + rag.connect("generator.replies", "answer_builder.replies") + rag.connect("generator.meta", "answer_builder.meta") + rag.connect("retriever", "answer_builder.documents") + + return rag + + +def evaluation_pipeline(): + """ + Create an 
evaluation pipeline with the following evaluators: + + - DocumentMRREvaluator + - FaithfulnessEvaluator + - SASEvaluator + - DocumentMAPEvaluator + - DocumentRecallEvaluator + - ContextRelevanceEvaluator + """ + eval_pipeline = Pipeline() + eval_pipeline.add_component("doc_mrr", DocumentMRREvaluator()) + eval_pipeline.add_component("groundedness", FaithfulnessEvaluator()) + eval_pipeline.add_component("sas", SASEvaluator(model=EMBEDDINGS_MODEL)) + eval_pipeline.add_component("doc_map", DocumentMAPEvaluator()) + eval_pipeline.add_component("doc_recall_single_hit", DocumentRecallEvaluator(mode=RecallMode.SINGLE_HIT)) + eval_pipeline.add_component("doc_recall_multi_hit", DocumentRecallEvaluator(mode=RecallMode.MULTI_HIT)) + eval_pipeline.add_component("relevance", ContextRelevanceEvaluator()) + + return eval_pipeline + + +def built_eval_input(questions, truth_docs, truth_answers, retrieved_docs, contexts, pred_answers): + """Helper function to build the input for the evaluation pipeline""" + return { + "doc_mrr": {"ground_truth_documents": truth_docs, "retrieved_documents": retrieved_docs}, + "groundedness": {"questions": questions, "contexts": contexts, "predicted_answers": pred_answers}, + "sas": {"predicted_answers": pred_answers, "ground_truth_answers": truth_answers}, + "doc_map": {"ground_truth_documents": truth_docs, "retrieved_documents": retrieved_docs}, + "doc_recall_single_hit": {"ground_truth_documents": truth_docs, "retrieved_documents": retrieved_docs}, + "doc_recall_multi_hit": {"ground_truth_documents": truth_docs, "retrieved_documents": retrieved_docs}, + "relevance": {"questions": questions, "contexts": contexts}, + } + + +def run_rag_pipeline(documents, evaluation_questions, rag_pipeline_a): + """ + Run the RAG pipeline and return the contexts, predicted answers, retrieved documents and ground truth documents + """ + + truth_docs = [] + retrieved_docs = [] + contexts = [] + predicted_answers = [] + + for q in evaluation_questions: + response = 
rag_pipeline_a.run( + { + "embedder": {"text": q["question"]}, + "prompt_builder": {"question": q["question"]}, + "answer_builder": {"query": q["question"]}, + } + ) + truth_docs.append([doc for doc in documents if doc.meta["name"] in q["ground_truth_doc"] and doc.content]) + retrieved_docs.append(response["answer_builder"]["answers"][0].documents) + contexts.append([doc.content for doc in response["answer_builder"]["answers"][0].documents]) + predicted_answers.append(response["answer_builder"]["answers"][0].data) + + return contexts, predicted_answers, retrieved_docs, truth_docs + + +def built_input_for_results_eval(rag_results): + """Helper function to build the input for the results evaluation""" + return { + "Mean Reciprocal Rank": { + "individual_scores": rag_results["doc_mrr"]["individual_scores"], + "score": rag_results["doc_mrr"]["score"], + }, + "Semantic Answer Similarity": { + "individual_scores": rag_results["sas"]["individual_scores"], + "score": rag_results["sas"]["score"], + }, + "Faithfulness": { + "individual_scores": rag_results["groundedness"]["individual_scores"], + "score": rag_results["groundedness"]["score"], + }, + "Document MAP": { + "individual_scores": rag_results["doc_map"]["individual_scores"], + "score": rag_results["doc_map"]["score"], + }, + "Document Recall Single Hit": { + "individual_scores": rag_results["doc_recall_single_hit"]["individual_scores"], + "score": rag_results["doc_recall_single_hit"]["score"], + }, + "Document Recall Multi Hit": { + "individual_scores": rag_results["doc_recall_multi_hit"]["individual_scores"], + "score": rag_results["doc_recall_multi_hit"]["score"], + }, + "Contextual Relevance": { + "individual_scores": rag_results["relevance"]["individual_scores"], + "score": rag_results["relevance"]["score"], + }, + } + + +@pytest.mark.skipif( + not os.environ.get("OPENAI_API_KEY", None), + reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.", +) +def 
test_evaluation_pipeline(samples_path): + """Test an evaluation pipeline""" + eval_questions = [ + { + "question": 'What falls within the term "cultural anthropology"?', + "answer": "the ideology and analytical stance of cultural relativism", + "ground_truth_doc": ["Culture.txt"], + }, + { + "question": "Who was the spiritual guide during the Protestant Reformation?", + "answer": "Martin Bucer", + "ground_truth_doc": ["Strasbourg.txt"], + }, + { + "question": "What is materialism?", + "answer": "a form of philosophical monism", + "ground_truth_doc": ["Materialism.txt"], + }, + ] + + questions = [q["question"] for q in eval_questions] + truth_answers = [q["answer"] for q in eval_questions] + + # indexing documents + docs = [] + full_path = os.path.join(str(samples_path) + "/test_documents/") + for article in os.listdir(full_path): + with open(f"{full_path}/{article}", "r") as f: + for text in f.read().split("\n"): + if doc := Document(content=text, meta={"name": article}) if text else None: + docs.append(doc) + doc_store = indexing_pipeline(docs) + + # running the RAG pipeline A + evaluation pipeline + rag_pipeline_a = rag_pipeline(doc_store, top_k=2) + contexts_a, pred_answers_a, retrieved_docs_a, truth_docs = run_rag_pipeline(docs, eval_questions, rag_pipeline_a) + eval_pipeline = evaluation_pipeline() + eval_input = built_eval_input(questions, truth_docs, truth_answers, retrieved_docs_a, contexts_a, pred_answers_a) + results_rag_a = eval_pipeline.run(eval_input) + + # running the evaluation EvaluationRunResult + inputs_a = { + "question": questions, + "contexts": contexts_a, + "answer": truth_answers, + "predicted_answer": pred_answers_a, + } + results_a = built_input_for_results_eval(results_rag_a) + evaluation_result_a = EvaluationRunResult(run_name="rag_pipeline_a", results=results_a, inputs=inputs_a) + df_score_report = evaluation_result_a.score_report() + + # assert the score report has all the metrics + assert len(df_score_report) == 7 + assert 
list(df_score_report.columns) == ["metrics", "score"] + assert list(df_score_report.metrics) == [ + "Mean Reciprocal Rank", + "Semantic Answer Similarity", + "Faithfulness", + "Document MAP", + "Document Recall Single Hit", + "Document Recall Multi Hit", + "Contextual Relevance", + ] + + # assert the evaluation result has all the metrics, inputs and questions + df = evaluation_result_a.to_pandas() + assert list(df.columns) == [ + "question", + "contexts", + "answer", + "predicted_answer", + "Mean Reciprocal Rank", + "Semantic Answer Similarity", + "Faithfulness", + "Document MAP", + "Document Recall Single Hit", + "Document Recall Multi Hit", + "Contextual Relevance", + ] + assert len(df) == 3 + + # running the RAG pipeline B + rag_pipeline_b = rag_pipeline(doc_store, top_k=4) + contexts_b, pred_answers_b, retrieved_docs_b, truth_docs = run_rag_pipeline(docs, eval_questions, rag_pipeline_b) + eval_input = built_eval_input(questions, truth_docs, truth_answers, retrieved_docs_b, contexts_b, pred_answers_b) + results_rag_b = eval_pipeline.run(eval_input) + + inputs_b = { + "question": questions, + "contexts": contexts_b, + "answer": truth_answers, + "predicted_answer": pred_answers_b, + } + results_b = built_input_for_results_eval(results_rag_b) + evaluation_result_b = EvaluationRunResult(run_name="rag_pipeline_b", results=results_b, inputs=inputs_b) + df_comparative = evaluation_result_a.comparative_individual_scores_report(evaluation_result_b) + + # assert the comparative score report has all the metrics, inputs and questions + assert len(df_comparative) == 3 + assert list(df_comparative.columns) == [ + "question", + "contexts", + "answer", + "predicted_answer", + "rag_pipeline_a_Mean Reciprocal Rank", + "rag_pipeline_a_Semantic Answer Similarity", + "rag_pipeline_a_Faithfulness", + "rag_pipeline_a_Document MAP", + "rag_pipeline_a_Document Recall Single Hit", + "rag_pipeline_a_Document Recall Multi Hit", + "rag_pipeline_a_Contextual Relevance", + 
"rag_pipeline_b_Mean Reciprocal Rank", + "rag_pipeline_b_Semantic Answer Similarity", + "rag_pipeline_b_Faithfulness", + "rag_pipeline_b_Document MAP", + "rag_pipeline_b_Document Recall Single Hit", + "rag_pipeline_b_Document Recall Multi Hit", + "rag_pipeline_b_Contextual Relevance", + ] diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_extractive_qa_pipeline.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_extractive_qa_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..d30f69cef3d0c5ed268b02437650fc067f1e5e7f --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_extractive_qa_pipeline.py @@ -0,0 +1,73 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json + +from haystack import Document, Pipeline +from haystack.components.readers import ExtractiveReader +from haystack.components.retrievers.in_memory import InMemoryBM25Retriever +from haystack.document_stores.in_memory import InMemoryDocumentStore + + +def test_extractive_qa_pipeline(tmp_path): + # Create the pipeline + qa_pipeline = Pipeline() + qa_pipeline.add_component(instance=InMemoryBM25Retriever(document_store=InMemoryDocumentStore()), name="retriever") + qa_pipeline.add_component(instance=ExtractiveReader(model="deepset/tinyroberta-squad2"), name="reader") + qa_pipeline.connect("retriever", "reader") + + # Draw the pipeline + qa_pipeline.draw(tmp_path / "test_extractive_qa_pipeline.png") + + # Serialize the pipeline to YAML + with open(tmp_path / "test_bm25_rag_pipeline.yaml", "w") as f: + qa_pipeline.dump(f) + + # Load the pipeline back + with open(tmp_path / "test_bm25_rag_pipeline.yaml", "r") as f: + qa_pipeline = Pipeline.load(f) + + # Populate the document store + documents = [ + Document(content="My name is Jean and I live in Paris."), + Document(content="My name is Mark and I live in Berlin."), + Document(content="My name is Giorgio and I live in Rome."), + ] + 
qa_pipeline.get_component("retriever").document_store.write_documents(documents) + + # Query and assert + questions = ["Who lives in Paris?", "Who lives in Berlin?", "Who lives in Rome?"] + answers_spywords = ["Jean", "Mark", "Giorgio"] + + for question, spyword, doc in zip(questions, answers_spywords, documents): + result = qa_pipeline.run({"retriever": {"query": question}, "reader": {"query": question}}) + + extracted_answers = result["reader"]["answers"] + + # we expect at least one real answer and no_answer + assert len(extracted_answers) > 1 + + # the best answer should contain the spyword + assert spyword in extracted_answers[0].data + + # no_answer + assert extracted_answers[-1].data is None + + # since these questions are easily answerable, the best answer should have higher score than no_answer + assert extracted_answers[0].score >= extracted_answers[-1].score + + for answer in extracted_answers: + assert answer.query == question + + assert hasattr(answer, "score") + assert hasattr(answer, "document_offset") + + assert hasattr(answer, "document") + + # the top answer is extracted from the correct document + top_answer = extracted_answers[0] + if top_answer.document is not None: + if top_answer.document.id != doc.id: + print(top_answer.document.id, doc.id) + assert top_answer.document.id == doc.id diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_hybrid_doc_search_pipeline.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_hybrid_doc_search_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..2e4da6f449433c765e57d53a6ef7f80ded9f56d4 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_hybrid_doc_search_pipeline.py @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + + +from haystack import Document, Pipeline +from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder +from 
haystack.components.joiners.document_joiner import DocumentJoiner +from haystack.components.rankers import TransformersSimilarityRanker +from haystack.components.retrievers.in_memory import InMemoryBM25Retriever, InMemoryEmbeddingRetriever +from haystack.document_stores.in_memory import InMemoryDocumentStore + + +def test_hybrid_doc_search_pipeline(tmp_path): + # Create the pipeline + document_store = InMemoryDocumentStore() + hybrid_pipeline = Pipeline() + hybrid_pipeline.add_component(instance=InMemoryBM25Retriever(document_store=document_store), name="bm25_retriever") + hybrid_pipeline.add_component( + instance=SentenceTransformersTextEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), name="text_embedder" + ) + hybrid_pipeline.add_component( + instance=InMemoryEmbeddingRetriever(document_store=document_store), name="embedding_retriever" + ) + hybrid_pipeline.add_component(instance=DocumentJoiner(), name="joiner") + hybrid_pipeline.add_component(instance=TransformersSimilarityRanker(top_k=20), name="ranker") + + hybrid_pipeline.connect("bm25_retriever", "joiner") + hybrid_pipeline.connect("text_embedder", "embedding_retriever") + hybrid_pipeline.connect("embedding_retriever", "joiner") + hybrid_pipeline.connect("joiner", "ranker") + + # Draw the pipeline + hybrid_pipeline.draw(tmp_path / "test_hybrid_doc_search_pipeline.png") + + # Serialize the pipeline to YAML + with open(tmp_path / "test_hybrid_doc_search_pipeline.yaml", "w") as f: + hybrid_pipeline.dump(f) + + # Load the pipeline back + with open(tmp_path / "test_hybrid_doc_search_pipeline.yaml", "r") as f: + hybrid_pipeline = Pipeline.load(f) + + # Populate the document store + documents = [ + Document(content="My name is Jean and I live in Paris."), + Document(content="My name is Mark and I live in Berlin."), + Document(content="My name is Mario and I live in the capital of Italy."), + Document(content="My name is Giorgio and I live in Rome."), + ] + doc_embedder = 
SentenceTransformersDocumentEmbedder(model="sentence-transformers/all-MiniLM-L6-v2") + doc_embedder.warm_up() + embedded_documents = doc_embedder.run(documents=documents)["documents"] + hybrid_pipeline.get_component("embedding_retriever").document_store.write_documents(embedded_documents) + + query = "Who lives in Rome?" + result = hybrid_pipeline.run( + {"bm25_retriever": {"query": query}, "text_embedder": {"text": query}, "ranker": {"query": query}} + ) + assert result["ranker"]["documents"][0].content == "My name is Giorgio and I live in Rome." + assert result["ranker"]["documents"][1].content == "My name is Mario and I live in the capital of Italy." diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_named_entity_extractor.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_named_entity_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..2fc15a9a1a29c9e3d7ca77180e4f8b4fe3457190 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_named_entity_extractor.py @@ -0,0 +1,108 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import pytest + +from haystack import Document, Pipeline +from haystack.components.extractors import NamedEntityAnnotation, NamedEntityExtractor, NamedEntityExtractorBackend + + +@pytest.fixture +def raw_texts(): + return [ + "My name is Clara and I live in Berkeley, California.", + "I'm Merlin, the happy pig!", + "New York State declared a state of emergency after the announcement of the end of the world.", + "", # Intentionally empty. 
+ ] + + +@pytest.fixture +def hf_annotations(): + return [ + [ + NamedEntityAnnotation(entity="PER", start=11, end=16), + NamedEntityAnnotation(entity="LOC", start=31, end=39), + NamedEntityAnnotation(entity="LOC", start=41, end=51), + ], + [NamedEntityAnnotation(entity="PER", start=4, end=10)], + [NamedEntityAnnotation(entity="LOC", start=0, end=14)], + [], + ] + + +@pytest.fixture +def spacy_annotations(): + return [ + [ + NamedEntityAnnotation(entity="PERSON", start=11, end=16), + NamedEntityAnnotation(entity="GPE", start=31, end=39), + NamedEntityAnnotation(entity="GPE", start=41, end=51), + ], + [NamedEntityAnnotation(entity="PERSON", start=4, end=10)], + [NamedEntityAnnotation(entity="GPE", start=0, end=14)], + [], + ] + + +def test_ner_extractor_init(): + extractor = NamedEntityExtractor(backend=NamedEntityExtractorBackend.HUGGING_FACE, model="dslim/bert-base-NER") + + with pytest.raises(RuntimeError, match=r"not warmed up"): + extractor.run(documents=[]) + + assert not extractor.initialized + extractor.warm_up() + assert extractor.initialized + + +@pytest.mark.parametrize("batch_size", [1, 3]) +def test_ner_extractor_hf_backend(raw_texts, hf_annotations, batch_size): + extractor = NamedEntityExtractor(backend=NamedEntityExtractorBackend.HUGGING_FACE, model="dslim/bert-base-NER") + extractor.warm_up() + + _extract_and_check_predictions(extractor, raw_texts, hf_annotations, batch_size) + + +@pytest.mark.parametrize("batch_size", [1, 3]) +def test_ner_extractor_spacy_backend(raw_texts, spacy_annotations, batch_size): + extractor = NamedEntityExtractor(backend=NamedEntityExtractorBackend.SPACY, model="en_core_web_trf") + extractor.warm_up() + + _extract_and_check_predictions(extractor, raw_texts, spacy_annotations, batch_size) + + +@pytest.mark.parametrize("batch_size", [1, 3]) +def test_ner_extractor_in_pipeline(raw_texts, hf_annotations, batch_size): + pipeline = Pipeline() + pipeline.add_component( + name="ner_extractor", + 
instance=NamedEntityExtractor(backend=NamedEntityExtractorBackend.HUGGING_FACE, model="dslim/bert-base-NER"), + ) + + outputs = pipeline.run( + {"ner_extractor": {"documents": [Document(content=text) for text in raw_texts], "batch_size": batch_size}} + )["ner_extractor"]["documents"] + predicted = [NamedEntityExtractor.get_stored_annotations(doc) for doc in outputs] + _check_predictions(predicted, hf_annotations) + + +def _extract_and_check_predictions(extractor, texts, expected, batch_size): + docs = [Document(content=text) for text in texts] + outputs = extractor.run(documents=docs, batch_size=batch_size)["documents"] + assert all(id(a) == id(b) for a, b in zip(docs, outputs)) + predicted = [NamedEntityExtractor.get_stored_annotations(doc) for doc in outputs] + + _check_predictions(predicted, expected) + + +def _check_predictions(predicted, expected): + assert len(predicted) == len(expected) + for pred, exp in zip(predicted, expected): + assert len(pred) == len(exp) + + for a, b in zip(pred, exp): + assert a.entity == b.entity + assert a.start == b.start + assert a.end == b.end diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_preprocessing_pipeline.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_preprocessing_pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..82375f89d8ed0381a21d3a3db279c5fd7c587222 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_preprocessing_pipeline.py @@ -0,0 +1,91 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json + +from haystack import Pipeline +from haystack.components.classifiers import DocumentLanguageClassifier +from haystack.components.converters import TextFileToDocument +from haystack.components.embedders import SentenceTransformersDocumentEmbedder +from haystack.components.preprocessors import DocumentCleaner, DocumentSplitter +from haystack.components.routers import FileTypeRouter, MetadataRouter +from 
haystack.components.writers import DocumentWriter +from haystack.document_stores.in_memory import InMemoryDocumentStore + + +def test_preprocessing_pipeline(tmp_path): + # Create the pipeline and its components + document_store = InMemoryDocumentStore() + preprocessing_pipeline = Pipeline() + preprocessing_pipeline.add_component(instance=FileTypeRouter(mime_types=["text/plain"]), name="file_type_router") + preprocessing_pipeline.add_component(instance=TextFileToDocument(), name="text_file_converter") + preprocessing_pipeline.add_component(instance=DocumentLanguageClassifier(), name="language_classifier") + preprocessing_pipeline.add_component( + instance=MetadataRouter(rules={"en": {"field": "language", "operator": "==", "value": "en"}}), name="router" + ) + preprocessing_pipeline.add_component(instance=DocumentCleaner(), name="cleaner") + preprocessing_pipeline.add_component( + instance=DocumentSplitter(split_by="sentence", split_length=1), name="splitter" + ) + preprocessing_pipeline.add_component( + instance=SentenceTransformersDocumentEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), name="embedder" + ) + preprocessing_pipeline.add_component(instance=DocumentWriter(document_store=document_store), name="writer") + preprocessing_pipeline.connect("file_type_router.text/plain", "text_file_converter.sources") + preprocessing_pipeline.connect("text_file_converter.documents", "language_classifier.documents") + preprocessing_pipeline.connect("language_classifier.documents", "router.documents") + preprocessing_pipeline.connect("router.en", "cleaner.documents") + preprocessing_pipeline.connect("cleaner.documents", "splitter.documents") + preprocessing_pipeline.connect("splitter.documents", "embedder.documents") + preprocessing_pipeline.connect("embedder.documents", "writer.documents") + + # Draw the pipeline + preprocessing_pipeline.draw(tmp_path / "test_preprocessing_pipeline.png") + + # Serialize the pipeline to YAML + with open(tmp_path / 
"test_preprocessing_pipeline.yaml", "w") as f: + preprocessing_pipeline.dump(f) + + # Load the pipeline back + with open(tmp_path / "test_preprocessing_pipeline.yaml", "r") as f: + preprocessing_pipeline = Pipeline.load(f) + + # Write a txt file + with open(tmp_path / "test_file_english.txt", "w") as f: + f.write( + "This is an english sentence. There is more to it. It's a long text." + "Spans multiple lines." + "" + "Even contains empty lines. And extra whitespaces." + ) + + # Write a txt file + with open(tmp_path / "test_file_german.txt", "w") as f: + f.write("Ein deutscher Satz ohne Verb.") + + # Add two txt files and one non-txt file + paths = [ + tmp_path / "test_file_english.txt", + tmp_path / "test_file_german.txt", + tmp_path / "test_preprocessing_pipeline.json", + ] + + result = preprocessing_pipeline.run({"file_type_router": {"sources": paths}}) + + assert result["writer"]["documents_written"] == 6 + filled_document_store = preprocessing_pipeline.get_component("writer").document_store + assert filled_document_store.count_documents() == 6 + + # Check preprocessed texts + stored_documents = filled_document_store.filter_documents() + expected_texts = [ + "This is an english sentence.", + " There is more to it.", + " It's a long text.", + "Spans multiple lines.", + "Even contains empty lines.", + " And extra whitespaces.", + ] + assert expected_texts == [document.content for document in stored_documents] + assert all(document.meta["language"] == "en" for document in stored_documents) diff --git a/testbed/deepset-ai__haystack/e2e/pipelines/test_rag_pipelines_e2e.py b/testbed/deepset-ai__haystack/e2e/pipelines/test_rag_pipelines_e2e.py new file mode 100644 index 0000000000000000000000000000000000000000..5727e9ee0ba00a31dac09345f1efbd665551cce8 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/pipelines/test_rag_pipelines_e2e.py @@ -0,0 +1,163 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json 
+import os + +import pytest + +from haystack import Document, Pipeline +from haystack.components.builders.answer_builder import AnswerBuilder +from haystack.components.builders.prompt_builder import PromptBuilder +from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder +from haystack.components.generators import OpenAIGenerator +from haystack.components.retrievers.in_memory import InMemoryBM25Retriever, InMemoryEmbeddingRetriever +from haystack.components.writers import DocumentWriter +from haystack.document_stores.in_memory import InMemoryDocumentStore + + +@pytest.mark.skipif( + not os.environ.get("OPENAI_API_KEY", None), + reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.", +) +def test_bm25_rag_pipeline(tmp_path): + # Create the RAG pipeline + prompt_template = """ + Given these documents, answer the question.\nDocuments: + {% for doc in documents %} + {{ doc.content }} + {% endfor %} + + \nQuestion: {{question}} + \nAnswer: + """ + rag_pipeline = Pipeline() + rag_pipeline.add_component(instance=InMemoryBM25Retriever(document_store=InMemoryDocumentStore()), name="retriever") + rag_pipeline.add_component(instance=PromptBuilder(template=prompt_template), name="prompt_builder") + rag_pipeline.add_component(instance=OpenAIGenerator(), name="llm") + rag_pipeline.add_component(instance=AnswerBuilder(), name="answer_builder") + rag_pipeline.connect("retriever", "prompt_builder.documents") + rag_pipeline.connect("prompt_builder", "llm") + rag_pipeline.connect("llm.replies", "answer_builder.replies") + rag_pipeline.connect("llm.meta", "answer_builder.meta") + rag_pipeline.connect("retriever", "answer_builder.documents") + + # Draw the pipeline + rag_pipeline.draw(tmp_path / "test_bm25_rag_pipeline.png") + + # Serialize the pipeline to YAML + with open(tmp_path / "test_bm25_rag_pipeline.yaml", "w") as f: + rag_pipeline.dump(f) + + # Load the pipeline back + with 
open(tmp_path / "test_bm25_rag_pipeline.yaml", "r") as f: + rag_pipeline = Pipeline.load(f) + + # Populate the document store + documents = [ + Document(content="My name is Jean and I live in Paris."), + Document(content="My name is Mark and I live in Berlin."), + Document(content="My name is Giorgio and I live in Rome."), + ] + rag_pipeline.get_component("retriever").document_store.write_documents(documents) + + # Query and assert + questions = ["Who lives in Paris?", "Who lives in Berlin?", "Who lives in Rome?"] + answers_spywords = ["Jean", "Mark", "Giorgio"] + + for question, spyword in zip(questions, answers_spywords): + result = rag_pipeline.run( + { + "retriever": {"query": question}, + "prompt_builder": {"question": question}, + "answer_builder": {"query": question}, + } + ) + + assert len(result["answer_builder"]["answers"]) == 1 + generated_answer = result["answer_builder"]["answers"][0] + assert spyword in generated_answer.data + assert generated_answer.query == question + assert hasattr(generated_answer, "documents") + assert hasattr(generated_answer, "meta") + + +@pytest.mark.skipif( + not os.environ.get("OPENAI_API_KEY", None), + reason="Export an env var called OPENAI_API_KEY containing the OpenAI API key to run this test.", +) +def test_embedding_retrieval_rag_pipeline(tmp_path): + # Create the RAG pipeline + prompt_template = """ + Given these documents, answer the question.\nDocuments: + {% for doc in documents %} + {{ doc.content }} + {% endfor %} + + \nQuestion: {{question}} + \nAnswer: + """ + rag_pipeline = Pipeline() + rag_pipeline.add_component( + instance=SentenceTransformersTextEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), name="text_embedder" + ) + rag_pipeline.add_component( + instance=InMemoryEmbeddingRetriever(document_store=InMemoryDocumentStore()), name="retriever" + ) + rag_pipeline.add_component(instance=PromptBuilder(template=prompt_template), name="prompt_builder") + 
rag_pipeline.add_component(instance=OpenAIGenerator(), name="llm") + rag_pipeline.add_component(instance=AnswerBuilder(), name="answer_builder") + rag_pipeline.connect("text_embedder", "retriever") + rag_pipeline.connect("retriever", "prompt_builder.documents") + rag_pipeline.connect("prompt_builder", "llm") + rag_pipeline.connect("llm.replies", "answer_builder.replies") + rag_pipeline.connect("llm.meta", "answer_builder.meta") + rag_pipeline.connect("retriever", "answer_builder.documents") + + # Draw the pipeline + rag_pipeline.draw(tmp_path / "test_embedding_rag_pipeline.png") + + # Serialize the pipeline to JSON + with open(tmp_path / "test_embedding_rag_pipeline.json", "w") as f: + json.dump(rag_pipeline.to_dict(), f) + + # Load the pipeline back + with open(tmp_path / "test_embedding_rag_pipeline.json", "r") as f: + rag_pipeline = Pipeline.from_dict(json.load(f)) + + # Populate the document store + documents = [ + Document(content="My name is Jean and I live in Paris."), + Document(content="My name is Mark and I live in Berlin."), + Document(content="My name is Giorgio and I live in Rome."), + ] + document_store = rag_pipeline.get_component("retriever").document_store + indexing_pipeline = Pipeline() + indexing_pipeline.add_component( + instance=SentenceTransformersDocumentEmbedder(model="sentence-transformers/all-MiniLM-L6-v2"), + name="document_embedder", + ) + indexing_pipeline.add_component(instance=DocumentWriter(document_store=document_store), name="document_writer") + indexing_pipeline.connect("document_embedder", "document_writer") + indexing_pipeline.run({"document_embedder": {"documents": documents}}) + + # Query and assert + questions = ["Who lives in Paris?", "Who lives in Berlin?", "Who lives in Rome?"] + answers_spywords = ["Jean", "Mark", "Giorgio"] + + for question, spyword in zip(questions, answers_spywords): + result = rag_pipeline.run( + { + "text_embedder": {"text": question}, + "prompt_builder": {"question": question}, + "answer_builder": 
{"query": question}, + } + ) + + assert len(result["answer_builder"]["answers"]) == 1 + generated_answer = result["answer_builder"]["answers"][0] + assert spyword in generated_answer.data + assert generated_answer.query == question + assert hasattr(generated_answer, "documents") + assert hasattr(generated_answer, "meta") diff --git a/testbed/deepset-ai__haystack/e2e/samples/doc_1.txt b/testbed/deepset-ai__haystack/e2e/samples/doc_1.txt new file mode 100644 index 0000000000000000000000000000000000000000..1d3da15eb978b2a3108d8c06fa9ae55d65bd072d --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/samples/doc_1.txt @@ -0,0 +1 @@ +My name is Giorgio and I live in Rome. diff --git a/testbed/deepset-ai__haystack/e2e/samples/sample_pdf_1.pdf b/testbed/deepset-ai__haystack/e2e/samples/sample_pdf_1.pdf new file mode 100644 index 0000000000000000000000000000000000000000..87259b897f83b462f521276bf32d210ea008bcd3 Binary files /dev/null and b/testbed/deepset-ai__haystack/e2e/samples/sample_pdf_1.pdf differ diff --git a/testbed/deepset-ai__haystack/e2e/samples/test_documents/Culture.txt b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Culture.txt new file mode 100644 index 0000000000000000000000000000000000000000..64843b52c0657435b8233ebd44a2e62f4f98d7ae --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Culture.txt @@ -0,0 +1,24 @@ +Cambridge English Dictionary states that culture is, "the way of life, especially the general customs and beliefs, of a particular group of people at a particular time." Terror Management Theory posits that culture is a series of activities and worldviews that provide humans with the illusion of being individuals of value in a world meaning—raising themselves above the merely physical aspects of existence, in order to deny the animal insignificance and death that Homo Sapiens became aware of when they acquired a larger brain. 
+As a defining aspect of what it means to be human, culture is a central concept in anthropology, encompassing the range of phenomena that are transmitted through social learning in human societies. The word is used in a general sense as the evolved ability to categorize and represent experiences with symbols and to act imaginatively and creatively. This ability arose with the evolution of behavioral modernity in humans around 50,000 years ago.[citation needed] This capacity is often thought to be unique to humans, although some other species have demonstrated similar, though much less complex abilities for social learning. It is also used to denote the complex networks of practices and accumulated knowledge and ideas that is transmitted through social interaction and exist in specific human groups, or cultures, using the plural form. Some aspects of human behavior, such as language, social practices such as kinship, gender and marriage, expressive forms such as art, music, dance, ritual, religion, and technologies such as cooking, shelter, clothing are said to be cultural universals, found in all human societies. The concept material culture covers the physical expressions of culture, such as technology, architecture and art, whereas the immaterial aspects of culture such as principles of social organization (including, practices of political organization and social institutions), mythology, philosophy, literature (both written and oral), and science make up the intangible cultural heritage of a society. +In the humanities, one sense of culture, as an attribute of the individual, has been the degree to which they have cultivated a particular level of sophistication, in the arts, sciences, education, or manners. The level of cultural sophistication has also sometimes been seen to distinguish civilizations from less complex societies. 
Such hierarchical perspectives on culture are also found in class-based distinctions between a high culture of the social elite and a low culture, popular culture or folk culture of the lower classes, distinguished by the stratified access to cultural capital. In common parlance, culture is often used to refer specifically to the symbolic markers used by ethnic groups to distinguish themselves visibly from each other such as body modification, clothing or jewelry.[dubious – discuss] Mass culture refers to the mass-produced and mass mediated forms of consumer culture that emerged in the 20th century. Some schools of philosophy, such as Marxism and critical theory, have argued that culture is often used politically as a tool of the elites to manipulate the lower classes and create a false consciousness, such perspectives common in the discipline of cultural studies. In the wider social sciences, the theoretical perspective of cultural materialism holds that human symbolic culture arises from the material conditions of human life, as humans create the conditions for physical survival, and that the basis of culture is found in evolved biological dispositions. +When used as a count noun "a culture", is the set of customs, traditions and values of a society or community, such as an ethnic group or nation. In this sense, multiculturalism is a concept that values the peaceful coexistence and mutual respect between different cultures inhabiting the same territory. Sometimes "culture" is also used to describe specific practices within a subgroup of a society, a subculture (e.g. "bro culture"), or a counter culture. Within cultural anthropology, the ideology and analytical stance of cultural relativism holds that cultures cannot easily be objectively ranked or evaluated because any evaluation is necessarily situated within the value system of a given culture. 
+The modern term "culture" is based on a term used by the Ancient Roman orator Cicero in his Tusculanae Disputationes, where he wrote of a cultivation of the soul or "cultura animi", using an agricultural metaphor for the development of a philosophical soul, understood teleologically as the highest possible ideal for human development. Samuel Pufendorf took over this metaphor in a modern context, meaning something similar, but no longer assuming that philosophy was man's natural perfection. His use, and that of many writers after him "refers to all the ways in which human beings overcome their original barbarism, and through artifice, become fully human". +Social conflict and the development of technologies can produce changes within a society by altering social dynamics and promoting new cultural models, and spurring or enabling generative action. These social shifts may accompany ideological shifts and other types of cultural change. For example, the U.S. feminist movement involved new practices that produced a shift in gender relations, altering both gender and economic structures. Environmental conditions may also enter as factors. For example, after tropical forests returned at the end of the last ice age, plants suitable for domestication were available, leading to the invention of agriculture, which in turn brought about many cultural innovations and shifts in social dynamics. +Cultures are externally affected via contact between societies, which may also produce—or inhibit—social shifts and changes in cultural practices. War or competition over resources may impact technological development or social dynamics. Additionally, cultural ideas may transfer from one society to another, through diffusion or acculturation. In diffusion, the form of something (though not necessarily its meaning) moves from one culture to another. For example, hamburgers, fast food in the United States, seemed exotic when introduced into China. 
"Stimulus diffusion" (the sharing of ideas) refers to an element of one culture leading to an invention or propagation in another. "Direct Borrowing" on the other hand tends to refer to technological or tangible diffusion from one culture to another. Diffusion of innovations theory presents a research-based model of why and when individuals and cultures adopt new ideas, practices, and products. +Immanuel Kant (1724–1804) has formulated an individualist definition of "enlightenment" similar to the concept of bildung: "Enlightenment is man's emergence from his self-incurred immaturity." He argued that this immaturity comes not from a lack of understanding, but from a lack of courage to think independently. Against this intellectual cowardice, Kant urged: Sapere aude, "Dare to be wise!" In reaction to Kant, German scholars such as Johann Gottfried Herder (1744–1803) argued that human creativity, which necessarily takes unpredictable and highly diverse forms, is as important as human rationality. Moreover, Herder proposed a collective form of bildung: "For Herder, Bildung was the totality of experiences that provide a coherent identity, and sense of common destiny, to a people." +In 1795, the Prussian linguist and philosopher Wilhelm von Humboldt (1767–1835) called for an anthropology that would synthesize Kant's and Herder's interests. During the Romantic era, scholars in Germany, especially those concerned with nationalist movements—such as the nationalist struggle to create a "Germany" out of diverse principalities, and the nationalist struggles by ethnic minorities against the Austro-Hungarian Empire—developed a more inclusive notion of culture as "worldview" (Weltanschauung). According to this school of thought, each ethnic group has a distinct worldview that is incommensurable with the worldviews of other groups. 
Although more inclusive than earlier views, this approach to culture still allowed for distinctions between "civilized" and "primitive" or "tribal" cultures. +In 1860, Adolf Bastian (1826–1905) argued for "the psychic unity of mankind". He proposed that a scientific comparison of all human societies would reveal that distinct worldviews consisted of the same basic elements. According to Bastian, all human societies share a set of "elementary ideas" (Elementargedanken); different cultures, or different "folk ideas" (Völkergedanken), are local modifications of the elementary ideas. This view paved the way for the modern understanding of culture. Franz Boas (1858–1942) was trained in this tradition, and he brought it with him when he left Germany for the United States. +In practice, culture referred to an élite ideal and was associated with such activities as art, classical music, and haute cuisine. As these forms were associated with urban life, "culture" was identified with "civilization" (from lat. civitas, city). Another facet of the Romantic movement was an interest in folklore, which led to identifying a "culture" among non-elites. This distinction is often characterized as that between high culture, namely that of the ruling social group, and low culture. In other words, the idea of "culture" that developed in Europe during the 18th and early 19th centuries reflected inequalities within European societies. +Matthew Arnold contrasted "culture" with anarchy; other Europeans, following philosophers Thomas Hobbes and Jean-Jacques Rousseau, contrasted "culture" with "the state of nature". According to Hobbes and Rousseau, the Native Americans who were being conquered by Europeans from the 16th centuries on were living in a state of nature; this opposition was expressed through the contrast between "civilized" and "uncivilized." 
According to this way of thinking, one could classify some countries and nations as more civilized than others and some people as more cultured than others. This contrast led to Herbert Spencer's theory of Social Darwinism and Lewis Henry Morgan's theory of cultural evolution. Just as some critics have argued that the distinction between high and low cultures is really an expression of the conflict between European elites and non-elites, some critics have argued that the distinction between civilized and uncivilized people is really an expression of the conflict between European colonial powers and their colonial subjects. +Other 19th-century critics, following Rousseau have accepted this differentiation between higher and lower culture, but have seen the refinement and sophistication of high culture as corrupting and unnatural developments that obscure and distort people's essential nature. These critics considered folk music (as produced by "the folk", i.e., rural, illiterate, peasants) to honestly express a natural way of life, while classical music seemed superficial and decadent. Equally, this view often portrayed indigenous peoples as "noble savages" living authentic and unblemished lives, uncomplicated and uncorrupted by the highly stratified capitalist systems of the West. +Although anthropologists worldwide refer to Tylor's definition of culture, in the 20th century "culture" emerged as the central and unifying concept of American anthropology, where it most commonly refers to the universal human capacity to classify and encode human experiences symbolically, and to communicate symbolically encoded experiences socially.[citation needed] American anthropology is organized into four fields, each of which plays an important role in research on culture: biological anthropology, linguistic anthropology, cultural anthropology, and archaeology. 
+The sociology of culture concerns culture—usually understood as the ensemble of symbolic codes used by a society—as manifested in society. For Georg Simmel (1858–1918), culture referred to "the cultivation of individuals through the agency of external forms which have been objectified in the course of history". Culture in the sociological field can be defined as the ways of thinking, the ways of acting, and the material objects that together shape a people's way of life. Culture can be any of two types, non-material culture or material culture. Non-material culture refers to the non physical ideas that individuals have about their culture, including values, belief system, rules, norms, morals, language, organizations, and institutions. While Material culture is the physical evidence of a culture in the objects and architecture they make, or have made. The term tends to be relevant only in archeological and anthropological studies, but it specifically means all material evidence which can be attributed to culture past or present. +Cultural sociology first emerged in Weimar Germany (1918–1933), where sociologists such as Alfred Weber used the term Kultursoziologie (cultural sociology). Cultural sociology was then "reinvented" in the English-speaking world as a product of the "cultural turn" of the 1960s, which ushered in structuralist and postmodern approaches to social science. This type of cultural sociology may loosely be regarded as an approach incorporating cultural analysis and critical theory. Cultural sociologists tend to reject scientific methods,[citation needed] instead hermeneutically focusing on words, artifacts and symbols. "Culture" has since become an important concept across many branches of sociology, including resolutely scientific fields like social stratification and social network analysis. As a result, there has been a recent influx of quantitative sociologists to the field. 
Thus there is now a growing group of sociologists of culture who are, confusingly, not cultural sociologists. These scholars reject the abstracted postmodern aspects of cultural sociology, and instead look for a theoretical backing in the more scientific vein of social psychology and cognitive science. "Cultural sociology" is one of the largest sections of the American Sociological Association. The British establishment of cultural studies means the latter is often taught as a loosely distinct discipline in the UK. +The sociology of culture grew from the intersection between sociology (as shaped by early theorists like Marx, Durkheim, and Weber) with the growing discipline of anthropology, where in researchers pioneered ethnographic strategies for describing and analyzing a variety of cultures around the world. Part of the legacy of the early development of the field lingers in the methods (much of cultural sociological research is qualitative), in the theories (a variety of critical approaches to sociology are central to current research communities), and in the substantive focus of the field. For instance, relationships between popular culture, political control, and social class were early and lasting concerns in the field. +In the United Kingdom, sociologists and other scholars influenced by Marxism, such as Stuart Hall (1932–2014) and Raymond Williams (1921–1988), developed cultural studies. Following nineteenth-century Romantics, they identified "culture" with consumption goods and leisure activities (such as art, music, film, food, sports, and clothing). Nevertheless, they saw patterns of consumption and leisure as determined by relations of production, which led them to focus on class relations and the organization of production. +In the United States, "Cultural Studies" focuses largely on the study of popular culture, that is, on the social meanings of mass-produced consumer and leisure goods. 
Richard Hoggart coined the term in 1964 when he founded the Birmingham Centre for Contemporary Cultural Studies or CCCS. It has since become strongly associated with Stuart Hall, who succeeded Hoggart as Director. Cultural studies in this sense, then, can be viewed as a limited concentration scoped on the intricacies of consumerism, which belongs to a wider culture sometimes referred to as "Western Civilization" or as "Globalism." +From the 1970s onward, Stuart Hall's pioneering work, along with that of his colleagues Paul Willis, Dick Hebdige, Tony Jefferson, and Angela McRobbie, created an international intellectual movement. As the field developed it began to combine political economy, communication, sociology, social theory, literary theory, media theory, film/video studies, cultural anthropology, philosophy, museum studies and art history to study cultural phenomena or cultural texts. In this field researchers often concentrate on how particular phenomena relate to matters of ideology, nationality, ethnicity, social class, and/or gender.[citation needed] Cultural studies has a concern with the meaning and practices of everyday life. These practices comprise the ways people do particular things (such as watching television, or eating out) in a given culture. This field studies the meanings and uses people attribute to various objects and practices. Specifically, culture involves those meanings and practices held independently of reason. Watching television in order to view a public perspective on a historical event should not be thought of as culture, unless referring to the medium of television itself, which may have been selected culturally; however, schoolchildren watching television after school with their friends in order to "fit in" certainly qualifies, since there is no grounded reason for one's participation in this practice. Recently, as capitalism has spread throughout the world (a process called globalization), cultural studies has begun[when?] 
to analyze local and global forms of resistance to Western hegemony.[citation needed] Globalization in this context can be defined as western civilization in other ways, it undermines the cultural integrity of other culture and it is therefore repressive, exploitative and harmful to most people in different places. +In the context of cultural studies, the idea of a text includes not only written language, but also films, photographs, fashion or hairstyles: the texts of cultural studies comprise all the meaningful artifacts of culture.[citation needed] Similarly, the discipline widens the concept of "culture". "Culture" for a cultural-studies researcher not only includes traditional high culture (the culture of ruling social groups) and popular culture, but also everyday meanings and practices. The last two, in fact, have become the main focus of cultural studies. A further and recent approach is comparative cultural studies, based on the disciplines of comparative literature and cultural studies.[citation needed] +Scholars in the United Kingdom and the United States developed somewhat different versions of cultural studies after the late 1970s. The British version of cultural studies had originated in the 1950s and 1960s, mainly under the influence first of Richard Hoggart, E. P. Thompson, and Raymond Williams, and later that of Stuart Hall and others at the Centre for Contemporary Cultural Studies at the University of Birmingham. This included overtly political, left-wing views, and criticisms of popular culture as "capitalist" mass culture; it absorbed some of the ideas of the Frankfurt School critique of the "culture industry" (i.e. mass culture). This emerges in the writings of early British cultural-studies scholars and their influences: see the work of (for example) Raymond Williams, Stuart Hall, Paul Willis, and Paul Gilroy. +In the United States, Lindlof and Taylor write, "Cultural studies [were] grounded in a pragmatic, liberal-pluralist tradition". 
The American version of cultural studies initially concerned itself more with understanding the subjective and appropriative side of audience reactions to, and uses of, mass culture; for example, American cultural-studies advocates wrote about the liberatory aspects of fandom.[citation needed] The distinction between American and British strands, however, has faded.[citation needed] Some researchers, especially in early British cultural studies, apply a Marxist model to the field. This strain of thinking has some influence from the Frankfurt School, but especially from the structuralist Marxism of Louis Althusser and others. The main focus of an orthodox Marxist approach concentrates on the production of meaning. This model assumes a mass production of culture and identifies power as residing with those producing cultural artifacts. In a Marxist view, those who control the means of production (the economic base) essentially control a culture.[citation needed] Other approaches to cultural studies, such as feminist cultural studies and later American developments of the field, distance themselves from this view. They criticize the Marxist assumption of a single, dominant meaning, shared by all, for any cultural product. The non-Marxist approaches suggest that different ways of consuming cultural artifacts affect the meaning of the product. This view comes through in the book Doing Cultural Studies: The Story of the Sony Walkman (by Paul du Gay et al.), which seeks to challenge the notion that those who produce commodities control the meanings that people attribute to them. Feminist cultural analyst, theorist and art historian Griselda Pollock contributed to cultural studies from viewpoints of art history and psychoanalysis. 
The writer Julia Kristeva is among influential voices at the turn of the century, contributing to cultural studies from the field of art and psychoanalytical French feminism.[citation needed] +Raimon Panikkar pointed out 29 ways in which cultural change can be brought about. Some of these are: growth, development, evolution, involution, renovation, reconception, reform, innovation, revivalism, revolution, mutation, progress, diffusion, osmosis, borrowing, eclecticism, syncretism, modernization, indigenization, and transformation. Hence, modernization could be seen as similar or related to the Enlightenment, but as a 'looser' term tied to ideals and values that flourish: a belief in objectivity and progress. It is also seen as a belief in a secular society (free from religious influences) — for example, one that is objective and rational (science versus religion) — and, finally, the view that being modern means not being religious. diff --git a/testbed/deepset-ai__haystack/e2e/samples/test_documents/Materialism.txt b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Materialism.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d5ff8a3de290416ec83cd0e504bafb9f1494001 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Materialism.txt @@ -0,0 +1,27 @@ +Materialism is a form of philosophical monism which holds that matter is the fundamental substance in nature, and that all phenomena, including mental phenomena and consciousness, are identical with material interactions. +Materialism is closely related to physicalism, the view that all that exists is ultimately physical. Philosophical physicalism has evolved from materialism with the discoveries of the physical sciences to incorporate more sophisticated notions of physicality than mere ordinary matter, such as: spacetime, physical energies and forces, dark matter, and so on. Thus the term "physicalism" is preferred over "materialism" by some, while others use the terms as if they are synonymous. 
+Materialism belongs to the class of monist ontology. As such, it is different from ontological theories based on dualism or pluralism. For singular explanations of the phenomenal reality, materialism would be in contrast to idealism, neutral monism, and spiritualism. +Despite the large number of philosophical schools and subtle nuances between many, all philosophies are said to fall into one of two primary categories, which are defined in contrast to each other: Idealism, and materialism.[a] The basic proposition of these two categories pertains to the nature of reality, and the primary distinction between them is the way they answer two fundamental questions: "what does reality consist of?" and "how does it originate?" To idealists, spirit or mind or the objects of mind (ideas) are primary, and matter secondary. To materialists, matter is primary, and mind or spirit or ideas are secondary, the product of matter acting upon matter. +The materialist view is perhaps best understood in its opposition to the doctrines of immaterial substance applied to the mind historically, famously by René Descartes. However, by itself materialism says nothing about how material substance should be characterized. In practice, it is frequently assimilated to one variety of physicalism or another. +During the 19th century, Karl Marx and Friedrich Engels extended the concept of materialism to elaborate a materialist conception of history centered on the roughly empirical world of human activity (practice, including labor) and the institutions created, reproduced, or destroyed by that activity (see materialist conception of history). Later Marxists developed the notion of dialectical materialism which characterized later Marxist philosophy and method. +Materialism developed, possibly independently, in several geographically separated regions of Eurasia during what Karl Jaspers termed the Axial Age (approximately 800 to 200 BC). 
+In Ancient Indian philosophy, materialism developed around 600 BC with the works of Ajita Kesakambali, Payasi, Kanada, and the proponents of the Cārvāka school of philosophy. Kanada became one of the early proponents of atomism. The Nyaya–Vaisesika school (600 BC - 100 BC) developed one of the earliest forms of atomism, though their proofs of God and their positing that the consciousness was not material precludes labelling them as materialists. Buddhist atomism and the Jaina school continued the atomic tradition. +Materialism is often associated with reductionism, according to which the objects or phenomena individuated at one level of description, if they are genuine, must be explicable in terms of the objects or phenomena at some other level of description — typically, at a more reduced level. Non-reductive materialism explicitly rejects this notion, however, taking the material constitution of all particulars to be consistent with the existence of real objects, properties, or phenomena not explicable in the terms canonically used for the basic material constituents. Jerry Fodor influentially argues this view, according to which empirical laws and explanations in "special sciences" like psychology or geology are invisible from the perspective of basic physics. A lot of vigorous literature has grown up around the relation between these views. +Ancient Greek philosophers like Thales, Anaxagoras (ca. 500 BC – 428 BC), Epicurus and Democritus prefigure later materialists. The Latin poem De Rerum Natura by Lucretius (ca. 99 BC – ca. 55 BC) reflects the mechanistic philosophy of Democritus and Epicurus. According to this view, all that exists is matter and void, and all phenomena result from different motions and conglomerations of base material particles called "atoms" (literally: "indivisibles"). De Rerum Natura provides mechanistic explanations for phenomena such as erosion, evaporation, wind, and sound. 
Famous principles like "nothing can touch body but body" first appeared in the works of Lucretius. Democritus and Epicurus however did not hold to a monist ontology since they held to the ontological separation of matter and space i.e. space being "another kind" of being, indicating that the definition of "materialism" is wider than given scope for in this article. +Later Indian materialist Jayaraashi Bhatta (6th century) in his work Tattvopaplavasimha ("The upsetting of all principles") refuted the Nyaya Sutra epistemology. The materialistic Cārvāka philosophy appears to have died out some time after 1400. When Madhavacharya compiled Sarva-darśana-samgraha (a digest of all philosophies) in the 14th century, he had no Cārvāka/Lokāyata text to quote from, or even refer to. +In early 12th-century al-Andalus, the Arabian philosopher, Ibn Tufail (Abubacer), wrote discussions on materialism in his philosophical novel, Hayy ibn Yaqdhan (Philosophus Autodidactus), while vaguely foreshadowing the idea of a historical materialism. +The French cleric Pierre Gassendi (1592-1665) represented the materialist tradition in opposition to the attempts of René Descartes (1596-1650) to provide the natural sciences with dualist foundations. There followed the materialist and atheist abbé Jean Meslier (1664-1729), Julien Offray de La Mettrie, the German-French Paul-Henri Thiry Baron d'Holbach (1723-1789), the Encyclopedist Denis Diderot (1713-1784), and other French Enlightenment thinkers; as well as (in England) John "Walking" Stewart (1747-1822), whose insistence in seeing matter as endowed with a moral dimension had a major impact on the philosophical poetry of William Wordsworth (1770-1850). +Arthur Schopenhauer (1788-1860) wrote that "...materialism is the philosophy of the subject who forgets to take account of himself". He claimed that an observing subject can only know material objects through the mediation of the brain and its particular organization. 
That is, the brain itself is the "determiner" of how material objects will be experienced or perceived: +The German materialist and atheist anthropologist Ludwig Feuerbach would signal a new turn in materialism through his book, The Essence of Christianity (1841), which provided a humanist account of religion as the outward projection of man's inward nature. Feuerbach's materialism would later heavily influence Karl Marx. +Many current and recent philosophers—e.g., Daniel Dennett, Willard Van Orman Quine, Donald Davidson, and Jerry Fodor—operate within a broadly physicalist or materialist framework, producing rival accounts of how best to accommodate mind, including functionalism, anomalous monism, identity theory, and so on. +The nature and definition of matter - like other key concepts in science and philosophy - have occasioned much debate. Is there a single kind of matter (hyle) which everything is made of, or multiple kinds? Is matter a continuous substance capable of expressing multiple forms (hylomorphism), or a number of discrete, unchanging constituents (atomism)? Does it have intrinsic properties (substance theory), or is it lacking them (prima materia)? +One challenge to the traditional concept of matter as tangible "stuff" came with the rise of field physics in the 19th century. Relativity shows that matter and energy (including the spatially distributed energy of fields) are interchangeable. This enables the ontological view that energy is prima materia and matter is one of its forms. On the other hand, the Standard Model of Particle physics uses quantum field theory to describe all interactions. On this view it could be said that fields are prima materia and the energy is a property of the field. 
+According to the dominant cosmological model, the Lambda-CDM model, less than 5% of the universe's energy density is made up of the "matter" described by the Standard Model of Particle Physics, and the majority of the universe is composed of dark matter and dark energy - with little agreement amongst scientists about what these are made of. +With the advent of quantum physics, some scientists believed the concept of matter had merely changed, while others believed the conventional position could no longer be maintained. For instance Werner Heisenberg said "The ontology of materialism rested upon the illusion that the kind of existence, the direct 'actuality' of the world around us, can be extrapolated into the atomic range. This extrapolation, however, is impossible... atoms are not things." Likewise, some philosophers[which?] feel that these dichotomies necessitate a switch from materialism to physicalism. Others use the terms "materialism" and "physicalism" interchangeably. +Some modern day physicists and science writers—such as Paul Davies and John Gribbin—have argued that materialism has been disproven by certain scientific findings in physics, such as quantum mechanics and chaos theory. In 1991, Gribbin and Davies released their book The Matter Myth, the first chapter of which, "The Death of Materialism", contained the following passage: +Davies' and Gribbin's objections are shared by proponents of digital physics who view information rather than matter to be fundamental. Their objections were also shared by some founders of quantum theory, such as Max Planck, who wrote: +According to the Catholic Encyclopedia of 1907-1912, materialism, defined as "a philosophical system which regards matter as the only reality in the world [...] denies the existence of God and the soul". Materialism, in this view, therefore becomes incompatible with most world religions, including Christianity, Judaism, and Islam. In such a context one can conflate materialism with atheism. 
Most of Hinduism and transcendentalism regards all matter as an illusion called Maya, blinding humans from knowing "the truth". Maya is the limited, purely physical and mental reality in which our everyday consciousness has become entangled. Maya gets destroyed for a person when s/he perceives Brahman with transcendental knowledge. +In contrast, Joseph Smith, the founder of the Latter Day Saint movement, taught: "There is no such thing as immaterial matter. All spirit is matter, but it is more fine or pure, and can only be discerned by purer eyes; We cannot see it; but when our bodies are purified we shall see that it is all matter." This spirit element has always existed; it is co-eternal with God. It is also called "intelligence" or "the light of truth", which like all observable matter "was not created or made, neither indeed can be". Members of the Church of Jesus Christ of Latter-day Saints view the revelations of Joseph Smith as a restoration of original Christian doctrine, which they believe post-apostolic theologians began to corrupt in the centuries after Christ. The writings of many[quantify] of these theologians indicate a clear influence of Greek metaphysical philosophies such as Neoplatonism, which characterized divinity as an utterly simple, immaterial, formless, substance/essence (ousia) that transcended all that was physical. Despite strong opposition from many Christians, this metaphysical depiction of God eventually became incorporated into the doctrine of the Christian church, displacing the original Judeo-Christian concept of a physical, corporeal God who created humans in His image and likeness. +An argument for idealism, such as those of Hegel and Berkeley, is ipso facto an argument against materialism. Matter can be argued to be redundant, as in bundle theory, and mind-independent properties can in turn be reduced to subjective percepts. 
Berkeley presents an example of the latter by pointing out that it is impossible to gather direct evidence of matter, as there is no direct experience of matter; all that is experienced is perception, whether internal or external. As such, the existence of matter can only be assumed from the apparent (perceived) stability of perceptions; it finds absolutely no evidence in direct experience. +If matter and energy are seen as necessary to explain the physical world, but incapable of explaining mind, dualism results. Emergence, holism, and process philosophy seek to ameliorate the perceived shortcomings of traditional (especially mechanistic) materialism without abandoning materialism entirely. +Some critics object to materialism as part of an overly skeptical, narrow or reductivist approach to theorizing, rather than to the ontological claim that matter is the only substance. Particle physicist and Anglican theologian John Polkinghorne objects to what he calls promissory materialism — claims that materialistic science will eventually succeed in explaining phenomena it has not so far been able to explain. Polkinghorne prefers "dual-aspect monism" to faith in materialism. diff --git a/testbed/deepset-ai__haystack/e2e/samples/test_documents/Strasbourg.txt b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Strasbourg.txt new file mode 100644 index 0000000000000000000000000000000000000000..bf64384234a9fe5b20151a17b98f20a45924e231 --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/samples/test_documents/Strasbourg.txt @@ -0,0 +1,34 @@ +Strasbourg (/ˈstræzbɜːrɡ/, French pronunciation: ​[stʁaz.buʁ, stʁas.buʁ]; Alsatian: Strossburi; German: Straßburg, [ˈʃtʁaːsbʊɐ̯k]) is the capital and largest city of the Alsace-Champagne-Ardenne-Lorraine (ACAL) region in eastern France and is the official seat of the European Parliament. Located close to the border with Germany, it is the capital of the Bas-Rhin département. 
The city and the region of Alsace were historically predominantly Alemannic-speaking, hence the city's Germanic name. In 2013, the city proper had 275,718 inhabitants, Eurométropole de Strasbourg (Greater Strasbourg) had 475,934 inhabitants and the Arrondissement of Strasbourg had 482,384 inhabitants. With a population of 768,868 in 2012, Strasbourg's metropolitan area (only the part of the metropolitan area on French territory) is the ninth largest in France and home to 13% of the ACAL region's inhabitants. The transnational Eurodistrict Strasbourg-Ortenau had a population of 915,000 inhabitants in 2014. +Strasbourg's historic city centre, the Grande Île (Grand Island), was classified a World Heritage site by UNESCO in 1988, the first time such an honour was placed on an entire city centre. Strasbourg is immersed in the Franco-German culture and although violently disputed throughout history, has been a bridge of unity between France and Germany for centuries, especially through the University of Strasbourg, currently the second largest in France, and the coexistence of Catholic and Protestant culture. The largest Islamic place of worship in France, the Strasbourg Grand Mosque, was inaugurated by French Interior Minister Manuel Valls on 27 September 2012. +Strasbourg is situated on the eastern border of France with Germany. This border is formed by the River Rhine, which also forms the eastern border of the modern city, facing across the river to the German town Kehl. The historic core of Strasbourg however lies on the Grande Île in the River Ill, which here flows parallel to, and roughly 4 kilometres (2.5 mi) from, the Rhine. The natural courses of the two rivers eventually join some distance downstream of Strasbourg, although several artificial waterways now connect them within the city. 
+The Romans under Nero Claudius Drusus established a military outpost belonging to the Germania Superior Roman province at Strasbourg's current location, and named it Argentoratum. (Hence the town is commonly called Argentina in medieval Latin.) The name "Argentoratum" was first mentioned in 12 BC and the city celebrated its 2,000th birthday in 1988. "Argentorate" as the toponym of the Gaulish settlement preceded it before being Latinized, but it is not known by how long. The Roman camp was destroyed by fire and rebuilt six times between the first and the fifth centuries AD: in 70, 97, 235, 355, in the last quarter of the fourth century, and in the early years of the fifth century. It was under Trajan and after the fire of 97 that Argentoratum received its most extended and fortified shape. From the year 90 on, the Legio VIII Augusta was permanently stationed in the Roman camp of Argentoratum. It then included a cavalry section and covered an area of approximately 20 hectares. Other Roman legions temporarily stationed in Argentoratum were the Legio XIV Gemina and the Legio XXI Rapax, the latter during the reign of Nero. +The centre of Argentoratum proper was situated on the Grande Île (Cardo: current Rue du Dôme, Decumanus: current Rue des Hallebardes). The outline of the Roman "castrum" is visible in the street pattern in the Grande Ile. Many Roman artifacts have also been found along the current Route des Romains, the road that led to Argentoratum, in the suburb of Kœnigshoffen. This was where the largest burial places were situated, as well as the densest concentration of civilian dwelling places and commerces next to the camp. Among the most outstanding finds in Kœnigshoffen were (found in 1911–12) the fragments of a grand Mithraeum that had been shattered by early Christians in the fourth century. From the fourth century, Strasbourg was the seat of the Bishopric of Strasbourg (made an Archbishopric in 1988). 
Archaeological excavations below the current Église Saint-Étienne in 1948 and 1956 unearthed the apse of a church dating back to the late fourth or early fifth century, considered to be the oldest church in Alsace. It is supposed that this was the first seat of the Roman Catholic Diocese of Strasbourg. +In the fifth century Strasbourg was occupied successively by Alemanni, Huns, and Franks. In the ninth century it was commonly known as Strazburg in the local language, as documented in 842 by the Oaths of Strasbourg. This trilingual text contains, alongside texts in Latin and Old High German (teudisca lingua), the oldest written variety of Gallo-Romance (lingua romana) clearly distinct from Latin, the ancestor of Old French. The town was also called Stratisburgum or Strateburgus in Latin, from which later came Strossburi in Alsatian and Straßburg in Standard German, and then Strasbourg in French. The Oaths of Strasbourg is considered as marking the birth of the two countries of France and Germany with the division of the Carolingian Empire. +A revolution in 1332 resulted in a broad-based city government with participation of the guilds, and Strasbourg declared itself a free republic. The deadly bubonic plague of 1348 was followed on 14 February 1349 by one of the first and worst pogroms in pre-modern history: over a thousand Jews were publicly burnt to death, with the remainder of the Jewish population being expelled from the city. Until the end of the 18th century, Jews were forbidden to remain in town after 10 pm. The time to leave the city was signalled by a municipal herald blowing the Grüselhorn (see below, Museums, Musée historique);. A special tax, the Pflastergeld (pavement money), was furthermore to be paid for any horse that a Jew would ride or bring into the city while allowed to. 
+In the 1520s during the Protestant Reformation, the city, under the political guidance of Jacob Sturm von Sturmeck and the spiritual guidance of Martin Bucer embraced the religious teachings of Martin Luther. Their adherents established a Gymnasium, headed by Johannes Sturm, made into a University in the following century. The city first followed the Tetrapolitan Confession, and then the Augsburg Confession. Protestant iconoclasm caused much destruction to churches and cloisters, notwithstanding that Luther himself opposed such a practice. Strasbourg was a centre of humanist scholarship and early book-printing in the Holy Roman Empire, and its intellectual and political influence contributed much to the establishment of Protestantism as an accepted denomination in the southwest of Germany. (John Calvin spent several years as a political refugee in the city). The Strasbourg Councillor Sturm and guildmaster Matthias represented the city at the Imperial Diet of Speyer (1529), where their protest led to the schism of the Catholic Church and the evolution of Protestantism. Together with four other free cities, Strasbourg presented the confessio tetrapolitana as its Protestant book of faith at the Imperial Diet of Augsburg in 1530, where the slightly different Augsburg Confession was also handed over to Charles V, Holy Roman Emperor. +Louis' advisors believed that, as long as Strasbourg remained independent, it would endanger the King's newly annexed territories in Alsace, and, that to defend these large rural lands effectively, a garrison had to be placed in towns such as Strasbourg. Indeed, the bridge over the Rhine at Strasbourg had been used repeatedly by Imperial (Holy Roman Empire) forces, and three times during the Franco-Dutch War Strasbourg had served as a gateway for Imperial invasions into Alsace. In September 1681 Louis' forces, though lacking a clear casus belli, surrounded the city with overwhelming force. 
After some negotiation, Louis marched into the city unopposed on 30 September 1681 and proclaimed its annexation. +This annexation was one of the direct causes of the brief and bloody War of the Reunions whose outcome left the French in possession. The French annexation was recognized by the Treaty of Ryswick (1697). The official policy of religious intolerance which drove most Protestants from France after the revocation of the Edict of Nantes in 1685 was not applied in Strasbourg and in Alsace, because both had a special status as a province à l'instar de l'étranger effectif (a kind of foreign province of the king of France). Strasbourg Cathedral, however, was taken from the Lutherans to be returned to the Catholics as the French authorities tried to promote Catholicism wherever they could (some other historic churches remained in Protestant hands). Its language also remained overwhelmingly German: the German Lutheran university persisted until the French Revolution. Famous students included Goethe and Herder. +Strasbourg's status as a free city was revoked by the French Revolution. Enragés, most notoriously Eulogius Schneider, ruled the city with an increasingly iron hand. During this time, many churches and monasteries were either destroyed or severely damaged. The cathedral lost hundreds of its statues (later replaced by copies in the 19th century) and in April 1794, there was talk of tearing its spire down, on the grounds that it was against the principle of equality. The tower was saved, however, when in May of the same year citizens of Strasbourg crowned it with a giant tin Phrygian cap. This artifact was later kept in the historical collections of the city until it was destroyed by the Germans in 1870 during the Franco-Prussian war. +During the Franco-Prussian War and the Siege of Strasbourg, the city was heavily bombarded by the Prussian army. The bombardment of the city was meant to break the morale of the people of Strasbourg. 
On 24 and 26 August 1870, the Museum of Fine Arts was destroyed by fire, as was the Municipal Library housed in the Gothic former Dominican church, with its unique collection of medieval manuscripts (most famously the Hortus deliciarum), rare Renaissance books, archeological finds and historical artifacts. The gothic cathedral was damaged as well as the medieval church of Temple Neuf, the theatre, the city hall, the court of justice and many houses. At the end of the siege 10,000 inhabitants were left without shelter; over 600 died, including 261 civilians, and 3200 were injured, including 1,100 civilians. +In 1871, after the end of the war, the city was annexed to the newly established German Empire as part of the Reichsland Elsass-Lothringen under the terms of the Treaty of Frankfurt. As part of Imperial Germany, Strasbourg was rebuilt and developed on a grand and representative scale, such as the Neue Stadt, or "new city" around the present Place de la République. Historian Rodolphe Reuss and Art historian Wilhelm von Bode were in charge of rebuilding the municipal archives, libraries and museums. The University, founded in 1567 and suppressed during the French Revolution as a stronghold of German sentiment,[citation needed] was reopened in 1872 under the name Kaiser-Wilhelms-Universität. +A belt of massive fortifications was established around the city, most of which still stands today, renamed after French generals and generally classified as Monuments historiques; most notably Fort Roon (now Fort Desaix) and Fort Podbielski (now Fort Ducrot) in Mundolsheim, Fort von Moltke (now Fort Rapp) in Reichstett, Fort Bismarck (now Fort Kléber) in Wolfisheim, Fort Kronprinz (now Fort Foch) in Niederhausbergen, Fort Kronprinz von Sachsen (now Fort Joffre) in Holtzheim and Fort Großherzog von Baden (now Fort Frère) in Oberhausbergen. 
+Following the defeat of the German empire in World War I and the abdication of the German Emperor, some revolutionary insurgents declared Alsace-Lorraine as an independent Republic, without preliminary referendum or vote. On 11 November 1918 (Armistice Day), communist insurgents proclaimed a "soviet government" in Strasbourg, following the example of Kurt Eisner in Munich as well as other German towns. French troops commanded by French general Henri Gouraud entered triumphantly in the city on 22 November. A major street of the city now bears the name of that date (Rue du 22 Novembre) which celebrates the entry of the French in the city. Viewing the massive cheering crowd gathered under the balcony of Strasbourg's town hall, French President Raymond Poincaré stated that "the plebiscite is done". +In 1919, following the Treaty of Versailles, the city was restituted to France in accordance with U.S. President Woodrow Wilson's "Fourteen Points" without a referendum. The date of the assignment was retroactively established on Armistice Day. It is doubtful whether a referendum in Strasbourg would have ended in France's favour since the political parties striving for an autonomous Alsace or a connection to France accounted only for a small proportion of votes in the last Reichstag as well as in the local elections. The Alsatian autonomists who were pro French had won many votes in the more rural parts of the region and other towns since the annexation of the region by Germany in 1871. The movement started with the first election for the Reichstag; those elected were called "les députés protestataires", and until the fall of Bismarck in 1890, they were the only deputies elected by the Alsatians to the German parliament demanding the return of those territories to France. 
At the last Reichstag election in Strasbourg and its periphery, the clear winners were the Social Democrats; the city was the administrative capital of the region, was inhabited by many Germans appointed by the central government in Berlin and its flourishing economy attracted many Germans. This could explain the difference between the rural vote and the one in Strasbourg. After the war, many Germans left Strasbourg and went back to Germany; some of them were denounced by the locals or expelled by the newly appointed authorities. The Saverne Affair was still vivid in the memory of the Alsatians. +Between the German invasion of Poland on 1 September 1939 and the Anglo-French declaration of War against the German Reich on 3 September 1939, the entire city (a total of 120,000 people) was evacuated, like other border towns as well. Until the arrival of the Wehrmacht troops mid-June 1940, the city was, for ten months, completely empty, with the exception of the garrisoned soldiers. The Jews of Strasbourg had been evacuated to Périgueux and Limoges, the University had been evacuated to Clermont-Ferrand. +After the ceasefire following the Fall of France in June 1940, Alsace was annexed to Germany and a rigorous policy of Germanisation was imposed upon it by the Gauleiter Robert Heinrich Wagner. When, in July 1940, the first evacuees were allowed to return, only residents of Alsatian origin were admitted. The last Jews were deported on 15 July 1940 and the main synagogue, a huge Romanesque revival building that had been a major architectural landmark with its 54-metre-high dome since its completion in 1897, was set ablaze, then razed. +In September 1940 the first Alsatian resistance movement led by Marcel Weinum called La main noire (The black hand) was created. It was composed of a group of 25 young men aged 14 to 18 who led several attacks against the German occupation. 
The actions culminated in an attack on the Gauleiter Robert Wagner, the highest commander of Alsace directly under the order of Hitler. In March 1942, Marcel Weinum was arrested by the Gestapo and sentenced to be beheaded at the age of 18 in April 1942 in Stuttgart, Germany. His last words were: "If I have to die, I shall die but with a pure heart". From 1943 the city was bombarded by Allied aircraft. While the First World War had not notably damaged the city, Anglo-American bombing caused extensive destruction in raids of which at least one was allegedly carried out by mistake. In August 1944, several buildings in the Old Town were damaged by bombs, particularly the Palais Rohan, the Old Customs House (Ancienne Douane) and the Cathedral. On 23 November 1944, the city was officially liberated by the 2nd French Armoured Division under General Leclerc. He thereby fulfilled the oath that he had made with his soldiers after the decisive Capture of Kufra. With the Oath of Kufra, they swore to keep up the fight until the French flag flew over the Cathedral of Strasbourg.
+Many people from Strasbourg were incorporated in the German Army against their will, and were sent to the eastern front; those young men and women were called Malgré-nous. Many tried to escape from the incorporation, join the French Resistance, or desert the Wehrmacht but many couldn't because they were running the risk of having their families sent to work or concentration camps by the Germans. Many of these men, especially those who did not answer the call immediately, were pressured to "volunteer" for service with the SS, often by direct threats on their families. This threat obliged the majority of them to remain in the German army. After the war, the few that survived were often accused of being traitors or collaborationists, because this tough situation was not known in the rest of France, and they had to face the incomprehension of many.
In July 1944, 1500 malgré-nous were released from Soviet captivity and sent to Algiers, where they joined the Free French Forces. Nowadays history recognizes the suffering of those people, and museums, public discussions and memorials have been built to commemorate this terrible period of history of this part of Eastern France (Alsace and Moselle). Liberation of Strasbourg took place on 23 November 1944. +In 1949, the city was chosen to be the seat of the Council of Europe with its European Court of Human Rights and European Pharmacopoeia. Since 1952, the European Parliament has met in Strasbourg, which was formally designated its official 'seat' at the Edinburgh meeting of the European Council of EU heads of state and government in December 1992. (This position was reconfirmed and given treaty status in the 1997 Treaty of Amsterdam). However, only the (four-day) plenary sessions of the Parliament are held in Strasbourg each month, with all other business being conducted in Brussels and Luxembourg. Those sessions take place in the Immeuble Louise Weiss, inaugurated in 1999, which houses the largest parliamentary assembly room in Europe and of any democratic institution in the world. Before that, the EP sessions had to take place in the main Council of Europe building, the Palace of Europe, whose unusual inner architecture had become a familiar sight to European TV audiences. In 1992, Strasbourg became the seat of the Franco-German TV channel and movie-production society Arte. 
+In addition to the cathedral, Strasbourg houses several other medieval churches that have survived the many wars and destructions that have plagued the city: the Romanesque Église Saint-Étienne, partly destroyed in 1944 by Allied bombing raids, the part Romanesque, part Gothic, very large Église Saint-Thomas with its Silbermann organ on which Wolfgang Amadeus Mozart and Albert Schweitzer played, the Gothic Église protestante Saint-Pierre-le-Jeune with its crypt dating back to the seventh century and its cloister partly from the eleventh century, the Gothic Église Saint-Guillaume with its fine early-Renaissance stained glass and furniture, the Gothic Église Saint-Jean, the part Gothic, part Art Nouveau Église Sainte-Madeleine, etc. The Neo-Gothic church Saint-Pierre-le-Vieux Catholique (there is also an adjacent church Saint-Pierre-le-Vieux Protestant) serves as a shrine for several 15th-century wood worked and painted altars coming from other, now destroyed churches and installed there for public display. Among the numerous secular medieval buildings, the monumental Ancienne Douane (old custom-house) stands out. +The German Renaissance has bequeathed the city some noteworthy buildings (especially the current Chambre de commerce et d'industrie, former town hall, on Place Gutenberg), as did the French Baroque and Classicism with several hôtels particuliers (i.e. palaces), among which the Palais Rohan (1742, now housing three museums) is the most spectacular. Other buildings of its kind are the "Hôtel de Hanau" (1736, now the city hall), the Hôtel de Klinglin (1736, now residence of the préfet), the Hôtel des Deux-Ponts (1755, now residence of the military governor), the Hôtel d'Andlau-Klinglin (1725, now seat of the administration of the Port autonome de Strasbourg) etc. The largest baroque building of Strasbourg though is the 150 m (490 ft) long 1720s main building of the Hôpital civil. 
As for French Neo-classicism, it is the Opera House on Place Broglie that most prestigiously represents this style. +Strasbourg also offers high-class eclecticist buildings in its very extended German district, the Neustadt, being the main memory of Wilhelmian architecture since most of the major cities in Germany proper suffered intensive damage during World War II. Streets, boulevards and avenues are homogeneous, surprisingly high (up to seven stories) and broad examples of German urban lay-out and of this architectural style that summons and mixes up five centuries of European architecture as well as Neo-Egyptian, Neo-Greek and Neo-Babylonian styles. The former imperial palace Palais du Rhin, the most political and thus heavily criticized of all German Strasbourg buildings epitomizes the grand scale and stylistic sturdiness of this period. But the two most handsome and ornate buildings of these times are the École internationale des Pontonniers (the former Höhere Mädchenschule, girls college) with its towers, turrets and multiple round and square angles and the École des Arts décoratifs with its lavishly ornate façade of painted bricks, woodwork and majolica. +As for modern and contemporary architecture, Strasbourg possesses some fine Art Nouveau buildings (such as the huge Palais des Fêtes and houses and villas like Villa Schutzenberger and Hôtel Brion), good examples of post-World War II functional architecture (the Cité Rotterdam, for which Le Corbusier did not succeed in the architectural contest) and, in the very extended Quartier Européen, some spectacular administrative buildings of sometimes utterly large size, among which the European Court of Human Rights building by Richard Rogers is arguably the finest. 
Other noticeable contemporary buildings are the new Music school Cité de la Musique et de la Danse, the Musée d'Art moderne et contemporain and the Hôtel du Département facing it, as well as, in the outskirts, the tramway-station Hoenheim-Nord designed by Zaha Hadid. +Strasbourg features a number of prominent parks, of which several are of cultural and historical interest: the Parc de l'Orangerie, laid out as a French garden by André le Nôtre and remodeled as an English garden on behalf of Joséphine de Beauharnais, now displaying noteworthy French gardens, a neo-classical castle and a small zoo; the Parc de la Citadelle, built around impressive remains of the 17th-century fortress erected close to the Rhine by Vauban; the Parc de Pourtalès, laid out in English style around a baroque castle (heavily restored in the 19th century) that now houses a small three-star hotel, and featuring an open-air museum of international contemporary sculpture. The Jardin botanique de l'Université de Strasbourg (botanical garden) was created under the German administration next to the Observatory of Strasbourg, built in 1881, and still owns some greenhouses of those times. The Parc des Contades, although the oldest park of the city, was completely remodeled after World War II. The futuristic Parc des Poteries is an example of European park-conception in the late 1990s. The Jardin des deux Rives, spread over Strasbourg and Kehl on both sides of the Rhine opened in 2004 and is the most extended (60-hectare) park of the agglomeration. The most recent park is Parc du Heyritz (8,7 ha), opened in 2014 along a canal facing the hôpital civil. +Unlike most other cities, Strasbourg's collections of European art are divided into several museums according not only to type and area, but also to epoch. 
Old master paintings from the Germanic Rhenish territories and until 1681 are displayed in the Musée de l'Œuvre Notre-Dame, old master paintings from all the rest of Europe (including the Dutch Rhenish territories) and until 1871 as well as old master paintings from the Germanic Rhenish territories between 1681 and 1871 are displayed in the Musée des Beaux-Arts. Old master graphic arts until 1871 is displayed in the Cabinet des estampes et dessins. Decorative arts until 1681 ("German period") are displayed in the Musée de l'Œuvre Notre-Dame, decorative arts from 1681 to 1871 ("French period") are displayed in the Musée des Arts décoratifs. International art (painting, sculpture, graphic arts) and decorative art since 1871 is displayed in the Musée d'art moderne et contemporain. The latter museum also displays the city's photographic library. +Strasbourg, well known as centre of humanism, has a long history of excellence in higher-education, at the crossroads of French and German intellectual traditions. Although Strasbourg had been annexed by the Kingdom of France in 1683, it still remained connected to the German-speaking intellectual world throughout the 18th century and the university attracted numerous students from the Holy Roman Empire, including Goethe, Metternich and Montgelas, who studied law in Strasbourg, among the most prominent. Nowadays, Strasbourg is known to offer among the best university courses in France, after Paris. +The Bibliothèque nationale et universitaire (BNU) is, with its collection of more than 3,000,000 titles, the second largest library in France after the Bibliothèque nationale de France. It was founded by the German administration after the complete destruction of the previous municipal library in 1871 and holds the unique status of being simultaneously a students' and a national library. 
The Strasbourg municipal library had been marked erroneously as "City Hall" in a French commercial map, which had been captured and used by the German artillery to lay their guns. A librarian from Munich later pointed out "...that the destruction of the precious collection was not the fault of a German artillery officer, who used the French map, but of the slovenly and inaccurate scholarship of a Frenchman." +As one of the earliest centers of book-printing in Europe (see above: History), Strasbourg for a long time held a large number of incunabula—documents printed before 1500—in her library as one of her most precious heritages. After the total destruction of this institution in 1870, however, a new collection had to be reassembled from scratch. Today, Strasbourg's different public and institutional libraries again display a sizable total number of incunabula, distributed as follows: Bibliothèque nationale et universitaire, ca. 2 098 Médiathèque de la ville et de la communauté urbaine de Strasbourg, 394 Bibliothèque du Grand Séminaire, 238 Médiathèque protestante, 94 and Bibliothèque alsatique du Crédit Mutuel, 5. +City transportation in Strasbourg includes the futurist-looking Strasbourg tramway that opened in 1994 and is operated by the regional transit company Compagnie des Transports Strasbourgeois (CTS), consisting of 6 lines with a total length of 55.8 km (34.7 mi). The CTS also operates a comprehensive bus network throughout the city that is integrated with the trams. With more than 500 km (311 mi) of bicycle paths, biking in the city is convenient and the CTS operates a cheap bike-sharing scheme named Vélhop'. The CTS, and its predecessors, also operated a previous generation of tram system between 1878 and 1960, complemented by trolleybus routes between 1939 and 1962. +Being a city on the Ill and close to the Rhine, Strasbourg has always been an important centre of fluvial navigation, as is attested by archeological findings. 
In 1682 the Canal de la Bruche was added to the river navigations, initially to provide transport for sandstone from quarries in the Vosges for use in the fortification of the city. That canal has since closed, but the subsequent Canal du Rhone au Rhine, Canal de la Marne au Rhin and Grand Canal d'Alsace are still in use, as is the important activity of the Port autonome de Strasbourg. Water tourism inside the city proper attracts hundreds of thousands of tourists yearly. +The tram system that now criss-crosses the historic city centre complements walking and biking in it. The centre has been transformed into a pedestrian priority zone that enables and invites walking and biking by making these active modes of transport comfortable, safe and enjoyable. These attributes are accomplished by applying the principle of "filtered permeability" to the existing irregular network of streets. It means that the network adaptations favour active transportation and, selectively, "filter out" the car by reducing the number of streets that run through the centre. While certain streets are discontinuous for cars, they connect to a network of pedestrian and bike paths which permeate the entire centre. In addition, these paths go through public squares and open spaces increasing the enjoyment of the trip. This logic of filtering a mode of transport is fully expressed in a comprehensive model for laying out neighbourhoods and districts – the Fused Grid. +At present the A35 autoroute, which parallels the Rhine between Karlsruhe and Basel, and the A4 autoroute, which links Paris with Strasbourg, penetrate close to the centre of the city. The Grand contournement ouest (GCO) project, programmed since 1999, plans to construct a 24 km (15 mi) long highway connection between the junctions of the A4 and the A35 autoroutes in the north and of the A35 and A352 autoroutes in the south. 
This route runs well to the west of the city and is meant to divert a significant portion of motorized traffic from the unité urbaine. diff --git a/testbed/deepset-ai__haystack/e2e/samples/test_documents/War_on_Terror.txt b/testbed/deepset-ai__haystack/e2e/samples/test_documents/War_on_Terror.txt new file mode 100644 index 0000000000000000000000000000000000000000..1601e747ba0ea6fee4280742e3eb35233e54135c --- /dev/null +++ b/testbed/deepset-ai__haystack/e2e/samples/test_documents/War_on_Terror.txt @@ -0,0 +1,31 @@ +On 16 September 2001, at Camp David, President George W. Bush used the phrase war on terrorism in an unscripted and controversial comment when he said, "This crusade – this war on terrorism – is going to take a while, ... " Bush later apologized for this remark due to the negative connotations the term crusade has to people, e.g. of Muslim faith. The word crusade was not used again. On 20 September 2001, during a televised address to a joint session of congress, Bush stated that, "(o)ur 'war on terror' begins with al-Qaeda, but it does not end there. It will not end until every terrorist group of global reach has been found, stopped, and defeated."
+U.S. President Barack Obama has rarely used the term, but in his inaugural address on 20 January 2009, he stated "Our nation is at war, against a far-reaching network of violence and hatred." In March 2009 the Defense Department officially changed the name of operations from "Global War on Terror" to "Overseas Contingency Operation" (OCO). In March 2009, the Obama administration requested that Pentagon staff members avoid use of the term, instead using "Overseas Contingency Operation". Basic objectives of the Bush administration "war on terror", such as targeting al Qaeda and building international counterterrorism alliances, remain in place.
In December 2012, Jeh Johnson, the General Counsel of the Department of Defense, stated that the military fight will be replaced by a law enforcement operation when speaking at Oxford University, predicting that al Qaeda will be so weakened to be ineffective, and has been "effectively destroyed", and thus the conflict will not be an armed conflict under international law. In May 2013, Obama stated that the goal is "to dismantle specific networks of violent extremists that threaten America"; which coincided with the U.S. Office of Management and Budget having changed the wording from "Overseas Contingency Operations" to "Countering Violent Extremism" in 2010. +Because the actions involved in the "war on terrorism" are diffuse, and the criteria for inclusion are unclear, political theorist Richard Jackson has argued that "the 'war on terrorism' therefore, is simultaneously a set of actual practices—wars, covert operations, agencies, and institutions—and an accompanying series of assumptions, beliefs, justifications, and narratives—it is an entire language or discourse." Jackson cites among many examples a statement by John Ashcroft that "the attacks of September 11 drew a bright line of demarcation between the civil and the savage". Administration officials also described "terrorists" as hateful, treacherous, barbarous, mad, twisted, perverted, without faith, parasitical, inhuman, and, most commonly, evil. Americans, in contrast, were described as brave, loving, generous, strong, resourceful, heroic, and respectful of human rights. +The origins of al-Qaeda can be traced to the Soviet war in Afghanistan (December 1979 – February 1989). The United States, United Kingdom, Saudi Arabia, Pakistan, and the People's Republic of China supported the Islamist Afghan mujahadeen guerillas against the military forces of the Soviet Union and the Democratic Republic of Afghanistan. 
A small number of "Afghan Arab" volunteers joined the fight against the Soviets, including Osama bin Laden, but there is no evidence they received any external assistance. In May 1996 the group World Islamic Front for Jihad Against Jews and Crusaders (WIFJAJC), sponsored by bin Laden (and later re-formed as al-Qaeda), started forming a large base of operations in Afghanistan, where the Islamist extremist regime of the Taliban had seized power earlier in the year. In February 1998, Osama bin Laden signed a fatwā, as head of al-Qaeda, declaring war on the West and Israel, later in May of that same year al-Qaeda released a video declaring war on the U.S. and the West. +On 7 August 1998, al-Qaeda struck the U.S. embassies in Kenya and Tanzania, killing 224 people, including 12 Americans. In retaliation, U.S. President Bill Clinton launched Operation Infinite Reach, a bombing campaign in Sudan and Afghanistan against targets the U.S. asserted were associated with WIFJAJC, although others have questioned whether a pharmaceutical plant in Sudan was used as a chemical warfare plant. The plant produced much of the region's antimalarial drugs and around 50% of Sudan's pharmaceutical needs. The strikes failed to kill any leaders of WIFJAJC or the Taliban. +On the morning of 11 September 2001, 19 men affiliated with al-Qaeda hijacked four airliners all bound for California. Once the hijackers assumed control of the airliners, they told the passengers that they had the bomb on board and would spare the lives of passengers and crew once their demands were met – no passenger and crew actually suspected that they would use the airliners as suicide weapons since it had never happened before in history. The hijackers – members of al-Qaeda's Hamburg cell – intentionally crashed two airliners into the Twin Towers of the World Trade Center in New York City. 
Both buildings collapsed within two hours from fire damage related to the crashes, destroying nearby buildings and damaging others. The hijackers crashed a third airliner into the Pentagon in Arlington County, Virginia, just outside Washington D.C. The fourth plane crashed into a field near Shanksville, Pennsylvania, after some of its passengers and flight crew attempted to retake control of the plane, which the hijackers had redirected toward Washington D.C., to target the White House, or the U.S. Capitol. No flights had survivors. A total of 2,977 victims and the 19 hijackers perished in the attacks. +The Authorization for Use of Military Force Against Terrorists or "AUMF" was made law on 14 September 2001, to authorize the use of United States Armed Forces against those responsible for the attacks on 11 September 2001. It authorized the President to use all necessary and appropriate force against those nations, organizations, or persons he determines planned, authorized, committed, or aided the terrorist attacks that occurred on 11 September 2001, or harbored such organizations or persons, in order to prevent any future acts of international terrorism against the United States by such nations, organizations or persons. Congress declares this is intended to constitute specific statutory authorization within the meaning of section 5(b) of the War Powers Resolution of 1973. +Subsequently, in October 2001, U.S. forces (with UK and coalition allies) invaded Afghanistan to oust the Taliban regime. On 7 October 2001, the official invasion began with British and U.S. forces conducting airstrike campaigns over enemy targets. Kabul, the capital city of Afghanistan, fell by mid-November. The remaining al-Qaeda and Taliban remnants fell back to the rugged mountains of eastern Afghanistan, mainly Tora Bora. In December, Coalition forces (the U.S. and its allies) fought within that region. It is believed that Osama bin Laden escaped into Pakistan during the battle. 
+The Taliban regrouped in western Pakistan and began to unleash an insurgent-style offensive against Coalition forces in late 2002. Throughout southern and eastern Afghanistan, firefights broke out between the surging Taliban and Coalition forces. Coalition forces responded with a series of military offensives and an increase in the number of troops in Afghanistan. In February 2010, Coalition forces launched Operation Moshtarak in southern Afghanistan along with other military offensives in the hopes that they would destroy the Taliban insurgency once and for all. Peace talks are also underway between Taliban affiliated fighters and Coalition forces. In September 2014, Afghanistan and the United States signed a security agreement, which permits United States and NATO forces to remain in Afghanistan until at least 2024. The United States and other NATO and non-NATO forces are planning to withdraw; with the Taliban claiming it has defeated the United States and NATO, and the Obama Administration viewing it as a victory. In December 2014, ISAF encased its colors, and Resolute Support began as the NATO operation in Afghanistan. United States operations within Afghanistan continue under the name "Operation Freedom's Sentinel".
+In January 2002, the United States Special Operations Command, Pacific deployed to the Philippines to advise and assist the Armed Forces of the Philippines in combating Filipino Islamist groups. The operations were mainly focused on removing the Abu Sayyaf group and Jemaah Islamiyah (JI) from their stronghold on the island of Basilan. The second portion of the operation was conducted as a humanitarian program called "Operation Smiles". The goal of the program was to provide medical care and services to the region of Basilan as part of a "Hearts and Minds" program. Joint Special Operations Task Force – Philippines disbanded in June 2014, ending a 14-year mission.
After JSOTF-P disbanded, as late as November 2014, American forces continued to operate in the Philippines under the name "PACOM Augmentation Team". +On 14 September 2009, U.S. Special Forces killed two men and wounded and captured two others near the Somali village of Baarawe. Witnesses claim that helicopters used for the operation launched from French-flagged warships, but that could not be confirmed. A Somali-based al-Qaida affiliated group, the Al-Shabaab, has confirmed the death of "sheik commander" Saleh Ali Saleh Nabhan along with an unspecified number of militants. Nabhan, a Kenyan, was wanted in connection with the 2002 Mombasa attacks. +The conflict in northern Mali began in January 2012 with radical Islamists (affiliated to al-Qaeda) advancing into northern Mali. The Malian government had a hard time maintaining full control over their country. The fledgling government requested support from the international community on combating the Islamic militants. In January 2013, France intervened on behalf of the Malian government's request and deployed troops into the region. They launched Operation Serval on 11 January 2013, with the hopes of dislodging the al-Qaeda affiliated groups from northern Mali. +Following the ceasefire agreement that suspended hostilities (but not officially ended) in the 1991 Gulf War, the United States and its allies instituted and began patrolling Iraqi no-fly zones, to protect Iraq's Kurdish and Shi'a Arab population—both of which suffered attacks from the Hussein regime before and after the Gulf War—in Iraq's northern and southern regions, respectively. U.S. forces continued in combat zone deployments through November 1995 and launched Operation Desert Fox against Iraq in 1998 after it failed to meet U.S. demands of "unconditional cooperation" in weapons inspections. 
+The first ground attack came at the Battle of Umm Qasr on 21 March 2003 when a combined force of British, American and Polish forces seized control of the port city of Umm Qasr. Baghdad, Iraq's capital city, fell to American forces in April 2003 and Saddam Hussein's government quickly dissolved. On 1 May 2003, Bush announced that major combat operations in Iraq had ended. However, an insurgency arose against the U.S.-led coalition and the newly developing Iraqi military and post-Saddam government. The insurgency, which included al-Qaeda affiliated groups, led to far more coalition casualties than the invasion. Other elements of the insurgency were led by fugitive members of President Hussein's Ba'ath regime, which included Iraqi nationalists and pan-Arabists. Many insurgency leaders are Islamists and claim to be fighting a religious war to reestablish the Islamic Caliphate of centuries past. Iraq's former president, Saddam Hussein was captured by U.S. forces in December 2003. He was executed in 2006. +In a major split in the ranks of Al Qaeda's organization, the Iraqi franchise, known as Al Qaeda in Iraq covertly invaded Syria and the Levant and began participating in the ongoing Syrian Civil War, gaining enough support and strength to re-invade Iraq's western provinces under the name of the Islamic State of Iraq and the Levant (ISIS/ISIL), taking over much of the country in a blitzkrieg-like action and combining the Iraq insurgency and Syrian Civil War into a single conflict. Due to their extreme brutality and a complete change in their overall ideology, Al Qaeda's core organization in Central Asia eventually denounced ISIS and directed their affiliates to cut off all ties with this organization. Many analysts[who?] believe that because of this schism, Al Qaeda and ISIL are now in a competition to retain the title of the world's most powerful terrorist organization. 
+The Obama administration began to reengage in Iraq with a series of airstrikes aimed at ISIS beginning on 10 August 2014. On 9 September 2014 President Obama said that he had the authority he needed to take action to destroy the militant group known as the Islamic State of Iraq and the Levant, citing the 2001 Authorization for Use of Military Force Against Terrorists, and thus did not require additional approval from Congress. The following day on 10 September 2014 President Barack Obama made a televised speech about ISIL, which he stated "Our objective is clear: We will degrade, and ultimately destroy, ISIL through a comprehensive and sustained counter-terrorism strategy". Obama has authorized the deployment of additional U.S. Forces into Iraq, as well as authorizing direct military operations against ISIL within Syria. On the night of 21/22 September the United States, Saudi Arabia, Bahrain, the UAE, Jordan and Qatar started air attacks against ISIS in Syria.[citation needed] +Following the 11 September 2001 attacks, former President of Pakistan Pervez Musharraf sided with the U.S. against the Taliban government in Afghanistan after an ultimatum by then U.S. President George W. Bush. Musharraf agreed to give the U.S. the use of three airbases for Operation Enduring Freedom. United States Secretary of State Colin Powell and other U.S. administration officials met with Musharraf. On 19 September 2001, Musharraf addressed the people of Pakistan and stated that, while he opposed military tactics against the Taliban, Pakistan risked being endangered by an alliance of India and the U.S. if it did not cooperate. In 2006, Musharraf testified that this stance was pressured by threats from the U.S., and revealed in his memoirs that he had "war-gamed" the United States as an adversary and decided that it would end in a loss for Pakistan. +On 12 January 2002, Musharraf gave a speech against Islamic extremism. 
He unequivocally condemned all acts of terrorism and pledged to combat Islamic extremism and lawlessness within Pakistan itself. He stated that his government was committed to rooting out extremism and made it clear that the banned militant organizations would not be allowed to resurface under any new name. He said, "the recent decision to ban extremist groups promoting militancy was taken in the national interest after thorough consultations. It was not taken under any foreign influence". +In 2002, the Musharraf-led government took a firm stand against the jihadi organizations and groups promoting extremism, and arrested Maulana Masood Azhar, head of the Jaish-e-Mohammed, and Hafiz Muhammad Saeed, chief of the Lashkar-e-Taiba, and took dozens of activists into custody. An official ban was imposed on the groups on 12 January. Later that year, the Saudi born Zayn al-Abidn Muhammed Hasayn Abu Zubaydah was arrested by Pakistani officials during a series of joint U.S.-Pakistan raids. Zubaydah is said to have been a high-ranking al-Qaeda official with the title of operations chief and in charge of running al-Qaeda training camps. Other prominent al-Qaeda members were arrested in the following two years, namely Ramzi bin al-Shibh, who is known to have been a financial backer of al-Qaeda operations, and Khalid Sheikh Mohammed, who at the time of his capture was the third highest-ranking official in al-Qaeda and had been directly in charge of the planning for the 11 September attacks. +The use of drones by the Central Intelligence Agency in Pakistan to carry out operations associated with the Global War on Terror sparks debate over sovereignty and the laws of war. The U.S. Government uses the CIA rather than the U.S. Air Force for strikes in Pakistan in order to avoid breaching sovereignty through military invasion. The United States was criticized by[according to whom?] 
a report on drone warfare and aerial sovereignty for abusing the term 'Global War on Terror' to carry out military operations through government agencies without formally declaring war. +In a 'Letter to American People' written by Osama bin Laden in 2002, he stated that one of the reasons he was fighting America is because of its support of India on the Kashmir issue. While on a trip to Delhi in 2002, U.S. Secretary of Defense Donald Rumsfeld suggested that Al-Qaeda was active in Kashmir, though he did not have any hard evidence. An investigation in 2002 unearthed evidence that Al-Qaeda and its affiliates were prospering in Pakistan-administered Kashmir with tacit approval of Pakistan's National Intelligence agency Inter-Services Intelligence. A team of Special Air Service and Delta Force was sent into Indian-administered Kashmir in 2002 to hunt for Osama bin Laden after reports that he was being sheltered by the Kashmiri militant group Harkat-ul-Mujahideen. U.S. officials believed that Al-Qaeda was helping organize a campaign of terror in Kashmir in order to provoke conflict between India and Pakistan. Fazlur Rehman Khalil, the leader of the Harkat-ul-Mujahideen, signed al-Qaeda's 1998 declaration of holy war, which called on Muslims to attack all Americans and their allies. Indian sources claimed that In 2006, Al-Qaeda claimed they had established a wing in Kashmir; this worried the Indian government. India also claimed that Al-Qaeda has strong ties with the Kashmir militant groups Lashkar-e-Taiba and Jaish-e-Mohammed in Pakistan. While on a visit to Pakistan in January 2010, U.S. Defense secretary Robert Gates stated that Al-Qaeda was seeking to destabilize the region and planning to provoke a nuclear war between India and Pakistan. +In September 2009, a U.S. Drone strike reportedly killed Ilyas Kashmiri, who was the chief of Harkat-ul-Jihad al-Islami, a Kashmiri militant group associated with Al-Qaeda. 
Kashmiri was described by Bruce Riedel as a 'prominent' Al-Qaeda member, while others described him as the head of military operations for Al-Qaeda. Waziristan had now become the new battlefield for Kashmiri militants, who were now fighting NATO in support of Al-Qaeda. On 8 July 2012, Al-Badar Mujahideen, a breakaway faction of the Kashmir-centric terror group Hizbul Mujahideen, on conclusion of their two-day Shuhada Conference called for mobilisation of resources for continuation of jihad in Kashmir. +In the following months, NATO took a wide range of measures to respond to the threat of terrorism. On 22 November 2002, the member states of the Euro-Atlantic Partnership Council (EAPC) decided on a Partnership Action Plan against Terrorism, which explicitly states, "EAPC States are committed to the protection and promotion of fundamental freedoms and human rights, as well as the rule of law, in combating terrorism." NATO started naval operations in the Mediterranean Sea designed to prevent the movement of terrorists or weapons of mass destruction as well as to enhance the security of shipping in general called Operation Active Endeavour. +Support for the U.S. cooled when America made clear its determination to invade Iraq in late 2002. Even so, many of the "coalition of the willing" countries that unconditionally supported the U.S.-led military action have sent troops to Afghanistan, in particular neighboring Pakistan, which has disowned its earlier support for the Taliban and contributed tens of thousands of soldiers to the conflict. Pakistan was also engaged in the War in North-West Pakistan (Waziristan War). Supported by U.S. intelligence, Pakistan was attempting to remove the Taliban insurgency and al-Qaeda element from the northern tribal areas. +The British 16th Air Assault Brigade (later reinforced by Royal Marines) formed the core of the force in southern Afghanistan, along with troops and helicopters from Australia, Canada and the Netherlands. 
The initial force consisted of roughly 3,300 British, 2,000 Canadian, 1,400 from the Netherlands and 240 from Australia, along with special forces from Denmark and Estonia and small contingents from other nations. The monthly supply of cargo containers through Pakistani route to ISAF in Afghanistan is over 4,000 costing around 12 billion in Pakistani Rupees. +In addition to military efforts abroad, in the aftermath of 9/11 the Bush Administration increased domestic efforts to prevent future attacks. Various government bureaucracies that handled security and military functions were reorganized. A new cabinet-level agency called the United States Department of Homeland Security was created in November 2002 to lead and coordinate the largest reorganization of the U.S. federal government since the consolidation of the armed forces into the Department of Defense.[citation needed] +The USA PATRIOT Act of October 2001 dramatically reduces restrictions on law enforcement agencies' ability to search telephone, e-mail communications, medical, financial, and other records; eases restrictions on foreign intelligence gathering within the United States; expands the Secretary of the Treasury's authority to regulate financial transactions, particularly those involving foreign individuals and entities; and broadens the discretion of law enforcement and immigration authorities in detaining and deporting immigrants suspected of terrorism-related acts. The act also expanded the definition of terrorism to include domestic terrorism, thus enlarging the number of activities to which the USA PATRIOT Act's expanded law enforcement powers could be applied. A new Terrorist Finance Tracking Program monitored the movements of terrorists' financial resources (discontinued after being revealed by The New York Times). Global telecommunication usage, including those with no links to terrorism, is being collected and monitored through the NSA electronic surveillance program. 
The Patriot Act is still in effect. +Political interest groups have stated that these laws remove important restrictions on governmental authority, and are a dangerous encroachment on civil liberties and possible unconstitutional violations of the Fourth Amendment. On 30 July 2003, the American Civil Liberties Union (ACLU) filed the first legal challenge against Section 215 of the Patriot Act, claiming that it allows the FBI to violate a citizen's First Amendment rights, Fourth Amendment rights, and right to due process, by granting the government the right to search a person's business, bookstore, and library records in a terrorist investigation, without disclosing to the individual that records were being searched. Also, governing bodies in a number of communities have passed symbolic resolutions against the act. +In 2005, the UN Security Council adopted Resolution 1624 concerning incitement to commit acts of terrorism and the obligations of countries to comply with international human rights laws. Although both resolutions require mandatory annual reports on counter-terrorism activities by adopting nations, the United States and Israel have both declined to submit reports. In the same year, the United States Department of Defense and the Chairman of the Joint Chiefs of Staff issued a planning document, by the name "National Military Strategic Plan for the War on Terrorism", which stated that it constituted the "comprehensive military plan to prosecute the Global War on Terror for the Armed Forces of the United States...including the findings and recommendations of the 9/11 Commission and a rigorous examination with the Department of Defense". +Criticism of the War on Terror addresses the issues, morality, efficiency, economics, and other questions surrounding the War on Terror, as well as criticism made against the phrase itself, calling it a misnomer. 
The notion of a "war" against "terrorism" has proven highly contentious, with critics charging that it has been exploited by participating governments to pursue long-standing policy/military objectives, reduce civil liberties, and infringe upon human rights. It is argued that the term war is not appropriate in this context (as in War on Drugs), since there is no identifiable enemy, and that it is unlikely international terrorism can be brought to an end by military means. +Other critics, such as Francis Fukuyama, note that "terrorism" is not an enemy, but a tactic; calling it a "war on terror" obscures differences between conflicts such as anti-occupation insurgents and international mujahideen. With a military presence in Iraq and Afghanistan and its associated collateral damage, Shirley Williams maintains this increases resentment and terrorist threats against the West. Critics also point to perceived U.S. hypocrisy and media-induced hysteria, and argue that differences in foreign and security policy have damaged America's image in most of the world. 
diff --git a/testbed/deepset-ai__haystack/haystack/components/audio/__init__.py b/testbed/deepset-ai__haystack/haystack/components/audio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c25e2ddeb81f90ab2e5835ce3608517629eea48b --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/audio/__init__.py @@ -0,0 +1,8 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.audio.whisper_local import LocalWhisperTranscriber +from haystack.components.audio.whisper_remote import RemoteWhisperTranscriber + +__all__ = ["LocalWhisperTranscriber", "RemoteWhisperTranscriber"] diff --git a/testbed/deepset-ai__haystack/haystack/components/audio/whisper_local.py b/testbed/deepset-ai__haystack/haystack/components/audio/whisper_local.py new file mode 100644 index 0000000000000000000000000000000000000000..79ac83b144139764fd68d9d45d6ae4771d281455 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/audio/whisper_local.py @@ -0,0 +1,197 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import tempfile +from pathlib import Path +from typing import Any, Dict, List, Literal, Optional, Union, get_args + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport +from haystack.utils import ComponentDevice + +with LazyImport("Run 'pip install \"openai-whisper>=20231106\"' to install whisper.") as whisper_import: + import whisper + + +logger = logging.getLogger(__name__) +WhisperLocalModel = Literal[ + "base", + "base.en", + "large", + "large-v1", + "large-v2", + "large-v3", + "medium", + "medium.en", + "small", + "small.en", + "tiny", + "tiny.en", +] + + +@component +class LocalWhisperTranscriber: + """ + Transcribes audio files using OpenAI's Whisper model on your local machine. 
+ + For the supported audio formats, languages, and other parameters, see the + [Whisper API documentation](https://platform.openai.com/docs/guides/speech-to-text) and the official Whisper + [GitHub repository](https://github.com/openai/whisper). + + ### Usage example + + ```python + from haystack.components.audio import LocalWhisperTranscriber + + whisper = LocalWhisperTranscriber(model="small") + whisper.warm_up() + transcription = whisper.run(sources=["path/to/audio/file"]) + ``` + """ + + def __init__( + self, + model: WhisperLocalModel = "large", + device: Optional[ComponentDevice] = None, + whisper_params: Optional[Dict[str, Any]] = None, + ): + """ + Creates an instance of the LocalWhisperTranscriber component. + + :param model: + The name of the model to use. Set to one of the following models: + "tiny", "base", "small", "medium", "large" (default). + For details on the models and their modifications, see the + [Whisper documentation](https://github.com/openai/whisper?tab=readme-ov-file#available-models-and-languages). + :param device: + The device for loading the model. If `None`, automatically selects the default device. + """ + whisper_import.check() + if model not in get_args(WhisperLocalModel): + raise ValueError( + f"Model name '{model}' not recognized. Choose one among: " f"{', '.join(get_args(WhisperLocalModel))}." + ) + self.model = model + self.whisper_params = whisper_params or {} + self.device = ComponentDevice.resolve_device(device) + self._model = None + + def warm_up(self) -> None: + """ + Loads the model in memory. + """ + if not self._model: + self._model = whisper.load_model(self.model, device=self.device.to_torch()) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. 
+ """ + return default_to_dict(self, model=self.model, device=self.device.to_dict(), whisper_params=self.whisper_params) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "LocalWhisperTranscriber": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + init_params = data["init_parameters"] + if init_params.get("device") is not None: + init_params["device"] = ComponentDevice.from_dict(init_params["device"]) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run(self, sources: List[Union[str, Path, ByteStream]], whisper_params: Optional[Dict[str, Any]] = None): + """ + Transcribes a list of audio files into a list of documents. + + For the supported audio formats, languages, and other parameters, see the + [Whisper API documentation](https://platform.openai.com/docs/guides/speech-to-text) and the official Whisper + [GitHup repo](https://github.com/openai/whisper). + + :param sources: + A list of paths or binary streams to transcribe. + + :returns: A dictionary with the following keys: + - `documents`: A list of documents where each document is a transcribed audio file. The content of + the document is the transcription text, and the document's metadata contains the values returned by + the Whisper model, such as the alignment data and the path to the audio file used + for the transcription. + """ + if self._model is None: + raise RuntimeError( + "The component LocalWhisperTranscriber was not warmed up. Run 'warm_up()' before calling 'run()'." + ) + + if whisper_params is None: + whisper_params = self.whisper_params + + documents = self.transcribe(sources, **whisper_params) + return {"documents": documents} + + def transcribe(self, sources: List[Union[str, Path, ByteStream]], **kwargs) -> List[Document]: + """ + Transcribes the audio files into a list of Documents, one for each input file. 
+ + For the supported audio formats, languages, and other parameters, see the + [Whisper API documentation](https://platform.openai.com/docs/guides/speech-to-text) and the official Whisper + [github repo](https://github.com/openai/whisper). + + :param sources: + A list of paths or binary streams to transcribe. + :returns: + A list of Documents, one for each file. + """ + transcriptions = self._raw_transcribe(sources, **kwargs) + documents = [] + for path, transcript in transcriptions.items(): + content = transcript.pop("text") + doc = Document(content=content, meta={"audio_file": path, **transcript}) + documents.append(doc) + return documents + + def _raw_transcribe(self, sources: List[Union[str, Path, ByteStream]], **kwargs) -> Dict[Path, Any]: + """ + Transcribes the given audio files. Returns the output of the model, a dictionary, for each input file. + + For the supported audio formats, languages, and other parameters, see the + [Whisper API documentation](https://platform.openai.com/docs/guides/speech-to-text) and the official Whisper + [github repo](https://github.com/openai/whisper). + + :param sources: + A list of paths or binary streams to transcribe. + :returns: + A dictionary mapping 'file_path' to 'transcription'. 
+ """ + if self._model is None: + raise RuntimeError("Model is not loaded, please run 'warm_up()' before calling 'run()'") + + return_segments = kwargs.pop("return_segments", False) + transcriptions = {} + + for source in sources: + path = Path(source) if not isinstance(source, ByteStream) else source.meta.get("file_path") + + if isinstance(source, ByteStream) and path is None: + with tempfile.NamedTemporaryFile(delete=False) as fp: + path = Path(fp.name) + source.to_file(path) + + transcription = self._model.transcribe(str(path), **kwargs) + + if not return_segments: + transcription.pop("segments", None) + + transcriptions[path] = transcription + + return transcriptions diff --git a/testbed/deepset-ai__haystack/haystack/components/audio/whisper_remote.py b/testbed/deepset-ai__haystack/haystack/components/audio/whisper_remote.py new file mode 100644 index 0000000000000000000000000000000000000000..da794be349e5933edaed989a7f028bdf984f167e --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/audio/whisper_remote.py @@ -0,0 +1,153 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from openai import OpenAI + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.dataclasses import ByteStream +from haystack.utils import Secret, deserialize_secrets_inplace + +logger = logging.getLogger(__name__) + + +@component +class RemoteWhisperTranscriber: + """ + Transcribes audio files using the OpenAI's Whisper API. + + The component requires an OpenAI API key, see the + [OpenAI documentation](https://platform.openai.com/docs/api-reference/authentication) for more details. + For the supported audio formats, languages, and other parameters, see the + [Whisper API documentation](https://platform.openai.com/docs/guides/speech-to-text). 
+ + ### Usage example + + ```python + from haystack.components.audio import RemoteWhisperTranscriber + + whisper = RemoteWhisperTranscriber(api_key=Secret.from_token(""), model="tiny") + transcription = whisper.run(sources=["path/to/audio/file"]) + ``` + """ + + def __init__( + self, + api_key: Secret = Secret.from_env_var("OPENAI_API_KEY"), + model: str = "whisper-1", + api_base_url: Optional[str] = None, + organization: Optional[str] = None, + **kwargs, + ): + """ + Creates an instance of the RemoteWhisperTranscriber component. + + :param api_key: + OpenAI API key. + You can set it with an environment variable `OPENAI_API_KEY`, or pass with this parameter + during initialization. + :param model: + Name of the model to use. Currently accepts only `whisper-1`. + :param organization: + Your OpenAI organization ID. See OpenAI's documentation on + [Setting Up Your Organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization). + :param api_base: + An optional URL to use as the API base. For details, see the + OpenAI [documentation](https://platform.openai.com/docs/api-reference/audio). + :param kwargs: + Other optional parameters for the model. These are sent directly to the OpenAI + endpoint. See OpenAI [documentation](https://platform.openai.com/docs/api-reference/audio) for more details. + Some of the supported parameters are: + - `language`: The language of the input audio. + Provide the input language in ISO-639-1 format + to improve transcription accuracy and latency. + - `prompt`: An optional text to guide the model's + style or continue a previous audio segment. + The prompt should match the audio language. + - `response_format`: The format of the transcript + output. This component only supports `json`. + - `temperature`: The sampling temperature, between 0 + and 1. Higher values like 0.8 make the output more + random, while lower values like 0.2 make it more + focused and deterministic. 
If set to 0, the model + uses log probability to automatically increase the + temperature until certain thresholds are hit. + """ + + self.organization = organization + self.model = model + self.api_base_url = api_base_url + self.api_key = api_key + + # Only response_format = "json" is supported + whisper_params = kwargs + response_format = whisper_params.get("response_format", "json") + if response_format != "json": + logger.warning( + "RemoteWhisperTranscriber only supports 'response_format: json'. This parameter will be overwritten." + ) + whisper_params["response_format"] = "json" + self.whisper_params = whisper_params + self.client = OpenAI(api_key=api_key.resolve_value(), organization=organization, base_url=api_base_url) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, + api_key=self.api_key.to_dict(), + model=self.model, + organization=self.organization, + api_base_url=self.api_base_url, + **self.whisper_params, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "RemoteWhisperTranscriber": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run(self, sources: List[Union[str, Path, ByteStream]]): + """ + Transcribes the list of audio files into a list of documents. + + :param sources: + A list of file paths or `ByteStream` objects containing the audio files to transcribe. + + :returns: A dictionary with the following keys: + - `documents`: A list of documents, one document for each file. + The content of each document is the transcribed text. 
+ """ + documents = [] + + for source in sources: + if not isinstance(source, ByteStream): + path = source + source = ByteStream.from_file_path(Path(source)) + source.meta["file_path"] = path + + file = io.BytesIO(source.data) + file.name = str(source.meta["file_path"]) if "file_path" in source.meta else "__fallback__.wav" + + content = self.client.audio.transcriptions.create(file=file, model=self.model, **self.whisper_params) + doc = Document(content=content.text, meta=source.meta) + documents.append(doc) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/builders/__init__.py b/testbed/deepset-ai__haystack/haystack/components/builders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5e1ebf6d1152eac0e38b6ce8edf320f3ffe93091 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/builders/__init__.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.builders.answer_builder import AnswerBuilder +from haystack.components.builders.chat_prompt_builder import ChatPromptBuilder +from haystack.components.builders.prompt_builder import PromptBuilder + +__all__ = ["AnswerBuilder", "PromptBuilder", "ChatPromptBuilder"] diff --git a/testbed/deepset-ai__haystack/haystack/components/builders/answer_builder.py b/testbed/deepset-ai__haystack/haystack/components/builders/answer_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..1577fb3e99b89417dbbbd5f9297187e230a0d787 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/builders/answer_builder.py @@ -0,0 +1,176 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import re +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, GeneratedAnswer, component, logging +from haystack.dataclasses.chat_message import ChatMessage + +logger = 
logging.getLogger(__name__) + + +@component +class AnswerBuilder: + """ + Converts a query and Generator replies into a `GeneratedAnswer` object. + + AnswerBuilder parses Generator replies using custom regular expressions. + Check out the usage example below to see how it works. + Optionally, it can also take documents and metadata from the Generator to add to the `GeneratedAnswer` object. + AnswerBuilder works with both non-chat and chat Generators. + + ### Usage example + + ```python + from haystack.components.builders import AnswerBuilder + + builder = AnswerBuilder(pattern="Answer: (.*)") + builder.run(query="What's the answer?", replies=["This is an argument. Answer: This is the answer."]) + ``` + """ + + def __init__(self, pattern: Optional[str] = None, reference_pattern: Optional[str] = None): + """ + Creates an instance of the AnswerBuilder component. + + :param pattern: + The regular expression pattern to extract the answer text from the Generator. + If not specified, the entire response is used as the answer. + The regular expression can have one capture group at most. + If present, the capture group text + is used as the answer. If no capture group is present, the whole match is used as the answer. + Examples: + `[^\\n]+$` finds "this is an answer" in a string "this is an argument.\\nthis is an answer". + `Answer: (.*)` finds "this is an answer" in a string "this is an argument. Answer: this is an answer". + + :param reference_pattern: + The regular expression pattern used for parsing the document references. + If not specified, no parsing is done, and all documents are referenced. + References need to be specified as indices of the input documents and start at [1]. + Example: `\\[(\\d+)\\]` finds "1" in a string "this is an answer[1]". 
+ """ + if pattern: + AnswerBuilder._check_num_groups_in_regex(pattern) + + self.pattern = pattern + self.reference_pattern = reference_pattern + + @component.output_types(answers=List[GeneratedAnswer]) + def run( + self, + query: str, + replies: Union[List[str], List[ChatMessage]], + meta: Optional[List[Dict[str, Any]]] = None, + documents: Optional[List[Document]] = None, + pattern: Optional[str] = None, + reference_pattern: Optional[str] = None, + ): + """ + Turns the output of a Generator into `GeneratedAnswer` objects using regular expressions. + + :param query: + The input query used as the Generator prompt. + :param replies: + The output of the Generator. Can be a list of strings or a list of `ChatMessage` objects. + :param meta: + The metadata returned by the Generator. If not specified, the generated answer will contain no metadata. + :param documents: + The documents used as the Generator inputs. If specified, they are added to + the`GeneratedAnswer` objects. + If both `documents` and `reference_pattern` are specified, the documents referenced in the + Generator output are extracted from the input documents and added to the `GeneratedAnswer` objects. + :param pattern: + The regular expression pattern to extract the answer text from the Generator. + If not specified, the entire response is used as the answer. + The regular expression can have one capture group at most. + If present, the capture group text + is used as the answer. If no capture group is present, the whole match is used as the answer. + Examples: + `[^\\n]+$` finds "this is an answer" in a string "this is an argument.\\nthis is an answer". + `Answer: (.*)` finds "this is an answer" in a string + "this is an argument. Answer: this is an answer". + :param reference_pattern: + The regular expression pattern used for parsing the document references. + If not specified, no parsing is done, and all documents are referenced. 
+ References need to be specified as indices of the input documents and start at [1]. + Example: `\\[(\\d+)\\]` finds "1" in a string "this is an answer[1]". + + :returns: A dictionary with the following keys: + - `answers`: The answers received from the output of the Generator. + """ + if not meta: + meta = [{}] * len(replies) + elif len(replies) != len(meta): + raise ValueError(f"Number of replies ({len(replies)}), and metadata ({len(meta)}) must match.") + + if pattern: + AnswerBuilder._check_num_groups_in_regex(pattern) + + pattern = pattern or self.pattern + reference_pattern = reference_pattern or self.reference_pattern + all_answers = [] + for reply, metadata in zip(replies, meta): + # Extract content from ChatMessage objects if reply is a ChatMessages, else use the string as is + extracted_reply: str = reply.content if isinstance(reply, ChatMessage) else reply # type: ignore + extracted_metadata = reply.meta if isinstance(reply, ChatMessage) else metadata + referenced_docs = [] + if documents: + if reference_pattern: + reference_idxs = AnswerBuilder._extract_reference_idxs(extracted_reply, reference_pattern) + else: + reference_idxs = [doc_idx for doc_idx, _ in enumerate(documents)] + + for idx in reference_idxs: + try: + referenced_docs.append(documents[idx]) + except IndexError: + logger.warning( + "Document index '{index}' referenced in Generator output is out of range. ", index=idx + 1 + ) + + answer_string = AnswerBuilder._extract_answer_string(extracted_reply, pattern) + answer = GeneratedAnswer( + data=answer_string, query=query, documents=referenced_docs, meta=extracted_metadata + ) + all_answers.append(answer) + + return {"answers": all_answers} + + @staticmethod + def _extract_answer_string(reply: str, pattern: Optional[str] = None) -> str: + """ + Extract the answer string from the generator output using the specified pattern. + + If no pattern is specified, the whole string is used as the answer. + + :param reply: + The output of the Generator. 
A string. + :param pattern: + The regular expression pattern to use to extract the answer text from the generator output. + """ + if pattern is None: + return reply + + if match := re.search(pattern, reply): + # No capture group in pattern -> use the whole match as answer + if not match.lastindex: + return match.group(0) + # One capture group in pattern -> use the capture group as answer + return match.group(1) + return "" + + @staticmethod + def _extract_reference_idxs(reply: str, reference_pattern: str) -> List[int]: + document_idxs = re.findall(reference_pattern, reply) + return [int(idx) - 1 for idx in document_idxs] + + @staticmethod + def _check_num_groups_in_regex(pattern: str): + num_groups = re.compile(pattern).groups + if num_groups > 1: + raise ValueError( + f"Pattern '{pattern}' contains multiple capture groups. " + f"Please specify a pattern with at most one capture group." + ) diff --git a/testbed/deepset-ai__haystack/haystack/components/builders/chat_prompt_builder.py b/testbed/deepset-ai__haystack/haystack/components/builders/chat_prompt_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..a8824dffcadb59503ea29ae5c881afca123c5641 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/builders/chat_prompt_builder.py @@ -0,0 +1,254 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from copy import deepcopy +from typing import Any, Dict, List, Optional, Set + +from jinja2 import meta +from jinja2.sandbox import SandboxedEnvironment + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.dataclasses.chat_message import ChatMessage, ChatRole + +logger = logging.getLogger(__name__) + + +@component +class ChatPromptBuilder: + """ + Renders a chat prompt from a template string using Jinja2 syntax. + + It constructs prompts using static or dynamic templates, which you can update for each pipeline run. 
+ + Template variables in the template are optional unless specified otherwise. + If an optional variable isn't provided, it defaults to an empty string. Use `variable` and `required_variables` + to define input types and required variables. + + ### Usage examples + + #### With static prompt template + + ```python + template = [ChatMessage.from_user("Translate to {{ target_language }}. Context: {{ snippet }}; Translation:")] + builder = ChatPromptBuilder(template=template) + builder.run(target_language="spanish", snippet="I can't speak spanish.") + ``` + + #### Overriding static template at runtime + + ```python + template = [ChatMessage.from_user("Translate to {{ target_language }}. Context: {{ snippet }}; Translation:")] + builder = ChatPromptBuilder(template=template) + builder.run(target_language="spanish", snippet="I can't speak spanish.") + + msg = "Translate to {{ target_language }} and summarize. Context: {{ snippet }}; Summary:" + summary_template = [ChatMessage.from_user(msg)] + builder.run(target_language="spanish", snippet="I can't speak spanish.", template=summary_template) + ``` + + #### With dynamic prompt template + + ```python + from haystack.components.builders import ChatPromptBuilder + from haystack.components.generators.chat import OpenAIChatGenerator + from haystack.dataclasses import ChatMessage + from haystack import Pipeline + from haystack.utils import Secret + + # no parameter init, we don't use any runtime template variables + prompt_builder = ChatPromptBuilder() + llm = OpenAIChatGenerator(api_key=Secret.from_token(""), model="gpt-4o-mini") + + pipe = Pipeline() + pipe.add_component("prompt_builder", prompt_builder) + pipe.add_component("llm", llm) + pipe.connect("prompt_builder.prompt", "llm.messages") + + location = "Berlin" + language = "English" + system_message = ChatMessage.from_system("You are an assistant giving information to tourists in {{language}}") + messages = [system_message, ChatMessage.from_user("Tell me about 
{{location}}")] + + res = pipe.run(data={"prompt_builder": {"template_variables": {"location": location, "language": language}, + "template": messages}}) + print(res) + + >> {'llm': {'replies': [ChatMessage(content="Berlin is the capital city of Germany and one of the most vibrant + and diverse cities in Europe. Here are some key things to know...Enjoy your time exploring the vibrant and dynamic + capital of Germany!", role=, name=None, meta={'model': 'gpt-4o-mini', + 'index': 0, 'finish_reason': 'stop', 'usage': {'prompt_tokens': 27, 'completion_tokens': 681, 'total_tokens': + 708}})]}} + + + messages = [system_message, ChatMessage.from_user("What's the weather forecast for {{location}} in the next + {{day_count}} days?")] + + res = pipe.run(data={"prompt_builder": {"template_variables": {"location": location, "day_count": "5"}, + "template": messages}}) + + print(res) + >> {'llm': {'replies': [ChatMessage(content="Here is the weather forecast for Berlin in the next 5 + days:\\n\\nDay 1: Mostly cloudy with a high of 22°C (72°F) and...so it's always a good idea to check for updates + closer to your visit.", role=, name=None, meta={'model': 'gpt-4o-mini', + 'index': 0, 'finish_reason': 'stop', 'usage': {'prompt_tokens': 37, 'completion_tokens': 201, + 'total_tokens': 238}})]}} + ``` + + """ + + def __init__( + self, + template: Optional[List[ChatMessage]] = None, + required_variables: Optional[List[str]] = None, + variables: Optional[List[str]] = None, + ): + """ + Constructs a ChatPromptBuilder component. + + :param template: + A list of `ChatMessage` objects. The component looks for Jinja2 template syntax and + renders the prompt with the provided variables. Provide the template in either + the `init` method` or the `run` method. + :param required_variables: + List variables that must be provided as input to ChatPromptBuilder. + If a variable listed as required is not provided, an exception is raised. Optional. 
+ :param variables: + List input variables to use in prompt templates instead of the ones inferred from the + `template` parameter. For example, to use more variables during prompt engineering than the ones present + in the default template, you can provide them here. + """ + self._variables = variables + self._required_variables = required_variables + self.required_variables = required_variables or [] + self.template = template + variables = variables or [] + self._env = SandboxedEnvironment() + if template and not variables: + for message in template: + if message.is_from(ChatRole.USER) or message.is_from(ChatRole.SYSTEM): + # infere variables from template + ast = self._env.parse(message.content) + template_variables = meta.find_undeclared_variables(ast) + variables += list(template_variables) + + # setup inputs + for var in variables: + if var in self.required_variables: + component.set_input_type(self, var, Any) + else: + component.set_input_type(self, var, Any, "") + + @component.output_types(prompt=List[ChatMessage]) + def run( + self, + template: Optional[List[ChatMessage]] = None, + template_variables: Optional[Dict[str, Any]] = None, + **kwargs, + ): + """ + Renders the prompt template with the provided variables. + + It applies the template variables to render the final prompt. You can provide variables with pipeline kwargs. + To overwrite the default template, you can set the `template` parameter. + To overwrite pipeline kwargs, you can set the `template_variables` parameter. + + :param template: + An optional list of `ChatMessage` objects to overwrite ChatPromptBuilder's default template. + If `None`, the default template provided at initialization is used. + :param template_variables: + An optional dictionary of template variables to overwrite the pipeline variables. + :param kwargs: + Pipeline variables used for rendering the prompt. 
+ + :returns: A dictionary with the following keys: + - `prompt`: The updated list of `ChatMessage` objects after rendering the templates. + :raises ValueError: + If `chat_messages` is empty or contains elements that are not instances of `ChatMessage`. + """ + kwargs = kwargs or {} + template_variables = template_variables or {} + template_variables_combined = {**kwargs, **template_variables} + + if template is None: + template = self.template + + if not template: + raise ValueError( + f"The {self.__class__.__name__} requires a non-empty list of ChatMessage instances. " + f"Please provide a valid list of ChatMessage instances to render the prompt." + ) + + if not all(isinstance(message, ChatMessage) for message in template): + raise ValueError( + f"The {self.__class__.__name__} expects a list containing only ChatMessage instances. " + f"The provided list contains other types. Please ensure that all elements in the list " + f"are ChatMessage instances." + ) + + processed_messages = [] + for message in template: + if message.is_from(ChatRole.USER) or message.is_from(ChatRole.SYSTEM): + self._validate_variables(set(template_variables_combined.keys())) + + compiled_template = self._env.from_string(message.content) + rendered_content = compiled_template.render(template_variables_combined) + # deep copy the message to avoid modifying the original message + rendered_message: ChatMessage = deepcopy(message) + rendered_message.content = rendered_content + processed_messages.append(rendered_message) + else: + processed_messages.append(message) + + return {"prompt": processed_messages} + + def _validate_variables(self, provided_variables: Set[str]): + """ + Checks if all the required template variables are provided. + + :param provided_variables: + A set of provided template variables. + :raises ValueError: + If no template is provided or if all the required template variables are not provided. 
+ """ + missing_variables = [var for var in self.required_variables if var not in provided_variables] + if missing_variables: + missing_vars_str = ", ".join(missing_variables) + raise ValueError( + f"Missing required input variables in ChatPromptBuilder: {missing_vars_str}. " + f"Required variables: {self.required_variables}. Provided variables: {provided_variables}." + ) + + def to_dict(self) -> Dict[str, Any]: + """ + Returns a dictionary representation of the component. + + :returns: + Serialized dictionary representation of the component. + """ + if self.template is not None: + template = [m.to_dict() for m in self.template] + else: + template = None + + return default_to_dict( + self, template=template, variables=self._variables, required_variables=self._required_variables + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ChatPromptBuilder": + """ + Deserialize this component from a dictionary. + + :param data: + The dictionary to deserialize and create the component. + + :returns: + The deserialized component. 
+ """ + init_parameters = data["init_parameters"] + template = init_parameters.get("template") + if template: + init_parameters["template"] = [ChatMessage.from_dict(d) for d in template] + + return default_from_dict(cls, data) diff --git a/testbed/deepset-ai__haystack/haystack/components/builders/prompt_builder.py b/testbed/deepset-ai__haystack/haystack/components/builders/prompt_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..3cb29e121113d32d3732367aa6372a21f6f52c20 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/builders/prompt_builder.py @@ -0,0 +1,247 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Optional, Set + +from jinja2 import meta +from jinja2.sandbox import SandboxedEnvironment + +from haystack import component, default_to_dict +from haystack.utils import Jinja2TimeExtension + + +@component +class PromptBuilder: + """ + + Renders a prompt filling in any variables so that it can send it to a Generator. + + The prompt uses Jinja2 template syntax. + The variables in the default template are used as PromptBuilder's input and are all optional. + If they're not provided, they're replaced with an empty string in the rendered prompt. + To try out different prompts, you can replace the prompt template at runtime by + providing a template for each pipeline run invocation. + + ### Usage examples + + #### On its own + + This example uses PromptBuilder to render a prompt template and fill it with `target_language` + and `snippet`. PromptBuilder returns a prompt with the string "Translate the following context to Spanish. + Context: I can't speak Spanish.; Translation:". + ```python + from haystack.components.builders import PromptBuilder + + template = "Translate the following context to {{ target_language }}. 
Context: {{ snippet }}; Translation:" + builder = PromptBuilder(template=template) + builder.run(target_language="spanish", snippet="I can't speak spanish.") + ``` + + #### In a Pipeline + + This is an example of a RAG pipeline where PromptBuilder renders a custom prompt template and fills it + with the contents of the retrieved documents and a query. The rendered prompt is then sent to a Generator. + ```python + from haystack import Pipeline, Document + from haystack.utils import Secret + from haystack.components.generators import OpenAIGenerator + from haystack.components.builders.prompt_builder import PromptBuilder + + # in a real world use case documents could come from a retriever, web, or any other source + documents = [Document(content="Joe lives in Berlin"), Document(content="Joe is a software engineer")] + prompt_template = \"\"\" + Given these documents, answer the question. + Documents: + {% for doc in documents %} + {{ doc.content }} + {% endfor %} + + Question: {{query}} + Answer: + \"\"\" + p = Pipeline() + p.add_component(instance=PromptBuilder(template=prompt_template), name="prompt_builder") + p.add_component(instance=OpenAIGenerator(api_key=Secret.from_env_var("OPENAI_API_KEY")), name="llm") + p.connect("prompt_builder", "llm") + + question = "Where does Joe live?" + result = p.run({"prompt_builder": {"documents": documents, "query": question}}) + print(result) + ``` + + #### Changing the template at runtime (prompt engineering) + + You can change the prompt template of an existing pipeline, like in this example: + ```python + documents = [ + Document(content="Joe lives in Berlin", meta={"name": "doc1"}), + Document(content="Joe is a software engineer", meta={"name": "doc1"}), + ] + new_template = \"\"\" + You are a helpful assistant. + Given these documents, answer the question. 
+ Documents: + {% for doc in documents %} + Document {{ loop.index }}: + Document name: {{ doc.meta['name'] }} + {{ doc.content }} + {% endfor %} + + Question: {{ query }} + Answer: + \"\"\" + p.run({ + "prompt_builder": { + "documents": documents, + "query": question, + "template": new_template, + }, + }) + ``` + To replace the variables in the default template when testing your prompt, + pass the new variables in the `variables` parameter. + + #### Overwriting variables at runtime + + To overwrite the values of variables, use `template_variables` during runtime: + ```python + language_template = \"\"\" + You are a helpful assistant. + Given these documents, answer the question. + Documents: + {% for doc in documents %} + Document {{ loop.index }}: + Document name: {{ doc.meta['name'] }} + {{ doc.content }} + {% endfor %} + + Question: {{ query }} + Please provide your answer in {{ answer_language | default('English') }} + Answer: + \"\"\" + p.run({ + "prompt_builder": { + "documents": documents, + "query": question, + "template": language_template, + "template_variables": {"answer_language": "German"}, + }, + }) + ``` + Note that `language_template` introduces variable `answer_language` which is not bound to any pipeline variable. + If not set otherwise, it will use its default value 'English'. + This example overwrites its value to 'German'. + Use `template_variables` to overwrite pipeline variables (such as documents) as well. + + """ + + def __init__( + self, template: str, required_variables: Optional[List[str]] = None, variables: Optional[List[str]] = None + ): + """ + Constructs a PromptBuilder component. + + :param template: + A prompt template that uses Jinja2 syntax to add variables. For example: + `"Summarize this document: {{ documents[0].content }}\\nSummary:"` + It's used to render the prompt. + The variables in the default template are input for PromptBuilder and are all optional, + unless explicitly specified. 
+ If an optional variable is not provided, it's replaced with an empty string in the rendered prompt. + :param required_variables: List variables that must be provided as input to PromptBuilder. + If a variable listed as required is not provided, an exception is raised. Optional. + :param variables: + List input variables to use in prompt templates instead of the ones inferred from the + `template` parameter. For example, to use more variables during prompt engineering than the ones present + in the default template, you can provide them here. + """ + self._template_string = template + self._variables = variables + self._required_variables = required_variables + self.required_variables = required_variables or [] + try: + # The Jinja2TimeExtension needs an optional dependency to be installed. + # If it's not available we can do without it and use the PromptBuilder as is. + self._env = SandboxedEnvironment(extensions=[Jinja2TimeExtension]) + except ImportError: + self._env = SandboxedEnvironment() + + self.template = self._env.from_string(template) + if not variables: + # infer variables from template + ast = self._env.parse(template) + template_variables = meta.find_undeclared_variables(ast) + variables = list(template_variables) + + variables = variables or [] + + # setup inputs + for var in variables: + if var in self.required_variables: + component.set_input_type(self, var, Any) + else: + component.set_input_type(self, var, Any, "") + + def to_dict(self) -> Dict[str, Any]: + """ + Returns a dictionary representation of the component. + + :returns: + Serialized dictionary representation of the component. + """ + return default_to_dict( + self, template=self._template_string, variables=self._variables, required_variables=self._required_variables + ) + + @component.output_types(prompt=str) + def run(self, template: Optional[str] = None, template_variables: Optional[Dict[str, Any]] = None, **kwargs): + """ + Renders the prompt template with the provided variables. 
+ + It applies the template variables to render the final prompt. You can provide variables via pipeline kwargs. + In order to overwrite the default template, you can set the `template` parameter. + In order to overwrite pipeline kwargs, you can set the `template_variables` parameter. + + :param template: + An optional string template to overwrite PromptBuilder's default template. If None, the default template + provided at initialization is used. + :param template_variables: + An optional dictionary of template variables to overwrite the pipeline variables. + :param kwargs: + Pipeline variables used for rendering the prompt. + + :returns: A dictionary with the following keys: + - `prompt`: The updated prompt text after rendering the prompt template. + + :raises ValueError: + If any of the required template variables is not provided. + """ + kwargs = kwargs or {} + template_variables = template_variables or {} + template_variables_combined = {**kwargs, **template_variables} + self._validate_variables(set(template_variables_combined.keys())) + + compiled_template = self.template + if template is not None: + compiled_template = self._env.from_string(template) + + result = compiled_template.render(template_variables_combined) + return {"prompt": result} + + def _validate_variables(self, provided_variables: Set[str]): + """ + Checks if all the required template variables are provided. + + :param provided_variables: + A set of provided template variables. + :raises ValueError: + If any of the required template variables is not provided. + """ + missing_variables = [var for var in self.required_variables if var not in provided_variables] + if missing_variables: + missing_vars_str = ", ".join(missing_variables) + raise ValueError( + f"Missing required input variables in PromptBuilder: {missing_vars_str}. " + f"Required variables: {self.required_variables}. Provided variables: {provided_variables}." 
+ ) diff --git a/testbed/deepset-ai__haystack/haystack/components/caching/__init__.py b/testbed/deepset-ai__haystack/haystack/components/caching/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e922713dc469212be43ba32db4fa6e2b74ca9313 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/caching/__init__.py @@ -0,0 +1,7 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.caching.cache_checker import CacheChecker + +__all__ = ["CacheChecker"] diff --git a/testbed/deepset-ai__haystack/haystack/components/caching/cache_checker.py b/testbed/deepset-ai__haystack/haystack/components/caching/cache_checker.py new file mode 100644 index 0000000000000000000000000000000000000000..b67608b07551b4578fded49bb5a6f1fbcfe3b9be --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/caching/cache_checker.py @@ -0,0 +1,102 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.document_stores.types import DocumentStore +from haystack.utils import deserialize_document_store_in_init_params_inplace + +logger = logging.getLogger(__name__) + + +@component +class CacheChecker: + """ + Checks for the presence of documents in a Document Store based on a specified field in each document's metadata. + + If matching documents are found, they are returned as "hits". If not found in the cache, the items + are returned as "misses". 
+ + ### Usage example + + ```python + from haystack import Document + from haystack.document_stores.in_memory import InMemoryDocumentStore + from haystack.components.caching.cache_checker import CacheChecker + + docstore = InMemoryDocumentStore() + documents = [ + Document(content="doc1", meta={"url": "https://example.com/1"}), + Document(content="doc2", meta={"url": "https://example.com/2"}), + Document(content="doc3", meta={"url": "https://example.com/1"}), + Document(content="doc4", meta={"url": "https://example.com/2"}), + ] + docstore.write_documents(documents) + checker = CacheChecker(docstore, cache_field="url") + results = checker.run(items=["https://example.com/1", "https://example.com/5"]) + assert results == {"hits": [documents[0], documents[2]], "misses": ["https://example.com/5"]} + ``` + """ + + def __init__(self, document_store: DocumentStore, cache_field: str): + """ + Creates a CacheChecker component. + + :param document_store: + Document Store to check for the presence of specific documents. + :param cache_field: + Name of the document's metadata field + to check for cache hits. + """ + self.document_store = document_store + self.cache_field = cache_field + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict(self, document_store=self.document_store.to_dict(), cache_field=self.cache_field) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "CacheChecker": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. 
+ """ + # deserialize the document store + deserialize_document_store_in_init_params_inplace(data) + + return default_from_dict(cls, data) + + @component.output_types(hits=List[Document], misses=List) + def run(self, items: List[Any]): + """ + Checks if any document associated with the specified cache field is already present in the store. + + :param items: + Values to be checked against the cache field. + :return: + A dictionary with two keys: + - `hits` - Documents that matched with at least one of the items. + - `misses` - Items that were not present in any documents. + """ + found_documents = [] + misses = [] + + for item in items: + filters = {"field": self.cache_field, "operator": "==", "value": item} + found = self.document_store.filter_documents(filters=filters) + if found: + found_documents.extend(found) + else: + misses.append(item) + return {"hits": found_documents, "misses": misses} diff --git a/testbed/deepset-ai__haystack/haystack/components/classifiers/__init__.py b/testbed/deepset-ai__haystack/haystack/components/classifiers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..662df14d82de7578a5938225de5a0d2626e93141 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/classifiers/__init__.py @@ -0,0 +1,8 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.classifiers.document_language_classifier import DocumentLanguageClassifier +from haystack.components.classifiers.zero_shot_document_classifier import TransformersZeroShotDocumentClassifier + +__all__ = ["DocumentLanguageClassifier", "TransformersZeroShotDocumentClassifier"] diff --git a/testbed/deepset-ai__haystack/haystack/components/classifiers/document_language_classifier.py b/testbed/deepset-ai__haystack/haystack/components/classifiers/document_language_classifier.py new file mode 100644 index 0000000000000000000000000000000000000000..ed9b42b5d5472b5b4c77d4ea9bb0dd152925b9c8 
--- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/classifiers/document_language_classifier.py @@ -0,0 +1,109 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Dict, List, Optional + +from haystack import Document, component, logging +from haystack.lazy_imports import LazyImport + +logger = logging.getLogger(__name__) + +with LazyImport("Run 'pip install langdetect'") as langdetect_import: + import langdetect + + +@component +class DocumentLanguageClassifier: + """ + Classifies the language of each document and adds it to its metadata. + + Provide a list of languages during initialization. If the document's text doesn't match any of the + specified languages, the metadata value is set to "unmatched". + To route documents based on their language, use the MetadataRouter component after DocumentLanguageClassifier. + For routing plain text, use the TextLanguageRouter component instead. + + ### Usage example + + ```python + from haystack import Document, Pipeline + from haystack.document_stores.in_memory import InMemoryDocumentStore + from haystack.components.classifiers import DocumentLanguageClassifier + from haystack.components.routers import MetadataRouter + from haystack.components.writers import DocumentWriter + + docs = [Document(id="1", content="This is an English document"), + Document(id="2", content="Este es un documento en español")] + + document_store = InMemoryDocumentStore() + + p = Pipeline() + p.add_component(instance=DocumentLanguageClassifier(languages=["en"]), name="language_classifier") + p.add_component(instance=MetadataRouter(rules={"en": {"language": {"$eq": "en"}}}), name="router") + p.add_component(instance=DocumentWriter(document_store=document_store), name="writer") + p.connect("language_classifier.documents", "router.documents") + p.connect("router.en", "writer.documents") + + p.run({"language_classifier": {"documents": docs}}) + + written_docs = 
document_store.filter_documents() + assert len(written_docs) == 1 + assert written_docs[0] == Document(id="1", content="This is an English document", meta={"language": "en"}) + ``` + """ + + def __init__(self, languages: Optional[List[str]] = None): + """ + Initializes the DocumentLanguageClassifier component. + + :param languages: A list of ISO language codes. + See the supported languages in [`langdetect` documentation](https://github.com/Mimino666/langdetect#languages). + If not specified, defaults to ["en"]. + """ + langdetect_import.check() + if not languages: + languages = ["en"] + self.languages = languages + + @component.output_types(documents=List[Document]) + def run(self, documents: List[Document]): + """ + Classifies the language of each document and adds it to its metadata. + + If the document's text doesn't match any of the languages specified at initialization, + sets the metadata value to "unmatched". + + :param documents: A list of documents for language classification. + + :returns: A dictionary with the following key: + - `documents`: A list of documents with an added `language` metadata field. + + :raises TypeError: if the input is not a list of Documents. + """ + if not isinstance(documents, list) or documents and not isinstance(documents[0], Document): + raise TypeError( + "DocumentLanguageClassifier expects a list of Document as input. " + "In case you want to classify a text, please use the TextLanguageClassifier." 
+ ) + + output: Dict[str, List[Document]] = {language: [] for language in self.languages} + output["unmatched"] = [] + + for document in documents: + detected_language = self._detect_language(document) + if detected_language in self.languages: + document.meta["language"] = detected_language + else: + document.meta["language"] = "unmatched" + + return {"documents": documents} + + def _detect_language(self, document: Document) -> Optional[str]: + try: + language = langdetect.detect(document.content) + except langdetect.LangDetectException: + logger.warning( + "Langdetect cannot detect the language of Document with id: {document_id}", document_id=document.id + ) + language = None + return language diff --git a/testbed/deepset-ai__haystack/haystack/components/classifiers/zero_shot_document_classifier.py b/testbed/deepset-ai__haystack/haystack/components/classifiers/zero_shot_document_classifier.py new file mode 100644 index 0000000000000000000000000000000000000000..5aa52fde80fc3c55a1d89f4c3ca68f9fc36566e3 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/classifiers/zero_shot_document_classifier.py @@ -0,0 +1,245 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Optional + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.lazy_imports import LazyImport +from haystack.utils import ComponentDevice, Secret, deserialize_secrets_inplace +from haystack.utils.hf import deserialize_hf_model_kwargs, resolve_hf_pipeline_kwargs, serialize_hf_model_kwargs + +logger = logging.getLogger(__name__) + + +with LazyImport(message="Run 'pip install transformers[torch,sentencepiece]'") as torch_and_transformers_import: + from transformers import pipeline + + +@component +class TransformersZeroShotDocumentClassifier: + """ + Performs zero-shot classification of documents based on given labels and adds the predicted label to their 
metadata. + + The component uses a Hugging Face pipeline for zero-shot classification. + Provide the model and the set of labels to be used for categorization during initialization. + Additionally, you can configure the component to allow multiple labels to be true. + + Classification is run on the document's content field by default. If you want it to run on another field, set the + `classification_field` to one of the document's metadata fields. + + Available models for the task of zero-shot-classification include: + - `valhalla/distilbart-mnli-12-3` + - `cross-encoder/nli-distilroberta-base` + - `cross-encoder/nli-deberta-v3-xsmall` + + ### Usage example + + The following is a pipeline that classifies documents based on predefined classification labels + retrieved from a search pipeline: + + ```python + from haystack import Document + from haystack.components.retrievers.in_memory import InMemoryBM25Retriever + from haystack.document_stores.in_memory import InMemoryDocumentStore + from haystack.core.pipeline import Pipeline + from haystack.components.classifiers import TransformersZeroShotDocumentClassifier + + documents = [Document(id="0", content="Today was a nice day!"), + Document(id="1", content="Yesterday was a bad day!")] + + document_store = InMemoryDocumentStore() + retriever = InMemoryBM25Retriever(document_store=document_store) + document_classifier = TransformersZeroShotDocumentClassifier( + model="cross-encoder/nli-deberta-v3-xsmall", + labels=["positive", "negative"], + ) + + document_store.write_documents(documents) + + pipeline = Pipeline() + pipeline.add_component(instance=retriever, name="retriever") + pipeline.add_component(instance=document_classifier, name="document_classifier") + pipeline.connect("retriever", "document_classifier") + + queries = ["How was your day today?", "How was your day yesterday?"] + expected_predictions = ["positive", "negative"] + + for idx, query in enumerate(queries): + result = pipeline.run({"retriever": {"query": 
query, "top_k": 1}}) + assert result["document_classifier"]["documents"][0].to_dict()["id"] == str(idx) + assert (result["document_classifier"]["documents"][0].to_dict()["classification"]["label"] + == expected_predictions[idx]) + ``` + """ + + def __init__( + self, + model: str, + labels: List[str], + multi_label: bool = False, + classification_field: Optional[str] = None, + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + huggingface_pipeline_kwargs: Optional[Dict[str, Any]] = None, + ): + """ + Initializes the TransformersZeroShotDocumentClassifier. + + See the Hugging Face [website](https://huggingface.co/models?pipeline_tag=zero-shot-classification&sort=downloads&search=nli) + for the full list of zero-shot classification models (NLI) models. + + :param model: + The name or path of a Hugging Face model for zero shot document classification. + :param labels: + The set of possible class labels to classify each document into, for example, + ["positive", "negative"]. The labels depend on the selected model. + :param multi_label: + Whether or not multiple candidate labels can be true. + If `False`, the scores are normalized such that + the sum of the label likelihoods for each sequence is 1. If `True`, the labels are considered + independent and probabilities are normalized for each candidate by doing a softmax of the entailment + score vs. the contradiction score. + :param classification_field: + Name of document's meta field to be used for classification. + If not set, `Document.content` is used by default. + :param device: + The device on which the model is loaded. If `None`, the default device is automatically + selected. If a device/device map is specified in `huggingface_pipeline_kwargs`, it overrides this parameter. + :param token: + The Hugging Face token to use as HTTP bearer authorization. 
+ Check your HF token in your [account settings](https://huggingface.co/settings/tokens). + :param huggingface_pipeline_kwargs: + Dictionary containing keyword arguments used to initialize the + Hugging Face pipeline for text classification. + """ + + torch_and_transformers_import.check() + + self.classification_field = classification_field + + self.token = token + self.labels = labels + self.multi_label = multi_label + + huggingface_pipeline_kwargs = resolve_hf_pipeline_kwargs( + huggingface_pipeline_kwargs=huggingface_pipeline_kwargs or {}, + model=model, + task="zero-shot-classification", + supported_tasks=["zero-shot-classification"], + device=device, + token=token, + ) + + self.huggingface_pipeline_kwargs = huggingface_pipeline_kwargs + self.pipeline = None + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + if isinstance(self.huggingface_pipeline_kwargs["model"], str): + return {"model": self.huggingface_pipeline_kwargs["model"]} + return {"model": f"[object of type {type(self.huggingface_pipeline_kwargs['model'])}]"} + + def warm_up(self): + """ + Initializes the component. + """ + if self.pipeline is None: + self.pipeline = pipeline(**self.huggingface_pipeline_kwargs) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. 
+ """ + serialization_dict = default_to_dict( + self, + labels=self.labels, + model=self.huggingface_pipeline_kwargs["model"], + huggingface_pipeline_kwargs=self.huggingface_pipeline_kwargs, + token=self.token.to_dict() if self.token else None, + ) + + huggingface_pipeline_kwargs = serialization_dict["init_parameters"]["huggingface_pipeline_kwargs"] + huggingface_pipeline_kwargs.pop("token", None) + + serialize_hf_model_kwargs(huggingface_pipeline_kwargs) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "TransformersZeroShotDocumentClassifier": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["token"]) + if data["init_parameters"].get("huggingface_pipeline_kwargs") is not None: + deserialize_hf_model_kwargs(data["init_parameters"]["huggingface_pipeline_kwargs"]) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run(self, documents: List[Document], batch_size: int = 1): + """ + Classifies the documents based on the provided labels and adds them to their metadata. + + The classification results are stored in the `classification` dict within + each document's metadata. If `multi_label` is set to `True`, the scores for each label are available under + the `details` key within the `classification` dictionary. + + :param documents: + Documents to process. + :param batch_size: + Batch size used for processing the content in each document. + :returns: + A dictionary with the following key: + - `documents`: A list of documents with an added metadata field called `classification`. + """ + + if self.pipeline is None: + raise RuntimeError( + "The component TransformerZeroShotDocumentClassifier wasn't warmed up. " + "Run 'warm_up()' before calling 'run()'." 
+ ) + + if not isinstance(documents, list) or documents and not isinstance(documents[0], Document): + raise TypeError( + "DocumentLanguageClassifier expects a list of documents as input. " + "In case you want to classify a text, please use the TextLanguageClassifier." + ) + + invalid_doc_ids = [] + + for doc in documents: + if self.classification_field is not None and self.classification_field not in doc.meta: + invalid_doc_ids.append(doc.id) + + if invalid_doc_ids: + raise ValueError( + f"The following documents do not have the classification field '{self.classification_field}': " + f"{', '.join(invalid_doc_ids)}" + ) + + texts = [ + (doc.content if self.classification_field is None else doc.meta[self.classification_field]) + for doc in documents + ] + + predictions = self.pipeline(texts, self.labels, multi_label=self.multi_label, batch_size=batch_size) + + for prediction, document in zip(predictions, documents): + formatted_prediction = { + "label": prediction["labels"][0], + "score": prediction["scores"][0], + "details": dict(zip(prediction["labels"], prediction["scores"])), + } + document.meta["classification"] = formatted_prediction + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/__init__.py b/testbed/deepset-ai__haystack/haystack/components/converters/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4561dd1e0a2d06eae06adbc7ebce92b589d401b8 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/__init__.py @@ -0,0 +1,34 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.converters.azure import AzureOCRDocumentConverter +from haystack.components.converters.csv import CSVToDocument +from haystack.components.converters.docx import DOCXMetadata, DOCXToDocument +from haystack.components.converters.html import HTMLToDocument +from haystack.components.converters.json import 
JSONConverter +from haystack.components.converters.markdown import MarkdownToDocument +from haystack.components.converters.openapi_functions import OpenAPIServiceToFunctions +from haystack.components.converters.output_adapter import OutputAdapter +from haystack.components.converters.pdfminer import PDFMinerToDocument +from haystack.components.converters.pptx import PPTXToDocument +from haystack.components.converters.pypdf import PyPDFToDocument +from haystack.components.converters.tika import TikaDocumentConverter +from haystack.components.converters.txt import TextFileToDocument + +__all__ = [ + "TextFileToDocument", + "TikaDocumentConverter", + "AzureOCRDocumentConverter", + "PyPDFToDocument", + "PDFMinerToDocument", + "HTMLToDocument", + "MarkdownToDocument", + "OpenAPIServiceToFunctions", + "OutputAdapter", + "DOCXToDocument", + "DOCXMetadata", + "PPTXToDocument", + "CSVToDocument", + "JSONConverter", +] diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/azure.py b/testbed/deepset-ai__haystack/haystack/components/converters/azure.py new file mode 100644 index 0000000000000000000000000000000000000000..94ca9714f3b481eae7e6e90a96852208e1ee4fc3 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/azure.py @@ -0,0 +1,497 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import copy +import hashlib +from collections import defaultdict +from pathlib import Path +from typing import Any, Dict, List, Literal, Optional, Union + +import networkx as nx +import pandas as pd + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport +from haystack.utils import Secret, deserialize_secrets_inplace + +logger = logging.getLogger(__name__) + +with 
LazyImport(message="Run 'pip install \"azure-ai-formrecognizer>=3.2.0b2\"'") as azure_import: + from azure.ai.formrecognizer import AnalyzeResult, DocumentAnalysisClient, DocumentLine, DocumentParagraph + from azure.core.credentials import AzureKeyCredential + + +@component +class AzureOCRDocumentConverter: + """ + Converts files to documents using Azure's Document Intelligence service. + + Supported file formats are: PDF, JPEG, PNG, BMP, TIFF, DOCX, XLSX, PPTX, and HTML. + + To use this component, you need an active Azure account + and a Document Intelligence or Cognitive Services resource. For help with setting up your resource, see + [Azure documentation](https://learn.microsoft.com/en-us/azure/ai-services/document-intelligence/quickstarts/get-started-sdks-rest-api). + + ### Usage example + + ```python + from haystack.components.converters import AzureOCRDocumentConverter + from haystack.utils import Secret + + converter = AzureOCRDocumentConverter(endpoint="", api_key=Secret.from_token("")) + results = converter.run(sources=["path/to/doc_with_images.pdf"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the PDF file.' + ``` + """ + + def __init__( + self, + endpoint: str, + api_key: Secret = Secret.from_env_var("AZURE_AI_API_KEY"), + model_id: str = "prebuilt-read", + preceding_context_len: int = 3, + following_context_len: int = 3, + merge_multiple_column_headers: bool = True, + page_layout: Literal["natural", "single_column"] = "natural", + threshold_y: Optional[float] = 0.05, + ): + """ + Creates an AzureOCRDocumentConverter component. + + :param endpoint: + The endpoint of your Azure resource. + :param api_key: + The API key of your Azure resource. + :param model_id: + The ID of the model you want to use. For a list of available models, see [Azure documentation] + (https://learn.microsoft.com/en-us/azure/ai-services/document-intelligence/choose-model-feature). 
+ :param preceding_context_len: Number of lines before a table to include as preceding context + (this will be added to the metadata). + :param following_context_len: Number of lines after a table to include as subsequent context ( + this will be added to the metadata). + :param merge_multiple_column_headers: If `True`, merges multiple column header rows into a single row. + :param page_layout: The type reading order to follow. Possible options: + - `natural`: Uses the natural reading order determined by Azure. + - `single_column`: Groups all lines with the same height on the page based on a threshold + determined by `threshold_y`. + :param threshold_y: Only relevant if `single_column` is set to `page_layout`. + The threshold, in inches, to determine if two recognized PDF elements are grouped into a + single line. This is crucial for section headers or numbers which may be spatially separated + from the remaining text on the horizontal axis. + """ + azure_import.check() + + self.document_analysis_client = DocumentAnalysisClient( + endpoint=endpoint, credential=AzureKeyCredential(api_key.resolve_value() or "") + ) # type: ignore + self.endpoint = endpoint + self.model_id = model_id + self.api_key = api_key + self.preceding_context_len = preceding_context_len + self.following_context_len = following_context_len + self.merge_multiple_column_headers = merge_multiple_column_headers + self.page_layout = page_layout + self.threshold_y = threshold_y + if self.page_layout == "single_column" and self.threshold_y is None: + self.threshold_y = 0.05 + + @component.output_types(documents=List[Document], raw_azure_response=List[Dict]) + def run(self, sources: List[Union[str, Path, ByteStream]], meta: Optional[List[Dict[str, Any]]] = None): + """ + Convert a list of files to Documents using Azure's Document Intelligence service. + + :param sources: + List of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. 
+ This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will be + zipped. If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. + + :returns: + A dictionary with the following keys: + - `documents`: List of created Documents + - `raw_azure_response`: List of raw Azure responses used to create the Documents + """ + documents = [] + azure_output = [] + meta_list: List[Dict[str, Any]] = normalize_metadata(meta=meta, sources_count=len(sources)) + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source=source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + + poller = self.document_analysis_client.begin_analyze_document( + model_id=self.model_id, document=bytestream.data + ) + result = poller.result() + azure_output.append(result.to_dict()) + + merged_metadata = {**bytestream.meta, **metadata} + docs = self._convert_tables_and_text(result=result, meta=merged_metadata) + documents.extend(docs) + + return {"documents": documents, "raw_azure_response": azure_output} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. 
+ """ + return default_to_dict( + self, + api_key=self.api_key.to_dict(), + endpoint=self.endpoint, + model_id=self.model_id, + preceding_context_len=self.preceding_context_len, + following_context_len=self.following_context_len, + merge_multiple_column_headers=self.merge_multiple_column_headers, + page_layout=self.page_layout, + threshold_y=self.threshold_y, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "AzureOCRDocumentConverter": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + return default_from_dict(cls, data) + + # pylint: disable=line-too-long + def _convert_tables_and_text(self, result: "AnalyzeResult", meta: Optional[Dict[str, Any]]) -> List[Document]: + """ + Converts the tables and text extracted by Azure's Document Intelligence service into Haystack Documents. + + :param result: The AnalyzeResult object returned by the `begin_analyze_document` method. Docs on Analyze result + can be found [here](https://azuresdkdocs.blob.core.windows.net/$web/python/azure-ai-formrecognizer/3.3.0/azure.ai.formrecognizer.html?highlight=read#azure.ai.formrecognizer.AnalyzeResult). + :param meta: Optional dictionary with metadata that shall be attached to all resulting documents. + Can be any custom keys and values. + :returns: List of Documents containing the tables and text extracted from the AnalyzeResult object. 
+ """ + tables = self._convert_tables(result=result, meta=meta) + if self.page_layout == "natural": + text = self._convert_to_natural_text(result=result, meta=meta) + else: + assert isinstance(self.threshold_y, float) + text = self._convert_to_single_column_text(result=result, meta=meta, threshold_y=self.threshold_y) + docs = [*tables, text] + return docs + + def _convert_tables(self, result: "AnalyzeResult", meta: Optional[Dict[str, Any]]) -> List[Document]: + """ + Converts the tables extracted by Azure's Document Intelligence service into Haystack Documents. + + :param result: The AnalyzeResult Azure object + :param meta: Optional dictionary with metadata that shall be attached to all resulting documents. + + :returns: List of Documents containing the tables extracted from the AnalyzeResult object. + """ + converted_tables: List[Document] = [] + + if not result.tables: + return converted_tables + + for table in result.tables: + # Initialize table with empty cells + table_list = [[""] * table.column_count for _ in range(table.row_count)] + additional_column_header_rows = set() + caption = "" + row_idx_start = 0 + + for idx, cell in enumerate(table.cells): + # Remove ':selected:'/':unselected:' tags from cell's content + cell.content = cell.content.replace(":selected:", "") + cell.content = cell.content.replace(":unselected:", "") + + # Check if first row is a merged cell spanning whole table + # -> exclude this row and use as a caption + if idx == 0 and cell.column_span == table.column_count: + caption = cell.content + row_idx_start = 1 + table_list.pop(0) + continue + + column_span = cell.column_span if cell.column_span else 0 + for c in range(column_span): # pylint: disable=invalid-name + row_span = cell.row_span if cell.row_span else 0 + for r in range(row_span): # pylint: disable=invalid-name + if ( + self.merge_multiple_column_headers + and cell.kind == "columnHeader" + and cell.row_index > row_idx_start + ): + # More than one row serves as column header + 
table_list[0][cell.column_index + c] += f"\n{cell.content}" + additional_column_header_rows.add(cell.row_index - row_idx_start) + else: + table_list[cell.row_index + r - row_idx_start][cell.column_index + c] = cell.content + + # Remove additional column header rows, as these got attached to the first row + for row_idx in sorted(additional_column_header_rows, reverse=True): + del table_list[row_idx] + + # Get preceding context of table + if table.bounding_regions: + table_beginning_page = next( + page for page in result.pages if page.page_number == table.bounding_regions[0].page_number + ) + else: + table_beginning_page = None + table_start_offset = table.spans[0].offset + if table_beginning_page and table_beginning_page.lines: + preceding_lines = [ + line.content for line in table_beginning_page.lines if line.spans[0].offset < table_start_offset + ] + else: + preceding_lines = [] + preceding_context = "\n".join(preceding_lines[-self.preceding_context_len :]) + f"\n{caption}" + preceding_context = preceding_context.strip() + + # Get following context + if table.bounding_regions and len(table.bounding_regions) == 1: + table_end_page = table_beginning_page + elif table.bounding_regions: + table_end_page = next( + page for page in result.pages if page.page_number == table.bounding_regions[-1].page_number + ) + else: + table_end_page = None + + table_end_offset = table_start_offset + table.spans[0].length + if table_end_page and table_end_page.lines: + following_lines = [ + line.content for line in table_end_page.lines if line.spans[0].offset > table_end_offset + ] + else: + following_lines = [] + following_context = "\n".join(following_lines[: self.following_context_len]) + + table_meta = copy.deepcopy(meta) + + if isinstance(table_meta, dict): + table_meta["preceding_context"] = preceding_context + table_meta["following_context"] = following_context + else: + table_meta = {"preceding_context": preceding_context, "following_context": following_context} + + if 
table.bounding_regions: + table_meta["page"] = table.bounding_regions[0].page_number + + table_df = pd.DataFrame(columns=table_list[0], data=table_list[1:]) + + # Use custom ID for tables, as columns might not be unique and thus failing in the default ID generation + pd_hashes = self._hash_dataframe(table_df) + data = f"{pd_hashes}{table_meta}" + doc_id = hashlib.sha256(data.encode()).hexdigest() + converted_tables.append(Document(id=doc_id, dataframe=table_df, meta=table_meta)) + + return converted_tables + + def _convert_to_natural_text(self, result: "AnalyzeResult", meta: Optional[Dict[str, Any]]) -> Document: + """ + This converts the `AnalyzeResult` object into a single document. + + We add "\f" separators between to differentiate between the text on separate pages. This is the expected format + for the PreProcessor. + + :param result: The AnalyzeResult object returned by the `begin_analyze_document` method. Docs on Analyze result + can be found [here](https://azuresdkdocs.blob.core.windows.net/$web/python/azure-ai-formrecognizer/3.3.0/azure.ai.formrecognizer.html?highlight=read#azure.ai.formrecognizer.AnalyzeResult). + :param meta: Optional dictionary with metadata that shall be attached to all resulting documents. + Can be any custom keys and values. + :returns: A single Document containing all the text extracted from the AnalyzeResult object. 
+ """ + table_spans_by_page = self._collect_table_spans(result=result) + + texts = [] + if result.paragraphs: + paragraphs_to_pages: Dict[int, str] = defaultdict(str) + for paragraph in result.paragraphs: + if paragraph.bounding_regions: + # If paragraph spans multiple pages we group it with the first page number + page_numbers = [b.page_number for b in paragraph.bounding_regions] + else: + # If page_number is not available we put the paragraph onto an existing page + current_last_page_number = sorted(paragraphs_to_pages.keys())[-1] if paragraphs_to_pages else 1 + page_numbers = [current_last_page_number] + tables_on_page = table_spans_by_page[page_numbers[0]] + # Check if paragraph is part of a table and if so skip + if self._check_if_in_table(tables_on_page, line_or_paragraph=paragraph): + continue + paragraphs_to_pages[page_numbers[0]] += paragraph.content + "\n" + + max_page_number: int = max(paragraphs_to_pages) + for page_idx in range(1, max_page_number + 1): + # We add empty strings for missing pages so the preprocessor can still extract the correct page number + # from the original PDF. + page_text = paragraphs_to_pages.get(page_idx, "") + texts.append(page_text) + else: + logger.warning("No text paragraphs were detected by the OCR conversion.") + + all_text = "\f".join(texts) + return Document(content=all_text, meta=meta if meta else {}) + + def _convert_to_single_column_text( + self, result: "AnalyzeResult", meta: Optional[Dict[str, str]], threshold_y: float = 0.05 + ) -> Document: + """ + This converts the `AnalyzeResult` object into a single Haystack Document. + + We add "\f" separators between to differentiate between the text on separate pages. This is the expected format + for the PreProcessor. + + :param result: The AnalyzeResult object returned by the `begin_analyze_document` method. 
Docs on Analyze result + can be found [here](https://azuresdkdocs.blob.core.windows.net/$web/python/azure-ai-formrecognizer/3.3.0/azure.ai.formrecognizer.html?highlight=read#azure.ai.formrecognizer.AnalyzeResult). + :param meta: Optional dictionary with metadata that shall be attached to all resulting documents. + Can be any custom keys and values. + :param threshold_y: height threshold in inches for PDF and pixels for images + :returns: A single Document containing all the text extracted from the AnalyzeResult object. + """ + table_spans_by_page = self._collect_table_spans(result=result) + + # Find all pairs of lines that should be grouped together based on the y-value of the upper left coordinate + # of their bounding box + pairs_by_page = defaultdict(list) + for page_idx, page in enumerate(result.pages): + lines = page.lines if page.lines else [] + # Only works if polygons is available + if all(line.polygon is not None for line in lines): + for i in range(len(lines)): # pylint: disable=consider-using-enumerate + # left_upi, right_upi, right_lowi, left_lowi = lines[i].polygon + left_upi, _, _, _ = lines[i].polygon # type: ignore + pairs_by_page[page_idx].append([i, i]) + for j in range(i + 1, len(lines)): # pylint: disable=invalid-name + left_upj, _, _, _ = lines[j].polygon # type: ignore + close_on_y_axis = abs(left_upi[1] - left_upj[1]) < threshold_y + if close_on_y_axis: + pairs_by_page[page_idx].append([i, j]) + # Default if polygon is not available + else: + logger.info( + "Polygon information for lines on page {page_idx} is not available so it is not possible " + "to enforce a single column page layout.".format(page_idx=page_idx) + ) + for i in range(len(lines)): + pairs_by_page[page_idx].append([i, i]) + + # merged the line pairs that are connected by page + merged_pairs_by_page = {} + for page_idx in pairs_by_page: + graph = nx.Graph() + graph.add_edges_from(pairs_by_page[page_idx]) + merged_pairs_by_page[page_idx] = [list(a) for a in 
list(nx.connected_components(graph))] + + # Convert line indices to the DocumentLine objects + merged_lines_by_page = {} + for page_idx, page in enumerate(result.pages): + rows = [] + lines = page.lines if page.lines else [] + # We use .get(page_idx, []) since the page could be empty + for row_of_lines in merged_pairs_by_page.get(page_idx, []): + lines_in_row = [lines[line_idx] for line_idx in row_of_lines] + rows.append(lines_in_row) + merged_lines_by_page[page_idx] = rows + + # Sort the merged pairs in each row by the x-value of the upper left bounding box coordinate + x_sorted_lines_by_page = {} + for page_idx, _ in enumerate(result.pages): + sorted_rows = [] + for row_of_lines in merged_lines_by_page[page_idx]: + sorted_rows.append(sorted(row_of_lines, key=lambda x: x.polygon[0][0])) # type: ignore + x_sorted_lines_by_page[page_idx] = sorted_rows + + # Sort each row within the page by the y-value of the upper left bounding box coordinate + y_sorted_lines_by_page = {} + for page_idx, _ in enumerate(result.pages): + sorted_rows = sorted(x_sorted_lines_by_page[page_idx], key=lambda x: x[0].polygon[0][1]) # type: ignore + y_sorted_lines_by_page[page_idx] = sorted_rows + + # Construct the text to write + texts = [] + for page_idx, page in enumerate(result.pages): + tables_on_page = table_spans_by_page[page.page_number] + page_text = "" + for row_of_lines in y_sorted_lines_by_page[page_idx]: + # Check if line is part of a table and if so skip + if any(self._check_if_in_table(tables_on_page, line_or_paragraph=line) for line in row_of_lines): + continue + page_text += " ".join(line.content for line in row_of_lines) + page_text += "\n" + texts.append(page_text) + all_text = "\f".join(texts) + return Document(content=all_text, meta=meta if meta else {}) + + def _collect_table_spans(self, result: "AnalyzeResult") -> Dict: + """ + Collect the spans of all tables by page number. + + :param result: The AnalyzeResult object returned by the `begin_analyze_document` method. 
+ :returns: A dictionary with the page number as key and a list of table spans as value. + """ + table_spans_by_page = defaultdict(list) + tables = result.tables if result.tables else [] + for table in tables: + if not table.bounding_regions: + continue + table_spans_by_page[table.bounding_regions[0].page_number].append(table.spans[0]) + return table_spans_by_page + + def _check_if_in_table( + self, tables_on_page: dict, line_or_paragraph: Union["DocumentLine", "DocumentParagraph"] + ) -> bool: + """ + Check if a line or paragraph is part of a table. + + :param tables_on_page: A dictionary with the page number as key and a list of table spans as value. + :param line_or_paragraph: The line or paragraph to check. + :returns: True if the line or paragraph is part of a table, False otherwise. + """ + in_table = False + # Check if line is part of a table + for table in tables_on_page: + if table.offset <= line_or_paragraph.spans[0].offset <= table.offset + table.length: + in_table = True + break + return in_table + + def _hash_dataframe(self, df: pd.DataFrame, desired_samples=5, hash_length=4) -> str: + """ + Returns a hash of the DataFrame content. + + The hash is based on the content of the DataFrame. + :param df: The DataFrame to hash. + :param desired_samples: The desired number of samples to hash. + :param hash_length: The length of the hash for each sample. + + :returns: A hash of the DataFrame content. 
+ """ + # take adaptive sample of rows to hash because we can have very large dataframes + hasher = hashlib.md5() + total_rows = len(df) + # sample rate based on DataFrame size and desired number of samples + sample_rate = max(1, total_rows // desired_samples) + + hashes = pd.util.hash_pandas_object(df, index=True) + sampled_hashes = hashes[::sample_rate] + + for hash_value in sampled_hashes: + partial_hash = str(hash_value)[:hash_length].encode("utf-8") + hasher.update(partial_hash) + + return hasher.hexdigest() diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/csv.py b/testbed/deepset-ai__haystack/haystack/components/converters/csv.py new file mode 100644 index 0000000000000000000000000000000000000000..721d8cf6257ad5121616b05da5f77910ceb4e294 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/csv.py @@ -0,0 +1,93 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream + +logger = logging.getLogger(__name__) + + +@component +class CSVToDocument: + """ + Converts CSV files to Documents. + + By default, it uses UTF-8 encoding when converting files but + you can also set a custom encoding. + It can attach metadata to the resulting documents. + + ### Usage example + + ```python + from haystack.components.converters.csv import CSVToDocument + converter = CSVToDocument() + results = converter.run(sources=["sample.csv"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'col1,col2\now1,row1\nrow2row2\n' + ``` + """ + + def __init__(self, encoding: str = "utf-8"): + """ + Creates a CSVToDocument component. 
+ + :param encoding: + The encoding of the csv files to convert. + If the encoding is specified in the metadata of a source ByteStream, + it overrides this value. + """ + self.encoding = encoding + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts a CSV file to a Document. + + :param sources: + List of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output documents. + :returns: + A dictionary with the following keys: + - `documents`: Created documents + """ + documents = [] + + meta_list = normalize_metadata(meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + encoding = bytestream.meta.get("encoding", self.encoding) + data = io.BytesIO(bytestream.data).getvalue().decode(encoding=encoding) + except Exception as e: + logger.warning( + "Could not convert file {source}. Skipping it. 
Error message: {error}", source=source, error=e + ) + continue + + merged_metadata = {**bytestream.meta, **metadata} + document = Document(content=data, meta=merged_metadata) + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/docx.py b/testbed/deepset-ai__haystack/haystack/components/converters/docx.py new file mode 100644 index 0000000000000000000000000000000000000000..dc0a51f485d9c991bca809aa694ff3e881d9fc58 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/docx.py @@ -0,0 +1,326 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import csv +import io +from dataclasses import dataclass +from enum import Enum +from io import StringIO +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +logger = logging.getLogger(__name__) + +with LazyImport("Run 'pip install python-docx'") as docx_import: + import docx + from docx.document import Document as DocxDocument + from docx.table import Table + from docx.text.paragraph import Paragraph + + +@dataclass +class DOCXMetadata: + """ + Describes the metadata of Docx file. 
+ + :param author: The author + :param category: The category + :param comments: The comments + :param content_status: The content status + :param created: The creation date (ISO formatted string) + :param identifier: The identifier + :param keywords: Available keywords + :param language: The language of the document + :param last_modified_by: User who last modified the document + :param last_printed: The last printed date (ISO formatted string) + :param modified: The last modification date (ISO formatted string) + :param revision: The revision number + :param subject: The subject + :param title: The title + :param version: The version + """ + + author: str + category: str + comments: str + content_status: str + created: Optional[str] + identifier: str + keywords: str + language: str + last_modified_by: str + last_printed: Optional[str] + modified: Optional[str] + revision: int + subject: str + title: str + version: str + + +class DOCXTableFormat(Enum): + """ + Supported formats for storing DOCX tabular data in a Document. + """ + + MARKDOWN = "markdown" + CSV = "csv" + + def __str__(self): + return self.value + + @staticmethod + def from_str(string: str) -> "DOCXTableFormat": + """ + Convert a string to a DOCXTableFormat enum. + """ + enum_map = {e.value: e for e in DOCXTableFormat} + table_format = enum_map.get(string.lower()) + if table_format is None: + msg = f"Unknown table format '{string}'. Supported formats are: {list(enum_map.keys())}" + raise ValueError(msg) + return table_format + + +@component +class DOCXToDocument: + """ + Converts DOCX files to Documents. + + Uses `python-docx` library to convert the DOCX file to a document. + This component does not preserve page breaks in the original document. 
+ + Usage example: + ```python + from haystack.components.converters.docx import DOCXToDocument, DOCXTableFormat + + converter = DOCXToDocument(table_format=DOCXTableFormat.CSV) + results = converter.run(sources=["sample.docx"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the DOCX file.' + ``` + """ + + def __init__(self, table_format: Union[str, DOCXTableFormat] = DOCXTableFormat.CSV): + """ + Create a DOCXToDocument component. + + :param table_format: The format for table output. Can be either DOCXTableFormat.MARKDOWN, + DOCXTableFormat.CSV, "markdown", or "csv". Defaults to DOCXTableFormat.CSV. + """ + docx_import.check() + self.table_format = DOCXTableFormat.from_str(table_format) if isinstance(table_format, str) else table_format + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict(self, table_format=str(self.table_format)) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "DOCXToDocument": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + if "table_format" in data["init_parameters"]: + data["init_parameters"]["table_format"] = DOCXTableFormat.from_str(data["init_parameters"]["table_format"]) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts DOCX files to Documents. + + :param sources: + List of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. + This value can be either a list of dictionaries or a single dictionary. 
+ If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. + + :returns: + A dictionary with the following keys: + - `documents`: Created Documents + """ + documents = [] + meta_list = normalize_metadata(meta=meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + docx_document = docx.Document(io.BytesIO(bytestream.data)) + elements = self._extract_elements(docx_document) + text = "\n".join(elements) + except Exception as e: + logger.warning( + "Could not read {source} and convert it to a DOCX Document, skipping. Error: {error}", + source=source, + error=e, + ) + continue + + docx_metadata = self._get_docx_metadata(document=docx_document) + merged_metadata = {**bytestream.meta, **metadata, "docx": docx_metadata} + document = Document(content=text, meta=merged_metadata) + documents.append(document) + + return {"documents": documents} + + def _extract_elements(self, document: "DocxDocument") -> List[str]: + """ + Extracts elements from a DOCX file. + + :param document: The DOCX Document object. + :returns: List of strings (paragraph texts and table representations) with page breaks added as '\f' characters. 
+ """ + elements = [] + for element in document.element.body: + if element.tag.endswith("p"): + paragraph = Paragraph(element, document) + if paragraph.contains_page_break: + para_text = self._process_paragraph_with_page_breaks(paragraph) + else: + para_text = paragraph.text + elements.append(para_text) + elif element.tag.endswith("tbl"): + table = docx.table.Table(element, document) + table_str = ( + self._table_to_markdown(table) + if self.table_format == DOCXTableFormat.MARKDOWN + else self._table_to_csv(table) + ) + elements.append(table_str) + + return elements + + def _process_paragraph_with_page_breaks(self, paragraph: "Paragraph") -> str: + """ + Processes a paragraph with page breaks. + + :param paragraph: The DOCX paragraph to process. + :returns: A string with page breaks added as '\f' characters. + """ + para_text = "" + # Usually, just 1 page break exists, but could be more if paragraph is really long, so we loop over them + for pb_index, page_break in enumerate(paragraph.rendered_page_breaks): + # Can only extract text from first paragraph page break, unfortunately + if pb_index == 0: + if page_break.preceding_paragraph_fragment: + para_text += page_break.preceding_paragraph_fragment.text + para_text += "\f" + if page_break.following_paragraph_fragment: + # following_paragraph_fragment contains all text for remainder of paragraph. + # However, if the remainder of the paragraph spans multiple page breaks, it won't include + # those later page breaks so we have to add them at end of text in the `else` block below. + # This is not ideal, but this case should be very rare and this is likely good enough. + para_text += page_break.following_paragraph_fragment.text + else: + para_text += "\f" + return para_text + + def _table_to_markdown(self, table: "Table") -> str: + """ + Converts a DOCX table to a Markdown string. + + :param table: The DOCX table to convert. + :returns: A Markdown string representation of the table. 
+ """ + markdown: List[str] = [] + max_col_widths: List[int] = [] + + # Calculate max width for each column + for row in table.rows: + for i, cell in enumerate(row.cells): + cell_text = cell.text.strip() + if i >= len(max_col_widths): + max_col_widths.append(len(cell_text)) + else: + max_col_widths[i] = max(max_col_widths[i], len(cell_text)) + + # Process rows + for i, row in enumerate(table.rows): + md_row = [cell.text.strip().ljust(max_col_widths[j]) for j, cell in enumerate(row.cells)] + markdown.append("| " + " | ".join(md_row) + " |") + + # Add separator after header row + if i == 0: + separator = ["-" * max_col_widths[j] for j in range(len(row.cells))] + markdown.append("| " + " | ".join(separator) + " |") + + return "\n".join(markdown) + + def _table_to_csv(self, table: "Table") -> str: + """ + Converts a DOCX table to a CSV string. + + :param table: The DOCX table to convert. + :returns: A CSV string representation of the table. + """ + csv_output = StringIO() + csv_writer = csv.writer(csv_output, quoting=csv.QUOTE_MINIMAL) + + # Process rows + for row in table.rows: + csv_row = [cell.text.strip() for cell in row.cells] + csv_writer.writerow(csv_row) + + # Get the CSV as a string and strip any trailing newlines + csv_string = csv_output.getvalue().strip() + csv_output.close() + + return csv_string + + def _get_docx_metadata(self, document: "DocxDocument") -> DOCXMetadata: + """ + Get all relevant data from the 'core_properties' attribute from a DOCX Document. 
+ + :param document: + The DOCX Document you want to extract metadata from + + :returns: + A `DOCXMetadata` dataclass all the relevant fields from the 'core_properties' + """ + return DOCXMetadata( + author=document.core_properties.author, + category=document.core_properties.category, + comments=document.core_properties.comments, + content_status=document.core_properties.content_status, + created=(document.core_properties.created.isoformat() if document.core_properties.created else None), + identifier=document.core_properties.identifier, + keywords=document.core_properties.keywords, + language=document.core_properties.language, + last_modified_by=document.core_properties.last_modified_by, + last_printed=( + document.core_properties.last_printed.isoformat() if document.core_properties.last_printed else None + ), + modified=(document.core_properties.modified.isoformat() if document.core_properties.modified else None), + revision=document.core_properties.revision, + subject=document.core_properties.subject, + title=document.core_properties.title, + version=document.core_properties.version, + ) diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/html.py b/testbed/deepset-ai__haystack/haystack/components/converters/html.py new file mode 100644 index 0000000000000000000000000000000000000000..b090ec175467d64cd661e93459db5688c4a873bd --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/html.py @@ -0,0 +1,121 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +logger = logging.getLogger(__name__) + +with LazyImport("Run 'pip 
install trafilatura'") as trafilatura_import: + from trafilatura import extract + + +@component +class HTMLToDocument: + """ + Converts an HTML file to a Document. + + Usage example: + ```python + from haystack.components.converters import HTMLToDocument + + converter = HTMLToDocument() + results = converter.run(sources=["path/to/sample.html"]) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the HTML file.' + ``` + """ + + def __init__(self, extraction_kwargs: Optional[Dict[str, Any]] = None): + """ + Create an HTMLToDocument component. + + :param extraction_kwargs: A dictionary containing keyword arguments to customize the extraction process. These + are passed to the underlying Trafilatura `extract` function. For the full list of available arguments, see + the [Trafilatura documentation](https://trafilatura.readthedocs.io/en/latest/corefunctions.html#extract). + """ + trafilatura_import.check() + + self.extraction_kwargs = extraction_kwargs or {} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict(self, extraction_kwargs=self.extraction_kwargs) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "HTMLToDocument": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + extraction_kwargs: Optional[Dict[str, Any]] = None, + ): + """ + Converts a list of HTML files to Documents. + + :param sources: + List of HTML file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. 
+ This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. + :param extraction_kwargs: + Additional keyword arguments to customize the extraction process. + + :returns: + A dictionary with the following keys: + - `documents`: Created Documents + """ + + merged_extraction_kwargs = {**self.extraction_kwargs, **(extraction_kwargs or {})} + + documents = [] + meta_list = normalize_metadata(meta=meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source=source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + + try: + text = extract(bytestream.data.decode("utf-8"), **merged_extraction_kwargs) + except Exception as conversion_e: + logger.warning( + "Failed to extract text from {source}. Skipping it. 
Error: {error}", + source=source, + error=conversion_e, + ) + continue + + document = Document(content=text, meta={**bytestream.meta, **metadata}) + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/json.py b/testbed/deepset-ai__haystack/haystack/components/converters/json.py new file mode 100644 index 0000000000000000000000000000000000000000..966da881b83b627313f751528de0b0b289227203 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/json.py @@ -0,0 +1,277 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json +from pathlib import Path +from typing import Any, Dict, List, Literal, Optional, Set, Tuple, Union + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream, Document +from haystack.lazy_imports import LazyImport + +logger = logging.getLogger(__name__) + +with LazyImport("Run 'pip install jq'") as jq_import: + import jq + + +@component +class JSONConverter: + """ + Converts one or more JSON files into a text document. 
+ + ### Usage examples + + ```python + import json + + from haystack.components.converters import JSONConverter + from haystack.dataclasses import ByteStream + + source = ByteStream.from_string(json.dumps({"text": "This is the content of my document"})) + + converter = JSONConverter(content_key="text") + results = converter.run(sources=[source]) + documents = results["documents"] + print(documents[0].content) + # 'This is the content of my document' + ``` + + Optionally, you can also provide a `jq_schema` string to filter the JSON source files and `extra_meta_fields` + to extract from the filtered data: + + ```python + import json + + from haystack.components.converters import JSONConverter + from haystack.dataclasses import ByteStream + + data = { + "laureates": [ + { + "firstname": "Enrico", + "surname": "Fermi", + "motivation": "for his demonstrations of the existence of new radioactive elements produced " + "by neutron irradiation, and for his related discovery of nuclear reactions brought about by" + " slow neutrons", + }, + { + "firstname": "Rita", + "surname": "Levi-Montalcini", + "motivation": "for their discoveries of growth factors", + }, + ], + } + source = ByteStream.from_string(json.dumps(data)) + converter = JSONConverter( + jq_schema=".laureates[]", content_key="motivation", extra_meta_fields={"firstname", "surname"} + ) + + results = converter.run(sources=[source]) + documents = results["documents"] + print(documents[0].content) + # 'for his demonstrations of the existence of new radioactive elements produced by + # neutron irradiation, and for his related discovery of nuclear reactions brought + # about by slow neutrons' + + print(documents[0].meta) + # {'firstname': 'Enrico', 'surname': 'Fermi'} + + print(documents[1].content) + # 'for their discoveries of growth factors' + + print(documents[1].meta) + # {'firstname': 'Rita', 'surname': 'Levi-Montalcini'} + ``` + + """ + + def __init__( + self, + jq_schema: Optional[str] = None, + content_key: 
Optional[str] = None, + extra_meta_fields: Optional[Union[Set[str], Literal["*"]]] = None, + ): + """ + Creates a JSONConverter component. + + An optional `jq_schema` can be provided to extract nested data in the JSON source files. + See the [official jq documentation](https://jqlang.github.io/jq/) for more info on the filters syntax. + If `jq_schema` is not set, whole JSON source files will be used to extract content. + + Optionally, you can provide a `content_key` to specify which key in the extracted object must + be set as the document's content. + + If both `jq_schema` and `content_key` are set, the component will search for the `content_key` in + the JSON object extracted by `jq_schema`. If the extracted data is not a JSON object, it will be skipped. + + If only `jq_schema` is set, the extracted data must be a scalar value. If it's a JSON object or array, + it will be skipped. + + If only `content_key` is set, the source JSON file must be a JSON object, else it will be skipped. + + `extra_meta_fields` can either be set to a set of strings or a literal `"*"` string. + If it's a set of strings, it must specify fields in the extracted objects that must be set in + the extracted documents. If a field is not found, the meta value will be `None`. + If set to `"*"`, all fields that are not `content_key` found in the filtered JSON object will + be saved as metadata. + + Initialization will fail if neither `jq_schema` nor `content_key` are set. + + :param jq_schema: + Optional jq filter string to extract content. + If not specified, whole JSON object will be used to extract information. + :param content_key: + Optional key to extract document content. + If `jq_schema` is specified, the `content_key` will be extracted from that object. + :param extra_meta_fields: + An optional set of meta keys to extract from the content. + If `jq_schema` is specified, all keys will be extracted from that object. 
+ """ + self._compiled_filter = None + if jq_schema: + jq_import.check() + self._compiled_filter = jq.compile(jq_schema) + + self._jq_schema = jq_schema + self._content_key = content_key + self._meta_fields = extra_meta_fields + + if self._compiled_filter is None and self._content_key is None: + msg = "No `jq_schema` nor `content_key` specified. Set either or both to extract data." + raise ValueError(msg) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, jq_schema=self._jq_schema, content_key=self._content_key, extra_meta_fields=self._meta_fields + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "JSONConverter": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. + """ + return default_from_dict(cls, data) + + def _get_content_and_meta(self, source: ByteStream) -> List[Tuple[str, Dict[str, Any]]]: + """ + Utility function to extract text and metadata from a JSON file. + + :param source: + UTF-8 byte stream. + :returns: + Collection of text and metadata dict tuples, each corresponding + to a different document. + """ + try: + file_content = source.data.decode("utf-8") + except UnicodeError as exc: + logger.warning( + "Failed to extract text from {source}. Skipping it. Error: {error}", + source=source.meta["file_path"], + error=exc, + ) + + meta_fields = self._meta_fields or set() + + if self._compiled_filter is not None: + try: + objects = list(self._compiled_filter.input_text(file_content)) + except Exception as exc: + logger.warning( + "Failed to extract text from {source}. Skipping it. Error: {error}", + source=source.meta["file_path"], + error=exc, + ) + return [] + else: + # We just load the whole file as JSON if the user didn't provide a jq filter. + # We put it in a list even if it's not to ease handling it later on. 
+ objects = [json.loads(file_content)] + + result = [] + if self._content_key is not None: + for obj in objects: + if not isinstance(obj, dict): + logger.warning("Expected a dictionary but got {obj}. Skipping it.", obj=obj) + continue + if self._content_key not in obj: + logger.warning( + "'{content_key}' not found in {obj}. Skipping it.", content_key=self._content_key, obj=obj + ) + continue + + text = obj[self._content_key] + if isinstance(text, (dict, list)): + logger.warning("Expected a scalar value but got {obj}. Skipping it.", obj=obj) + continue + + meta = {} + if meta_fields == "*": + meta = {k: v for k, v in obj.items() if k != self._content_key} + else: + for field in meta_fields: + meta[field] = obj.get(field, None) + result.append((text, meta)) + else: + for obj in objects: + if isinstance(obj, (dict, list)): + logger.warning("Expected a scalar value but got {obj}. Skipping it.", obj=obj) + continue + result.append((str(obj), {})) + + return result + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts a list of JSON files to documents. + + :param sources: + A list of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced documents. + If it's a list, the length of the list must match the number of sources. + If `sources` contain ByteStream objects, their `meta` will be added to the output documents. + + :returns: + A dictionary with the following keys: + - `documents`: A list of created documents. 
+ """ + documents = [] + meta_list = normalize_metadata(meta=meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as exc: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=exc) + continue + + data = self._get_content_and_meta(bytestream) + + for text, extra_meta in data: + merged_metadata = {**bytestream.meta, **metadata, **extra_meta} + document = Document(content=text, meta=merged_metadata) + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/markdown.py b/testbed/deepset-ai__haystack/haystack/components/converters/markdown.py new file mode 100644 index 0000000000000000000000000000000000000000..c983ef212fdc452fad1bafa0330e8e518d6e5844 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/markdown.py @@ -0,0 +1,111 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from tqdm import tqdm + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +with LazyImport("Run 'pip install markdown-it-py mdit_plain'") as markdown_conversion_imports: + from markdown_it import MarkdownIt + from mdit_plain.renderer import RendererPlain + + +logger = logging.getLogger(__name__) + + +@component +class MarkdownToDocument: + """ + Converts a Markdown file into a text Document. 
+ + Usage example: + ```python + from haystack.components.converters import MarkdownToDocument + from datetime import datetime + + converter = MarkdownToDocument() + results = converter.run(sources=["path/to/sample.md"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the markdown file.' + ``` + """ + + def __init__(self, table_to_single_line: bool = False, progress_bar: bool = True): + """ + Create a MarkdownToDocument component. + + :param table_to_single_line: + If True converts table contents into a single line. + :param progress_bar: + If True shows a progress bar when running. + """ + markdown_conversion_imports.check() + + self.table_to_single_line = table_to_single_line + self.progress_bar = progress_bar + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts a list of Markdown files to Documents. + + :param sources: + List of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. 
+ + :returns: + A dictionary with the following keys: + - `documents`: List of created Documents + """ + parser = MarkdownIt(renderer_cls=RendererPlain) + if self.table_to_single_line: + parser.enable("table") + + documents = [] + meta_list = normalize_metadata(meta=meta, sources_count=len(sources)) + + for source, metadata in tqdm( + zip(sources, meta_list), + total=len(sources), + desc="Converting markdown files to Documents", + disable=not self.progress_bar, + ): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + file_content = bytestream.data.decode("utf-8") + text = parser.render(file_content) + except Exception as conversion_e: + logger.warning( + "Failed to extract text from {source}. Skipping it. Error: {error}", + source=source, + error=conversion_e, + ) + continue + + merged_metadata = {**bytestream.meta, **metadata} + document = Document(content=text, meta=merged_metadata) + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/openapi_functions.py b/testbed/deepset-ai__haystack/haystack/components/converters/openapi_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..acc5d2a2325d49c5c535c588ae4ebe858ccb3b93 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/openapi_functions.py @@ -0,0 +1,258 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json +import os +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +import yaml + +from haystack import component, logging +from haystack.dataclasses.byte_stream import ByteStream +from haystack.lazy_imports import LazyImport + +logger = logging.getLogger(__name__) + +with LazyImport("Run 'pip install jsonref'") as openapi_imports: + import 
jsonref + + +@component +class OpenAPIServiceToFunctions: + """ + Converts OpenAPI service definitions to a format suitable for OpenAI function calling. + + The definition must respect OpenAPI specification 3.0.0 or higher. + It can be specified in JSON or YAML format. + Each function must have: + - unique operationId + - description + - requestBody and/or parameters + - schema for the requestBody and/or parameters + For more details on OpenAPI specification see the [official documentation](https://github.com/OAI/OpenAPI-Specification). + For more details on OpenAI function calling see the [official documentation](https://platform.openai.com/docs/guides/function-calling). + + Usage example: + ```python + from haystack.components.converters import OpenAPIServiceToFunctions + + converter = OpenAPIServiceToFunctions() + result = converter.run(sources=["path/to/openapi_definition.yaml"]) + assert result["functions"] + ``` + """ + + MIN_REQUIRED_OPENAPI_SPEC_VERSION = 3 + + def __init__(self): + """ + Create an OpenAPIServiceToFunctions component. + """ + openapi_imports.check() + + @component.output_types(functions=List[Dict[str, Any]], openapi_specs=List[Dict[str, Any]]) + def run(self, sources: List[Union[str, Path, ByteStream]]) -> Dict[str, Any]: + """ + Converts OpenAPI definitions in OpenAI function calling format. + + :param sources: + File paths or ByteStream objects of OpenAPI definitions (in JSON or YAML format). + + :returns: + A dictionary with the following keys: + - functions: Function definitions in JSON object format + - openapi_specs: OpenAPI specs in JSON/YAML object format with resolved references + + :raises RuntimeError: + If the OpenAPI definitions cannot be downloaded or processed. + :raises ValueError: + If the source type is not recognized or no functions are found in the OpenAPI definitions. 
+ """ + all_extracted_fc_definitions: List[Dict[str, Any]] = [] + all_openapi_specs = [] + for source in sources: + openapi_spec_content = None + if isinstance(source, (str, Path)): + if os.path.exists(source): + try: + with open(source, "r") as f: + openapi_spec_content = f.read() + except IOError as e: + logger.warning( + "IO error reading OpenAPI specification file: {source}. Error: {e}", source=source, e=e + ) + else: + logger.warning(f"OpenAPI specification file not found: {source}") + elif isinstance(source, ByteStream): + openapi_spec_content = source.data.decode("utf-8") + if not openapi_spec_content: + logger.warning( + "Invalid OpenAPI specification content provided: {openapi_spec_content}", + openapi_spec_content=openapi_spec_content, + ) + else: + logger.warning( + "Invalid source type {source}. Only str, Path, and ByteStream are supported.", source=type(source) + ) + continue + + if openapi_spec_content: + try: + service_openapi_spec = self._parse_openapi_spec(openapi_spec_content) + functions: List[Dict[str, Any]] = self._openapi_to_functions(service_openapi_spec) + all_extracted_fc_definitions.extend(functions) + all_openapi_specs.append(service_openapi_spec) + except Exception as e: + logger.error( + "Error processing OpenAPI specification from source {source}: {error}", source=source, error=e + ) + + if not all_extracted_fc_definitions: + logger.warning("No OpenAI function definitions extracted from the provided OpenAPI specification sources.") + + return {"functions": all_extracted_fc_definitions, "openapi_specs": all_openapi_specs} + + def _openapi_to_functions(self, service_openapi_spec: Dict[str, Any]) -> List[Dict[str, Any]]: + """ + OpenAPI to OpenAI function conversion. + + Extracts functions from the OpenAPI specification of the service and converts them into a format + suitable for OpenAI function calling. + + :param service_openapi_spec: The OpenAPI specification from which functions are to be extracted. 
+ :type service_openapi_spec: Dict[str, Any] + :return: A list of dictionaries, each representing a function. Each dictionary includes the function's + name, description, and a schema of its parameters. + :rtype: List[Dict[str, Any]] + """ + + # Doesn't enforce rigid spec validation because that would require a lot of dependencies + # We check the version and require minimal fields to be present, so we can extract functions + spec_version = service_openapi_spec.get("openapi") + if not spec_version: + raise ValueError(f"Invalid OpenAPI spec provided. Could not extract version from {service_openapi_spec}") + service_openapi_spec_version = int(spec_version.split(".")[0]) + + # Compare the versions + if service_openapi_spec_version < OpenAPIServiceToFunctions.MIN_REQUIRED_OPENAPI_SPEC_VERSION: + raise ValueError( + f"Invalid OpenAPI spec version {service_openapi_spec_version}. Must be " + f"at least {OpenAPIServiceToFunctions.MIN_REQUIRED_OPENAPI_SPEC_VERSION}." + ) + + functions: List[Dict[str, Any]] = [] + for paths in service_openapi_spec["paths"].values(): + for path_spec in paths.values(): + function_dict = self._parse_endpoint_spec(path_spec) + if function_dict: + functions.append(function_dict) + return functions + + def _parse_endpoint_spec(self, resolved_spec: Dict[str, Any]) -> Optional[Dict[str, Any]]: + if not isinstance(resolved_spec, dict): + logger.warning("Invalid OpenAPI spec format provided. 
Could not extract function.") + return {} + + function_name = resolved_spec.get("operationId") + description = resolved_spec.get("description") or resolved_spec.get("summary", "") + + schema: Dict[str, Any] = {"type": "object", "properties": {}} + + # requestBody section + req_body_schema = ( + resolved_spec.get("requestBody", {}).get("content", {}).get("application/json", {}).get("schema", {}) + ) + if "properties" in req_body_schema: + for prop_name, prop_schema in req_body_schema["properties"].items(): + schema["properties"][prop_name] = self._parse_property_attributes(prop_schema) + + if "required" in req_body_schema: + schema.setdefault("required", []).extend(req_body_schema["required"]) + + # parameters section + for param in resolved_spec.get("parameters", []): + if "schema" in param: + schema_dict = self._parse_property_attributes(param["schema"]) + # these attributes are not in param[schema] level but on param level + useful_attributes = ["description", "pattern", "enum"] + schema_dict.update({key: param[key] for key in useful_attributes if param.get(key)}) + schema["properties"][param["name"]] = schema_dict + if param.get("required", False): + schema.setdefault("required", []).append(param["name"]) + + if function_name and description and schema["properties"]: + return {"name": function_name, "description": description, "parameters": schema} + else: + logger.warning( + "Invalid OpenAPI spec format provided. Could not extract function from {spec}", spec=resolved_spec + ) + return {} + + def _parse_property_attributes( + self, property_schema: Dict[str, Any], include_attributes: Optional[List[str]] = None + ) -> Dict[str, Any]: + """ + Parses the attributes of a property schema. + + Recursively parses the attributes of a property schema, including nested objects and arrays, + and includes specified attributes like description, pattern, etc. + + :param property_schema: The schema of the property to parse. 
+ :param include_attributes: The list of attributes to include in the parsed schema. + :return: The parsed schema of the property including the specified attributes. + """ + include_attributes = include_attributes or ["description", "pattern", "enum"] + + schema_type = property_schema.get("type") + + parsed_schema = {"type": schema_type} if schema_type else {} + for attr in include_attributes: + if attr in property_schema: + parsed_schema[attr] = property_schema[attr] + + if schema_type == "object": + properties = property_schema.get("properties", {}) + parsed_properties = { + prop_name: self._parse_property_attributes(prop, include_attributes) + for prop_name, prop in properties.items() + } + parsed_schema["properties"] = parsed_properties + + if "required" in property_schema: + parsed_schema["required"] = property_schema["required"] + + elif schema_type == "array": + items = property_schema.get("items", {}) + parsed_schema["items"] = self._parse_property_attributes(items, include_attributes) + + return parsed_schema + + def _parse_openapi_spec(self, content: str) -> Dict[str, Any]: + """ + Parses OpenAPI specification content, supporting both JSON and YAML formats. + + :param content: The content of the OpenAPI specification. + :return: The parsed OpenAPI specification. + """ + open_api_spec_content = None + try: + open_api_spec_content = json.loads(content) + return jsonref.replace_refs(open_api_spec_content) + except json.JSONDecodeError as json_error: + # heuristic to confirm that the content is likely malformed JSON + if content.strip().startswith(("{", "[")): + raise json_error + + try: + open_api_spec_content = yaml.safe_load(content) + except yaml.YAMLError: + error_message = ( + "Failed to parse the OpenAPI specification. 
" + "The content does not appear to be valid JSON or YAML.\n\n" + ) + raise RuntimeError(error_message, content) + + # Replace references in the object with their resolved values, if any + return jsonref.replace_refs(open_api_spec_content) diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/output_adapter.py b/testbed/deepset-ai__haystack/haystack/components/converters/output_adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..50ffd5391914c9b85e2bdf0e7d72d0e493614897 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/output_adapter.py @@ -0,0 +1,184 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import ast +import contextlib +from typing import Any, Callable, Dict, Optional, Set +from warnings import warn + +import jinja2.runtime +from jinja2 import Environment, TemplateSyntaxError, meta +from jinja2.nativetypes import NativeEnvironment +from jinja2.sandbox import SandboxedEnvironment +from typing_extensions import TypeAlias + +from haystack import component, default_from_dict, default_to_dict +from haystack.utils import deserialize_callable, deserialize_type, serialize_callable, serialize_type + + +class OutputAdaptationException(Exception): + """Exception raised when there is an error during output adaptation.""" + + +@component +class OutputAdapter: + """ + Adapts output of a Component using Jinja templates. 
+ + Usage example: + ```python + from haystack import Document + from haystack.components.converters import OutputAdapter + + adapter = OutputAdapter(template="{{ documents[0].content }}", output_type=str) + documents = [Document(content="Test content"] + result = adapter.run(documents=documents) + + assert result["output"] == "Test content" + ``` + """ + + def __init__( + self, + template: str, + output_type: TypeAlias, + custom_filters: Optional[Dict[str, Callable]] = None, + unsafe: bool = False, + ): + """ + Create an OutputAdapter component. + + :param template: + A Jinja template that defines how to adapt the input data. + The variables in the template define the input of this instance. + e.g. + With this template: + ``` + {{ documents[0].content }} + ``` + The Component input will be `documents`. + :param output_type: + The type of output this instance will return. + :param custom_filters: + A dictionary of custom Jinja filters used in the template. + :param unsafe: + Enable execution of arbitrary code in the Jinja template. + This should only be used if you trust the source of the template as it can be lead to remote code execution. + """ + self.custom_filters = {**(custom_filters or {})} + input_types: Set[str] = set() + + self._unsafe = unsafe + + if self._unsafe: + msg = ( + "Unsafe mode is enabled. This allows execution of arbitrary code in the Jinja template. " + "Use this only if you trust the source of the template." 
+ ) + warn(msg) + self._env = ( + NativeEnvironment() if self._unsafe else SandboxedEnvironment(undefined=jinja2.runtime.StrictUndefined) + ) + + try: + self._env.parse(template) # Validate template syntax + self.template = template + except TemplateSyntaxError as e: + raise ValueError(f"Invalid Jinja template '{template}': {e}") from e + + for name, filter_func in self.custom_filters.items(): + self._env.filters[name] = filter_func + + # b) extract variables in the template + route_input_names = self._extract_variables(self._env) + input_types.update(route_input_names) + + # the env is not needed, discarded automatically + component.set_input_types(self, **{var: Any for var in input_types}) + component.set_output_types(self, **{"output": output_type}) + self.output_type = output_type + + def run(self, **kwargs): + """ + Renders the Jinja template with the provided inputs. + + :param kwargs: + Must contain all variables used in the `template` string. + :returns: + A dictionary with the following keys: + - `output`: Rendered Jinja template. + + :raises OutputAdaptationException: If template rendering fails. + """ + # check if kwargs are empty + if not kwargs: + raise ValueError("No input data provided for output adaptation") + for name, filter_func in self.custom_filters.items(): + self._env.filters[name] = filter_func + adapted_outputs = {} + try: + adapted_output_template = self._env.from_string(self.template) + output_result = adapted_output_template.render(**kwargs) + if isinstance(output_result, jinja2.runtime.Undefined): + raise OutputAdaptationException(f"Undefined variable in the template {self.template}; kwargs: {kwargs}") + + # We suppress the exception in case the output is already a string, otherwise + # we try to evaluate it and would fail. + # This must be done cause the output could be different literal structures. + # This doesn't support any user types. 
+ with contextlib.suppress(Exception): + if not self._unsafe: + output_result = ast.literal_eval(output_result) + + adapted_outputs["output"] = output_result + except Exception as e: + raise OutputAdaptationException(f"Error adapting {self.template} with {kwargs}: {e}") from e + return adapted_outputs + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + se_filters = {name: serialize_callable(filter_func) for name, filter_func in self.custom_filters.items()} + return default_to_dict( + self, + template=self.template, + output_type=serialize_type(self.output_type), + custom_filters=se_filters, + unsafe=self._unsafe, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OutputAdapter": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. + """ + init_params = data.get("init_parameters", {}) + init_params["output_type"] = deserialize_type(init_params["output_type"]) + + custom_filters = init_params.get("custom_filters", {}) + if custom_filters: + init_params["custom_filters"] = { + name: deserialize_callable(filter_func) if filter_func else None + for name, filter_func in custom_filters.items() + } + return default_from_dict(cls, data) + + def _extract_variables(self, env: Environment) -> Set[str]: + """ + Extracts all variables from a list of Jinja template strings. + + :param env: A Jinja environment. + :return: A set of variable names extracted from the template strings. 
+ """ + ast = env.parse(self.template) + return meta.find_undeclared_variables(ast) diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/pdfminer.py b/testbed/deepset-ai__haystack/haystack/components/converters/pdfminer.py new file mode 100644 index 0000000000000000000000000000000000000000..acf9db28f293962b3a7539783848c2e752788d99 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/pdfminer.py @@ -0,0 +1,171 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +with LazyImport("Run 'pip install pdfminer.six'") as pdfminer_import: + from pdfminer.high_level import extract_pages + from pdfminer.layout import LAParams, LTTextContainer + +logger = logging.getLogger(__name__) + + +@component +class PDFMinerToDocument: + """ + Converts PDF files to Documents. + + Uses `pdfminer` compatible converters to convert PDF files to Documents. https://pdfminersix.readthedocs.io/en/latest/ + + Usage example: + ```python + from haystack.components.converters.pdfminer import PDFMinerToDocument + + converter = PDFMinerToDocument() + results = converter.run(sources=["sample.pdf"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the PDF file.' 
+ ``` + """ + + def __init__( # pylint: disable=too-many-positional-arguments + self, + line_overlap: float = 0.5, + char_margin: float = 2.0, + line_margin: float = 0.5, + word_margin: float = 0.1, + boxes_flow: Optional[float] = 0.5, + detect_vertical: bool = True, + all_texts: bool = False, + ) -> None: + """ + Create a PDFMinerToDocument component. + + :param line_overlap: + This parameter determines whether two characters are considered to be on + the same line based on the amount of overlap between them. + The overlap is calculated relative to the minimum height of both characters. + :param char_margin: + Determines whether two characters are part of the same line based on the distance between them. + If the distance is less than the margin specified, the characters are considered to be on the same line. + The margin is calculated relative to the width of the character. + :param word_margin: + Determines whether two characters on the same line are part of the same word + based on the distance between them. If the distance is greater than the margin specified, + an intermediate space will be added between them to make the text more readable. + The margin is calculated relative to the width of the character. + :param line_margin: + This parameter determines whether two lines are part of the same paragraph based on + the distance between them. If the distance is less than the margin specified, + the lines are considered to be part of the same paragraph. + The margin is calculated relative to the height of a line. + :param boxes_flow: + This parameter determines the importance of horizontal and vertical position when + determining the order of text boxes. A value between -1.0 and +1.0 can be set, + with -1.0 indicating that only horizontal position matters and +1.0 indicating + that only vertical position matters. Setting the value to 'None' will disable advanced + layout analysis, and text boxes will be ordered based on the position of their bottom left corner. 
+ :param detect_vertical: + This parameter determines whether vertical text should be considered during layout analysis. + :param all_texts: + If layout analysis should be performed on text in figures. + """ + + pdfminer_import.check() + + self.layout_params = LAParams( + line_overlap=line_overlap, + char_margin=char_margin, + line_margin=line_margin, + word_margin=word_margin, + boxes_flow=boxes_flow, + detect_vertical=detect_vertical, + all_texts=all_texts, + ) + + def _converter(self, extractor) -> Document: + """ + Extracts text from PDF pages then convert the text into Documents + + :param extractor: + Python generator that yields PDF pages. + + :returns: + PDF text converted to Haystack Document + """ + pages = [] + for page in extractor: + text = "" + for container in page: + # Keep text only + if isinstance(container, LTTextContainer): + text += container.get_text() + pages.append(text) + + # Add a page delimiter + concat = "\f".join(pages) + + return Document(content=concat) + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts PDF files to Documents. + + :param sources: + List of PDF file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. 
+ + :returns: + A dictionary with the following keys: + - `documents`: Created Documents + """ + documents = [] + + meta_list = normalize_metadata(meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + pdf_reader = extract_pages(io.BytesIO(bytestream.data), laparams=self.layout_params) + document = self._converter(pdf_reader) + except Exception as e: + logger.warning( + "Could not read {source} and convert it to Document, skipping. {error}", source=source, error=e + ) + continue + + if document.content is None or document.content.strip() == "": + logger.warning( + "PDFMinerToDocument could not extract text from the file {source}. Returning an empty document.", + source=source, + ) + + merged_metadata = {**bytestream.meta, **metadata} + document.meta = merged_metadata + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/pptx.py b/testbed/deepset-ai__haystack/haystack/components/converters/pptx.py new file mode 100644 index 0000000000000000000000000000000000000000..b665abc4a7615ff096a38d737d2744f07569d61a --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/pptx.py @@ -0,0 +1,102 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +with LazyImport("Run 'pip install python-pptx'") as pptx_import: + from pptx import Presentation + + +logger = 
logging.getLogger(__name__) + + +@component +class PPTXToDocument: + """ + Converts PPTX files to Documents. + + Usage example: + ```python + from haystack.components.converters.pptx import PPTXToDocument + + converter = PPTXToDocument() + results = converter.run(sources=["sample.pptx"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is the text from the PPTX file.' + ``` + """ + + def __init__(self): + """ + Create an PPTXToDocument component. + """ + pptx_import.check() + + def _convert(self, file_content: io.BytesIO) -> str: + """ + Converts the PPTX file to text. + """ + pptx_presentation = Presentation(file_content) + text_all_slides = [] + for slide in pptx_presentation.slides: + text_on_slide = [] + for shape in slide.shapes: + if hasattr(shape, "text"): + text_on_slide.append(shape.text) + text_all_slides.append("\n".join(text_on_slide)) + text = "\f".join(text_all_slides) + return text + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts PPTX files to Documents. + + :param sources: + List of file paths or ByteStream objects. + :param meta: + Optional metadata to attach to the Documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. 
+ + :returns: + A dictionary with the following keys: + - `documents`: Created Documents + """ + documents = [] + meta_list = normalize_metadata(meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + text = self._convert(io.BytesIO(bytestream.data)) + except Exception as e: + logger.warning( + "Could not read {source} and convert it to Document, skipping. {error}", source=source, error=e + ) + continue + + merged_metadata = {**bytestream.meta, **metadata} + documents.append(Document(content=text, meta=merged_metadata)) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/pypdf.py b/testbed/deepset-ai__haystack/haystack/components/converters/pypdf.py new file mode 100644 index 0000000000000000000000000000000000000000..72fbcdc161695c18bafab86313354d5eaa66a04d --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/pypdf.py @@ -0,0 +1,155 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from pathlib import Path +from typing import Any, Dict, List, Optional, Protocol, Union + +from haystack import Document, component, default_from_dict, default_to_dict, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport +from haystack.utils.base_serialization import deserialize_class_instance, serialize_class_instance + +with LazyImport("Run 'pip install pypdf'") as pypdf_import: + from pypdf import PdfReader + + +logger = logging.getLogger(__name__) + + +class PyPDFConverter(Protocol): + """ + A protocol that defines a converter which takes a PdfReader object and converts it 
into a Document object. + """ + + def convert(self, reader: "PdfReader") -> Document: # noqa: D102 + ... + + def to_dict(self): # noqa: D102 + ... + + @classmethod + def from_dict(cls, data): # noqa: D102 + ... + + +@component +class PyPDFToDocument: + """ + Converts PDF files to documents your pipeline can query. + + This component uses converters compatible with the PyPDF library. + If no converter is provided, uses a default text extraction converter. + You can attach metadata to the resulting documents. + + ### Usage example + + ```python + from haystack.components.converters.pypdf import PyPDFToDocument + + converter = PyPDFToDocument() + results = converter.run(sources=["sample.pdf"], meta={"date_added": datetime.now().isoformat()}) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the PDF file.' + ``` + """ + + def __init__(self, converter: Optional[PyPDFConverter] = None): + """ + Create an PyPDFToDocument component. + + :param converter: + An instance of a PyPDFConverter compatible class. + """ + pypdf_import.check() + + self.converter = converter + + def to_dict(self): + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, converter=(serialize_class_instance(self.converter) if self.converter is not None else None) + ) + + @classmethod + def from_dict(cls, data): + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary with serialized data. + + :returns: + Deserialized component. 
+ """ + init_parameters = data.get("init_parameters", {}) + custom_converter_data = init_parameters.get("converter") + if custom_converter_data is not None: + data["init_parameters"]["converter"] = deserialize_class_instance(custom_converter_data) + return default_from_dict(cls, data) + + def _default_convert(self, reader: "PdfReader") -> Document: + text = "\f".join(page.extract_text() for page in reader.pages) + return Document(content=text) + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts PDF files to documents. + + :param sources: + List of file paths or ByteStream objects to convert. + :param meta: + Optional metadata to attach to the documents. + This value can be a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced documents. + If it's a list, its length must match the number of sources, as they are zipped together. + For ByteStream objects, their `meta` is added to the output documents. + + :returns: + A dictionary with the following keys: + - `documents`: A list of converted documents. + """ + documents = [] + meta_list = normalize_metadata(meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + pdf_reader = PdfReader(io.BytesIO(bytestream.data)) + document = ( + self._default_convert(pdf_reader) if self.converter is None else self.converter.convert(pdf_reader) + ) + except Exception as e: + logger.warning( + "Could not read {source} and convert it to Document, skipping. 
{error}", source=source, error=e + ) + continue + + if document.content is None or document.content.strip() == "": + logger.warning( + "PyPDFToDocument could not extract text from the file {source}. Returning an empty document.", + source=source, + ) + + merged_metadata = {**bytestream.meta, **metadata} + document.meta = merged_metadata + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/tika.py b/testbed/deepset-ai__haystack/haystack/components/converters/tika.py new file mode 100644 index 0000000000000000000000000000000000000000..926968a176a99a690905c0e6b3f751308bd0d438 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/tika.py @@ -0,0 +1,138 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import io +from html.parser import HTMLParser +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream +from haystack.lazy_imports import LazyImport + +with LazyImport("Run 'pip install tika'") as tika_import: + from tika import parser as tika_parser + +logger = logging.getLogger(__name__) + + +class XHTMLParser(HTMLParser): + """ + Custom parser to extract pages from Tika XHTML content. 
+ """ + + def __init__(self): + super().__init__() + self.ingest = True + self.page = "" + self.pages: List[str] = [] + + def handle_starttag(self, tag: str, attrs: List[tuple]): + """Identify the start of a page div.""" + if tag == "div" and any(attr == "class" and value == "page" for attr, value in attrs): + self.ingest = True + + def handle_endtag(self, tag: str): + """Identify the end of a page div.""" + if self.ingest and tag in ("div", "body"): + self.ingest = False + # restore words hyphened to the next line + self.pages.append(self.page.replace("-\n", "")) + self.page = "" + + def handle_data(self, data: str): + """Populate the page content.""" + if self.ingest: + self.page += data + + +@component +class TikaDocumentConverter: + """ + Converts files of different types to Documents. + + This component uses [Apache Tika](https://tika.apache.org/) for parsing the files and, therefore, + requires a running Tika server. + For more options on running Tika, + see the [official documentation](https://github.com/apache/tika-docker/blob/main/README.md#usage). + + Usage example: + ```python + from haystack.components.converters.tika import TikaDocumentConverter + + converter = TikaDocumentConverter() + results = converter.run( + sources=["sample.docx", "my_document.rtf", "archive.zip"], + meta={"date_added": datetime.now().isoformat()} + ) + documents = results["documents"] + print(documents[0].content) + # 'This is a text from the docx file.' + ``` + """ + + def __init__(self, tika_url: str = "http://localhost:9998/tika"): + """ + Create a TikaDocumentConverter component. + + :param tika_url: + Tika server URL. + """ + tika_import.check() + self.tika_url = tika_url + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts files to Documents. + + :param sources: + List of HTML file paths or ByteStream objects. 
+ :param meta: + Optional metadata to attach to the Documents. + This value can be either a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced Documents. + If it's a list, the length of the list must match the number of sources, because the two lists will + be zipped. + If `sources` contains ByteStream objects, their `meta` will be added to the output Documents. + + :returns: + A dictionary with the following keys: + - `documents`: Created Documents + """ + documents = [] + meta_list = normalize_metadata(meta=meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + # we extract the content as XHTML to preserve the structure of the document as much as possible + # this works for PDFs, but does not work for other file types (DOCX) + xhtml_content = tika_parser.from_buffer( + io.BytesIO(bytestream.data), serverEndpoint=self.tika_url, xmlContent=True + )["content"] + xhtml_parser = XHTMLParser() + xhtml_parser.feed(xhtml_content) + text = "\f".join(xhtml_parser.pages) + except Exception as conversion_e: + logger.warning( + "Failed to extract text from {source}. Skipping it. 
Error: {error}", + source=source, + error=conversion_e, + ) + continue + + merged_metadata = {**bytestream.meta, **metadata} + document = Document(content=text, meta=merged_metadata) + documents.append(document) + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/txt.py b/testbed/deepset-ai__haystack/haystack/components/converters/txt.py new file mode 100644 index 0000000000000000000000000000000000000000..da5ca9a259618a0afdbebcb0a4bfab4e5c092edd --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/txt.py @@ -0,0 +1,93 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack import Document, component, logging +from haystack.components.converters.utils import get_bytestream_from_source, normalize_metadata +from haystack.dataclasses import ByteStream + +logger = logging.getLogger(__name__) + + +@component +class TextFileToDocument: + """ + Converts text files to documents your pipeline can query. + + By default, it uses UTF-8 encoding when converting files but + you can also set custom encoding. + It can attach metadata to the resulting documents. + + ### Usage example + + ```python + from haystack.components.converters.txt import TextFileToDocument + + converter = TextFileToDocument() + results = converter.run(sources=["sample.txt"]) + documents = results["documents"] + print(documents[0].content) + # 'This is the content from the txt file.' + ``` + """ + + def __init__(self, encoding: str = "utf-8"): + """ + Creates a TextFileToDocument component. + + :param encoding: + The encoding of the text files to convert. + If the encoding is specified in the metadata of a source ByteStream, + it overrides this value. 
+ """ + self.encoding = encoding + + @component.output_types(documents=List[Document]) + def run( + self, + sources: List[Union[str, Path, ByteStream]], + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]] = None, + ): + """ + Converts text files to documents. + + :param sources: + List of HTML file paths or ByteStream objects to convert. + :param meta: + Optional metadata to attach to the documents. + This value can be a list of dictionaries or a single dictionary. + If it's a single dictionary, its content is added to the metadata of all produced documents. + If it's a list, its length must match the number of sources as they're zipped together. + For ByteStream objects, their `meta` is added to the output documents. + + :returns: + A dictionary with the following keys: + - `documents`: A list of converted documents. + """ + documents = [] + + meta_list = normalize_metadata(meta, sources_count=len(sources)) + + for source, metadata in zip(sources, meta_list): + try: + bytestream = get_bytestream_from_source(source) + except Exception as e: + logger.warning("Could not read {source}. Skipping it. Error: {error}", source=source, error=e) + continue + try: + encoding = bytestream.meta.get("encoding", self.encoding) + text = bytestream.data.decode(encoding) + except Exception as e: + logger.warning( + "Could not convert file {source}. Skipping it. 
Error message: {error}", source=source, error=e + ) + continue + + merged_metadata = {**bytestream.meta, **metadata} + document = Document(content=text, meta=merged_metadata) + documents.append(document) + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/converters/utils.py b/testbed/deepset-ai__haystack/haystack/components/converters/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..87cd143239688424bcb20ee456a933688dd0dbf4 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/converters/utils.py @@ -0,0 +1,51 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from pathlib import Path +from typing import Any, Dict, List, Optional, Union + +from haystack.dataclasses import ByteStream + + +def get_bytestream_from_source(source: Union[str, Path, ByteStream]) -> ByteStream: + """ + Creates a ByteStream object from a source. + + :param source: + A source to convert to a ByteStream. Can be a string (path to a file), a Path object, or a ByteStream. + :return: + A ByteStream object. + """ + + if isinstance(source, ByteStream): + return source + if isinstance(source, (str, Path)): + bs = ByteStream.from_file_path(Path(source)) + bs.meta["file_path"] = str(source) + return bs + raise ValueError(f"Unsupported source type {type(source)}") + + +def normalize_metadata( + meta: Optional[Union[Dict[str, Any], List[Dict[str, Any]]]], sources_count: int +) -> List[Dict[str, Any]]: + """ + Normalize the metadata input for a converter. + + Given all the possible value of the meta input for a converter (None, dictionary or list of dicts), + makes sure to return a list of dictionaries of the correct length for the converter to use. 
+ + :param meta: the meta input of the converter, as-is + :param sources_count: the number of sources the converter received + :returns: a list of dictionaries of the make length as the sources list + """ + if meta is None: + return [{}] * sources_count + if isinstance(meta, dict): + return [meta] * sources_count + if isinstance(meta, list): + if sources_count != len(meta): + raise ValueError("The length of the metadata list must match the number of sources.") + return meta + raise ValueError("meta must be either None, a dictionary or a list of dictionaries.") diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/__init__.py b/testbed/deepset-ai__haystack/haystack/components/embedders/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2b6cf4301e97bee041e6fcb61df0bda3e934caec --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/__init__.py @@ -0,0 +1,23 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.embedders.azure_document_embedder import AzureOpenAIDocumentEmbedder +from haystack.components.embedders.azure_text_embedder import AzureOpenAITextEmbedder +from haystack.components.embedders.hugging_face_api_document_embedder import HuggingFaceAPIDocumentEmbedder +from haystack.components.embedders.hugging_face_api_text_embedder import HuggingFaceAPITextEmbedder +from haystack.components.embedders.openai_document_embedder import OpenAIDocumentEmbedder +from haystack.components.embedders.openai_text_embedder import OpenAITextEmbedder +from haystack.components.embedders.sentence_transformers_document_embedder import SentenceTransformersDocumentEmbedder +from haystack.components.embedders.sentence_transformers_text_embedder import SentenceTransformersTextEmbedder + +__all__ = [ + "HuggingFaceAPITextEmbedder", + "HuggingFaceAPIDocumentEmbedder", + "SentenceTransformersTextEmbedder", + "SentenceTransformersDocumentEmbedder", 
+ "OpenAITextEmbedder", + "OpenAIDocumentEmbedder", + "AzureOpenAITextEmbedder", + "AzureOpenAIDocumentEmbedder", +] diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/azure_document_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/azure_document_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..e60c8781b6f5af28535148c98c8a57d37c6a4cc0 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/azure_document_embedder.py @@ -0,0 +1,249 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Dict, List, Optional, Tuple + +from openai.lib.azure import AzureOpenAI +from tqdm import tqdm + +from haystack import Document, component, default_from_dict, default_to_dict +from haystack.utils import Secret, deserialize_secrets_inplace + + +@component +class AzureOpenAIDocumentEmbedder: + """ + Calculates document embeddings using OpenAI models deployed on Azure. + + ### Usage example + + ```python + from haystack import Document + from haystack.components.embedders import AzureOpenAIDocumentEmbedder + + doc = Document(content="I love pizza!") + + document_embedder = AzureOpenAIDocumentEmbedder() + + result = document_embedder.run([doc]) + print(result['documents'][0].embedding) + + # [0.017020374536514282, -0.023255806416273117, ...] 
@component
class AzureOpenAIDocumentEmbedder:
    """
    Calculates document embeddings using OpenAI models deployed on Azure.

    ### Usage example

    ```python
    from haystack import Document
    from haystack.components.embedders import AzureOpenAIDocumentEmbedder

    doc = Document(content="I love pizza!")

    document_embedder = AzureOpenAIDocumentEmbedder()

    result = document_embedder.run([doc])
    print(result['documents'][0].embedding)

    # [0.017020374536514282, -0.023255806416273117, ...]
    ```
    """

    def __init__(  # noqa: PLR0913 (too-many-arguments)
        self,
        azure_endpoint: Optional[str] = None,
        api_version: Optional[str] = "2023-05-15",
        azure_deployment: str = "text-embedding-ada-002",
        dimensions: Optional[int] = None,
        api_key: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_API_KEY", strict=False),
        azure_ad_token: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_AD_TOKEN", strict=False),
        organization: Optional[str] = None,
        prefix: str = "",
        suffix: str = "",
        batch_size: int = 32,
        progress_bar: bool = True,
        meta_fields_to_embed: Optional[List[str]] = None,
        embedding_separator: str = "\n",
        timeout: Optional[float] = None,
        max_retries: Optional[int] = None,
    ):
        """
        Creates an AzureOpenAIDocumentEmbedder component.

        :param azure_endpoint:
            The endpoint of the model deployed on Azure.
        :param api_version:
            The version of the API to use.
        :param azure_deployment:
            The name of the model deployed on Azure. The default model is text-embedding-ada-002.
        :param dimensions:
            The number of dimensions of the resulting embeddings. Only supported in text-embedding-3
            and later models.
        :param api_key:
            The Azure OpenAI API key.
            You can set it with an environment variable `AZURE_OPENAI_API_KEY`, or pass with this
            parameter during initialization.
        :param azure_ad_token:
            Microsoft Entra ID token, see Microsoft's
            [Entra ID](https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id)
            documentation for more information. You can set it with an environment variable
            `AZURE_OPENAI_AD_TOKEN`, or pass with this parameter during initialization.
            Previously called Azure Active Directory.
        :param organization:
            Your organization ID. See OpenAI's
            [Setting Up Your Organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization)
            for more information.
        :param prefix:
            A string to add at the beginning of each text.
        :param suffix:
            A string to add at the end of each text.
        :param batch_size:
            Number of documents to embed at once.
        :param progress_bar:
            If `True`, shows a progress bar when running.
        :param meta_fields_to_embed:
            List of metadata fields to embed along with the document text.
        :param embedding_separator:
            Separator used to concatenate the metadata fields to the document text.
        :param timeout: The timeout for `AzureOpenAI` client calls, in seconds.
            If not set, defaults to either the
            `OPENAI_TIMEOUT` environment variable, or 30 seconds.
        :param max_retries: Maximum number of retries to contact AzureOpenAI after an internal error.
            If not set, defaults to either the `OPENAI_MAX_RETRIES` environment variable or to 5 retries.
        """
        # if not provided as a parameter, azure_endpoint is read from the env var AZURE_OPENAI_ENDPOINT
        azure_endpoint = azure_endpoint or os.environ.get("AZURE_OPENAI_ENDPOINT")
        if not azure_endpoint:
            raise ValueError("Please provide an Azure endpoint or set the environment variable AZURE_OPENAI_ENDPOINT.")

        if api_key is None and azure_ad_token is None:
            raise ValueError("Please provide an API key or an Azure Active Directory token.")

        self.api_key = api_key
        self.azure_ad_token = azure_ad_token
        self.api_version = api_version
        self.azure_endpoint = azure_endpoint
        self.azure_deployment = azure_deployment
        self.dimensions = dimensions
        self.organization = organization
        self.prefix = prefix
        self.suffix = suffix
        self.batch_size = batch_size
        self.progress_bar = progress_bar
        self.meta_fields_to_embed = meta_fields_to_embed or []
        self.embedding_separator = embedding_separator
        # `or` (not `is None`) keeps the original fallback semantics for falsy values.
        self.timeout = timeout or float(os.environ.get("OPENAI_TIMEOUT", 30.0))
        self.max_retries = max_retries or int(os.environ.get("OPENAI_MAX_RETRIES", 5))

        self._client = AzureOpenAI(
            api_version=api_version,
            azure_endpoint=azure_endpoint,
            azure_deployment=azure_deployment,
            api_key=api_key.resolve_value() if api_key is not None else None,
            azure_ad_token=azure_ad_token.resolve_value() if azure_ad_token is not None else None,
            organization=organization,
            timeout=self.timeout,
            max_retries=self.max_retries,
        )

    def _get_telemetry_data(self) -> Dict[str, Any]:
        """
        Data that is sent to Posthog for usage analytics.
        """
        return {"model": self.azure_deployment}

    def to_dict(self) -> Dict[str, Any]:
        """
        Serializes the component to a dictionary.

        :returns:
            Dictionary with serialized data.
        """
        return default_to_dict(
            self,
            azure_endpoint=self.azure_endpoint,
            azure_deployment=self.azure_deployment,
            dimensions=self.dimensions,
            organization=self.organization,
            api_version=self.api_version,
            prefix=self.prefix,
            suffix=self.suffix,
            batch_size=self.batch_size,
            progress_bar=self.progress_bar,
            meta_fields_to_embed=self.meta_fields_to_embed,
            embedding_separator=self.embedding_separator,
            api_key=self.api_key.to_dict() if self.api_key is not None else None,
            azure_ad_token=self.azure_ad_token.to_dict() if self.azure_ad_token is not None else None,
            timeout=self.timeout,
            max_retries=self.max_retries,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "AzureOpenAIDocumentEmbedder":
        """
        Deserializes the component from a dictionary.

        :param data:
            Dictionary to deserialize from.
        :returns:
            Deserialized component.
        """
        deserialize_secrets_inplace(data["init_parameters"], keys=["api_key", "azure_ad_token"])
        return default_from_dict(cls, data)

    def _prepare_texts_to_embed(self, documents: List[Document]) -> List[str]:
        """
        Prepare the texts to embed by concatenating the Document text with the metadata fields to embed.
        """
        texts_to_embed = []
        for doc in documents:
            meta_values_to_embed = [
                str(doc.meta[key]) for key in self.meta_fields_to_embed if key in doc.meta and doc.meta[key] is not None
            ]

            # Newlines are replaced with spaces, as recommended by OpenAI for embeddings input.
            text_to_embed = (
                self.prefix + self.embedding_separator.join(meta_values_to_embed + [doc.content or ""]) + self.suffix
            ).replace("\n", " ")

            texts_to_embed.append(text_to_embed)
        return texts_to_embed

    def _embed_batch(self, texts_to_embed: List[str], batch_size: int) -> Tuple[List[List[float]], Dict[str, Any]]:
        """
        Embed a list of texts in batches.
        """

        all_embeddings: List[List[float]] = []
        meta: Dict[str, Any] = {"model": "", "usage": {"prompt_tokens": 0, "total_tokens": 0}}
        # BUG FIX: honor the `progress_bar` flag — the bar was previously shown
        # unconditionally because `disable=` was never passed to tqdm.
        for i in tqdm(
            range(0, len(texts_to_embed), batch_size), disable=not self.progress_bar, desc="Embedding Texts"
        ):
            batch = texts_to_embed[i : i + batch_size]
            # `dimensions` is only sent when set: older models reject the parameter.
            if self.dimensions is not None:
                response = self._client.embeddings.create(
                    model=self.azure_deployment, dimensions=self.dimensions, input=batch
                )
            else:
                response = self._client.embeddings.create(model=self.azure_deployment, input=batch)

            # Append embeddings to the list
            all_embeddings.extend(el.embedding for el in response.data)

            # Update the meta information only once if it's empty
            if not meta["model"]:
                meta["model"] = response.model
                meta["usage"] = dict(response.usage)
            else:
                # Update the usage tokens
                meta["usage"]["prompt_tokens"] += response.usage.prompt_tokens
                meta["usage"]["total_tokens"] += response.usage.total_tokens

        return all_embeddings, meta

    @component.output_types(documents=List[Document], meta=Dict[str, Any])
    def run(self, documents: List[Document]) -> Dict[str, Any]:
        """
        Embeds a list of documents.

        :param documents:
            Documents to embed.

        :returns:
            A dictionary with the following keys:
            - `documents`: A list of documents with embeddings.
            - `meta`: Information about the usage of the model.
        """
        if not (isinstance(documents, list) and all(isinstance(doc, Document) for doc in documents)):
            raise TypeError("Input must be a list of Document instances. For strings, use AzureOpenAITextEmbedder.")

        texts_to_embed = self._prepare_texts_to_embed(documents=documents)
        embeddings, meta = self._embed_batch(texts_to_embed=texts_to_embed, batch_size=self.batch_size)

        # Assign the corresponding embeddings to each document
        for doc, emb in zip(documents, embeddings):
            doc.embedding = emb

        return {"documents": documents, "meta": meta}
@component
class AzureOpenAITextEmbedder:
    """
    Embeds strings using OpenAI models deployed on Azure.

    ### Usage example

    ```python
    from haystack.components.embedders import AzureOpenAITextEmbedder

    text_to_embed = "I love pizza!"

    text_embedder = AzureOpenAITextEmbedder()

    print(text_embedder.run(text_to_embed))

    # {'embedding': [0.017020374536514282, -0.023255806416273117, ...],
    # 'meta': {'model': 'text-embedding-ada-002-v2',
    #          'usage': {'prompt_tokens': 4, 'total_tokens': 4}}}
    ```
    """

    def __init__(
        self,
        azure_endpoint: Optional[str] = None,
        api_version: Optional[str] = "2023-05-15",
        azure_deployment: str = "text-embedding-ada-002",
        dimensions: Optional[int] = None,
        api_key: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_API_KEY", strict=False),
        azure_ad_token: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_AD_TOKEN", strict=False),
        organization: Optional[str] = None,
        timeout: Optional[float] = None,
        max_retries: Optional[int] = None,
        prefix: str = "",
        suffix: str = "",
    ):
        """
        Creates an AzureOpenAITextEmbedder component.

        :param azure_endpoint:
            The endpoint of the model deployed on Azure.
        :param api_version:
            The version of the API to use.
        :param azure_deployment:
            The name of the model deployed on Azure. The default model is text-embedding-ada-002.
        :param dimensions:
            The number of dimensions the resulting output embeddings should have.
            Only supported in text-embedding-3 and later models.
        :param api_key:
            The Azure OpenAI API key. You can set it with the environment variable
            `AZURE_OPENAI_API_KEY`, or pass it with this parameter during initialization.
        :param azure_ad_token:
            Microsoft Entra ID token (previously called Azure Active Directory). See Microsoft's
            [Entra ID](https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id)
            documentation for more information. You can set it with the environment variable
            `AZURE_OPENAI_AD_TOKEN`, or pass it with this parameter during initialization.
        :param organization:
            Your organization ID. See OpenAI's
            [Setting Up Your Organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization)
            for more information.
        :param timeout: The timeout for `AzureOpenAI` client calls, in seconds.
            If not set, defaults to either the `OPENAI_TIMEOUT` environment variable, or 30 seconds.
        :param max_retries: Maximum number of retries to contact AzureOpenAI after an internal error.
            If not set, defaults to either the `OPENAI_MAX_RETRIES` environment variable, or to 5 retries.
        :param prefix:
            A string to add at the beginning of each text.
        :param suffix:
            A string to add at the end of each text.
        """
        # The AzureOpenAI client refuses to start without an endpoint, so fall back to
        # the AZURE_OPENAI_ENDPOINT environment variable before giving up.
        resolved_endpoint = azure_endpoint or os.environ.get("AZURE_OPENAI_ENDPOINT")
        if not resolved_endpoint:
            raise ValueError("Please provide an Azure endpoint or set the environment variable AZURE_OPENAI_ENDPOINT.")

        if api_key is None and azure_ad_token is None:
            raise ValueError("Please provide an API key or an Azure Active Directory token.")

        self.api_key = api_key
        self.azure_ad_token = azure_ad_token
        self.api_version = api_version
        self.azure_endpoint = resolved_endpoint
        self.azure_deployment = azure_deployment
        self.dimensions = dimensions
        self.organization = organization
        # `or` keeps the env-var fallback even for explicitly-passed falsy values.
        self.timeout = timeout or float(os.environ.get("OPENAI_TIMEOUT", 30.0))
        self.max_retries = max_retries or int(os.environ.get("OPENAI_MAX_RETRIES", 5))
        self.prefix = prefix
        self.suffix = suffix

        # Resolve optional secrets up front; unset ones become None.
        resolved_api_key = api_key.resolve_value() if api_key is not None else None
        resolved_ad_token = azure_ad_token.resolve_value() if azure_ad_token is not None else None
        self._client = AzureOpenAI(
            api_version=api_version,
            azure_endpoint=resolved_endpoint,
            azure_deployment=azure_deployment,
            api_key=resolved_api_key,
            azure_ad_token=resolved_ad_token,
            organization=organization,
            timeout=self.timeout,
            max_retries=self.max_retries,
        )

    def _get_telemetry_data(self) -> Dict[str, Any]:
        """
        Data that is sent to Posthog for usage analytics.
        """
        return {"model": self.azure_deployment}

    def to_dict(self) -> Dict[str, Any]:
        """
        Serializes the component to a dictionary.

        :returns:
            Dictionary with serialized data.
        """
        serialized_key = self.api_key.to_dict() if self.api_key is not None else None
        serialized_ad_token = self.azure_ad_token.to_dict() if self.azure_ad_token is not None else None
        return default_to_dict(
            self,
            azure_endpoint=self.azure_endpoint,
            azure_deployment=self.azure_deployment,
            dimensions=self.dimensions,
            organization=self.organization,
            api_version=self.api_version,
            prefix=self.prefix,
            suffix=self.suffix,
            api_key=serialized_key,
            azure_ad_token=serialized_ad_token,
            timeout=self.timeout,
            max_retries=self.max_retries,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "AzureOpenAITextEmbedder":
        """
        Deserializes the component from a dictionary.

        :param data:
            Dictionary to deserialize from.
        :returns:
            Deserialized component.
        """
        deserialize_secrets_inplace(data["init_parameters"], keys=["api_key", "azure_ad_token"])
        return default_from_dict(cls, data)

    @component.output_types(embedding=List[float], meta=Dict[str, Any])
    def run(self, text: str):
        """
        Embeds a single string.

        :param text:
            Text to embed.

        :returns:
            A dictionary with the following keys:
            - `embedding`: The embedding of the input text.
            - `meta`: Information about the usage of the model.
        """
        if not isinstance(text, str):
            # Give a more helpful hint when the caller passed a list of Documents.
            got_document_list = isinstance(text, list) and all(isinstance(item, Document) for item in text)
            if got_document_list:
                raise TypeError("Input must be a string. Use AzureOpenAIDocumentEmbedder for a list of Documents.")
            raise TypeError("Input must be a string.")

        # Apply prefix/suffix, then replace newlines as recommended by OpenAI docs.
        processed_text = (self.prefix + text + self.suffix).replace("\n", " ")

        # Only send `dimensions` when it was configured; older models reject it.
        create_kwargs: Dict[str, Any] = {"model": self.azure_deployment, "input": processed_text}
        if self.dimensions is not None:
            create_kwargs["dimensions"] = self.dimensions
        response = self._client.embeddings.create(**create_kwargs)

        return {
            "embedding": response.data[0].embedding,
            "meta": {"model": response.model, "usage": dict(response.usage)},
        }
class _SentenceTransformersEmbeddingBackendFactory:
    """
    Factory class to create instances of Sentence Transformers embedding backends.

    Backends are cached so repeated requests with identical construction
    parameters share one loaded model.
    """

    # Cache of already-constructed backends, keyed by their construction parameters.
    _instances: Dict[str, "_SentenceTransformersEmbeddingBackend"] = {}

    @staticmethod
    def get_embedding_backend(
        model: str,
        device: Optional[str] = None,
        auth_token: Optional[Secret] = None,
        trust_remote_code: bool = False,
        truncate_dim: Optional[int] = None,
        model_kwargs: Optional[Dict[str, Any]] = None,
        tokenizer_kwargs: Optional[Dict[str, Any]] = None,
        config_kwargs: Optional[Dict[str, Any]] = None,
    ):
        """
        Return a cached embedding backend for these parameters, creating it on first use.
        """
        # BUG FIX: the cache key must include *every* construction parameter.
        # It previously omitted trust_remote_code, model_kwargs, tokenizer_kwargs and
        # config_kwargs, so two requests differing only in those options would
        # incorrectly share the same backend instance.
        embedding_backend_id = (
            f"{model}{device}{auth_token}{trust_remote_code}{truncate_dim}"
            f"{model_kwargs}{tokenizer_kwargs}{config_kwargs}"
        )

        if embedding_backend_id in _SentenceTransformersEmbeddingBackendFactory._instances:
            return _SentenceTransformersEmbeddingBackendFactory._instances[embedding_backend_id]
        embedding_backend = _SentenceTransformersEmbeddingBackend(
            model=model,
            device=device,
            auth_token=auth_token,
            trust_remote_code=trust_remote_code,
            truncate_dim=truncate_dim,
            model_kwargs=model_kwargs,
            tokenizer_kwargs=tokenizer_kwargs,
            config_kwargs=config_kwargs,
        )
        _SentenceTransformersEmbeddingBackendFactory._instances[embedding_backend_id] = embedding_backend
        return embedding_backend


class _SentenceTransformersEmbeddingBackend:
    """
    Class to manage Sentence Transformers embeddings.
    """

    def __init__(
        self,
        model: str,
        device: Optional[str] = None,
        auth_token: Optional[Secret] = None,
        trust_remote_code: bool = False,
        truncate_dim: Optional[int] = None,
        model_kwargs: Optional[Dict[str, Any]] = None,
        tokenizer_kwargs: Optional[Dict[str, Any]] = None,
        config_kwargs: Optional[Dict[str, Any]] = None,
    ):
        # Raises a helpful ImportError if sentence-transformers is not installed.
        sentence_transformers_import.check()
        self.model = SentenceTransformer(
            model_name_or_path=model,
            device=device,
            use_auth_token=auth_token.resolve_value() if auth_token else None,
            trust_remote_code=trust_remote_code,
            truncate_dim=truncate_dim,
            model_kwargs=model_kwargs,
            tokenizer_kwargs=tokenizer_kwargs,
            config_kwargs=config_kwargs,
        )

    def embed(self, data: List[str], **kwargs) -> List[List[float]]:
        """
        Embed a list of strings; returns one embedding (list of floats) per input.
        """
        embeddings = self.model.encode(data, **kwargs).tolist()
        return embeddings
@component
class HuggingFaceAPIDocumentEmbedder:
    """
    Embeds documents using Hugging Face APIs.

    Use it with the following Hugging Face APIs:
    - [Free Serverless Inference API](https://huggingface.co/inference-api)
    - [Paid Inference Endpoints](https://huggingface.co/inference-endpoints)
    - [Self-hosted Text Embeddings Inference](https://github.com/huggingface/text-embeddings-inference)


    ### Usage examples

    #### With free serverless inference API

    ```python
    from haystack.components.embedders import HuggingFaceAPIDocumentEmbedder
    from haystack.utils import Secret
    from haystack.dataclasses import Document

    doc = Document(content="I love pizza!")

    doc_embedder = HuggingFaceAPIDocumentEmbedder(api_type="serverless_inference_api",
                                                  api_params={"model": "BAAI/bge-small-en-v1.5"},
                                                  token=Secret.from_token(""))

    result = doc_embedder.run([doc])
    print(result["documents"][0].embedding)

    # [0.017020374536514282, -0.023255806416273117, ...]
    ```

    #### With paid inference endpoints

    ```python
    from haystack.components.embedders import HuggingFaceAPIDocumentEmbedder
    from haystack.utils import Secret
    from haystack.dataclasses import Document

    doc = Document(content="I love pizza!")

    doc_embedder = HuggingFaceAPIDocumentEmbedder(api_type="inference_endpoints",
                                                  api_params={"url": ""},
                                                  token=Secret.from_token(""))

    result = doc_embedder.run([doc])
    print(result["documents"][0].embedding)

    # [0.017020374536514282, -0.023255806416273117, ...]
    ```

    #### With self-hosted text embeddings inference

    ```python
    from haystack.components.embedders import HuggingFaceAPIDocumentEmbedder
    from haystack.dataclasses import Document

    doc = Document(content="I love pizza!")

    doc_embedder = HuggingFaceAPIDocumentEmbedder(api_type="text_embeddings_inference",
                                                  api_params={"url": "http://localhost:8080"})

    result = doc_embedder.run([doc])
    print(result["documents"][0].embedding)

    # [0.017020374536514282, -0.023255806416273117, ...]
    ```
    """

    def __init__(
        self,
        api_type: Union[HFEmbeddingAPIType, str],
        api_params: Dict[str, str],
        token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False),
        prefix: str = "",
        suffix: str = "",
        truncate: bool = True,
        normalize: bool = False,
        batch_size: int = 32,
        progress_bar: bool = True,
        meta_fields_to_embed: Optional[List[str]] = None,
        embedding_separator: str = "\n",
    ):  # pylint: disable=too-many-positional-arguments
        """
        Creates a HuggingFaceAPIDocumentEmbedder component.

        :param api_type:
            The type of Hugging Face API to use.
        :param api_params:
            A dictionary with the following keys:
            - `model`: Hugging Face model ID. Required when `api_type` is `SERVERLESS_INFERENCE_API`.
            - `url`: URL of the inference endpoint. Required when `api_type` is `INFERENCE_ENDPOINTS` or
            `TEXT_EMBEDDINGS_INFERENCE`.
        :param token: The Hugging Face token to use as HTTP bearer authorization.
            Check your HF token in your [account settings](https://huggingface.co/settings/tokens).
        :param prefix:
            A string to add at the beginning of each text.
        :param suffix:
            A string to add at the end of each text.
        :param truncate:
            Truncates the input text to the maximum length supported by the model.
            Applicable when `api_type` is `TEXT_EMBEDDINGS_INFERENCE`, or `INFERENCE_ENDPOINTS`
            if the backend uses Text Embeddings Inference.
            If `api_type` is `SERVERLESS_INFERENCE_API`, this parameter is ignored.
            It is always set to `True` and cannot be changed.
        :param normalize:
            Normalizes the embeddings to unit length.
            Applicable when `api_type` is `TEXT_EMBEDDINGS_INFERENCE`, or `INFERENCE_ENDPOINTS`
            if the backend uses Text Embeddings Inference.
            If `api_type` is `SERVERLESS_INFERENCE_API`, this parameter is ignored.
            It is always set to `False` and cannot be changed.
        :param batch_size:
            Number of documents to process at once.
        :param progress_bar:
            If `True`, shows a progress bar when running.
        :param meta_fields_to_embed:
            List of metadata fields to embed along with the document text.
        :param embedding_separator:
            Separator used to concatenate the metadata fields to the document text.
        """
        huggingface_hub_import.check()

        if isinstance(api_type, str):
            api_type = HFEmbeddingAPIType.from_str(api_type)

        api_params = api_params or {}

        # Validate the parameter combination for the selected API flavor and work
        # out what the InferenceClient should point at (model ID or URL).
        if api_type == HFEmbeddingAPIType.SERVERLESS_INFERENCE_API:
            model = api_params.get("model")
            if model is None:
                raise ValueError(
                    "To use the Serverless Inference API, you need to specify the `model` parameter in `api_params`."
                )
            check_valid_model(model, HFModelType.EMBEDDING, token)
            model_or_url = model
        elif api_type in [HFEmbeddingAPIType.INFERENCE_ENDPOINTS, HFEmbeddingAPIType.TEXT_EMBEDDINGS_INFERENCE]:
            url = api_params.get("url")
            if url is None:
                msg = (
                    "To use Text Embeddings Inference or Inference Endpoints, you need to specify the `url` "
                    "parameter in `api_params`."
                )
                raise ValueError(msg)
            if not is_valid_http_url(url):
                raise ValueError(f"Invalid URL: {url}")
            model_or_url = url
        else:
            msg = f"Unknown api_type {api_type}"
            raise ValueError(msg)

        self.api_type = api_type
        self.api_params = api_params
        self.token = token
        self.prefix = prefix
        self.suffix = suffix
        self.truncate = truncate
        self.normalize = normalize
        self.batch_size = batch_size
        self.progress_bar = progress_bar
        self.meta_fields_to_embed = meta_fields_to_embed or []
        self.embedding_separator = embedding_separator
        self._client = InferenceClient(model_or_url, token=token.resolve_value() if token else None)

    def to_dict(self) -> Dict[str, Any]:
        """
        Serializes the component to a dictionary.

        :returns:
            Dictionary with serialized data.
        """
        return default_to_dict(
            self,
            api_type=str(self.api_type),
            api_params=self.api_params,
            prefix=self.prefix,
            suffix=self.suffix,
            token=self.token.to_dict() if self.token else None,
            truncate=self.truncate,
            normalize=self.normalize,
            batch_size=self.batch_size,
            progress_bar=self.progress_bar,
            meta_fields_to_embed=self.meta_fields_to_embed,
            embedding_separator=self.embedding_separator,
        )

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "HuggingFaceAPIDocumentEmbedder":
        """
        Deserializes the component from a dictionary.

        :param data:
            Dictionary to deserialize from.
        :returns:
            Deserialized component.
        """
        deserialize_secrets_inplace(data["init_parameters"], keys=["token"])
        return default_from_dict(cls, data)

    def _prepare_texts_to_embed(self, documents: List[Document]) -> List[str]:
        """
        Prepare the texts to embed by concatenating the Document text with the metadata fields to embed.
        """
        texts_to_embed = []
        for doc in documents:
            meta_values_to_embed = [
                str(doc.meta[key]) for key in self.meta_fields_to_embed if key in doc.meta and doc.meta[key] is not None
            ]

            text_to_embed = (
                self.prefix + self.embedding_separator.join(meta_values_to_embed + [doc.content or ""]) + self.suffix
            )

            texts_to_embed.append(text_to_embed)
        return texts_to_embed

    def _embed_batch(self, texts_to_embed: List[str], batch_size: int) -> List[List[float]]:
        """
        Embed a list of texts in batches.
        """

        all_embeddings = []
        for i in tqdm(
            range(0, len(texts_to_embed), batch_size), disable=not self.progress_bar, desc="Calculating embeddings"
        ):
            batch = texts_to_embed[i : i + batch_size]
            response = self._client.post(
                json={"inputs": batch, "truncate": self.truncate, "normalize": self.normalize},
                task="feature-extraction",
            )
            embeddings = json.loads(response.decode())
            all_embeddings.extend(embeddings)

        return all_embeddings

    @component.output_types(documents=List[Document])
    def run(self, documents: List[Document]):
        """
        Embeds a list of documents.

        :param documents:
            Documents to embed.

        :returns:
            A dictionary with the following keys:
            - `documents`: A list of documents with embeddings.
        """
        # BUG FIX: validate *every* element, not just the first one — a mixed list
        # such as [Document(...), "some string"] previously slipped through and
        # failed later with a confusing error inside _prepare_texts_to_embed.
        if not isinstance(documents, list) or not all(isinstance(doc, Document) for doc in documents):
            raise TypeError(
                "HuggingFaceAPIDocumentEmbedder expects a list of Documents as input."
                " In case you want to embed a string, please use the HuggingFaceAPITextEmbedder."
            )

        texts_to_embed = self._prepare_texts_to_embed(documents=documents)

        embeddings = self._embed_batch(texts_to_embed=texts_to_embed, batch_size=self.batch_size)

        for doc, emb in zip(documents, embeddings):
            doc.embedding = emb

        return {"documents": documents}
+ ) + + texts_to_embed = self._prepare_texts_to_embed(documents=documents) + + embeddings = self._embed_batch(texts_to_embed=texts_to_embed, batch_size=self.batch_size) + + for doc, emb in zip(documents, embeddings): + doc.embedding = emb + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/hugging_face_api_text_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/hugging_face_api_text_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..f60a9e5fd766a189bf2e90f91fd136679bce75c9 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/hugging_face_api_text_embedder.py @@ -0,0 +1,209 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import json +from typing import Any, Dict, List, Optional, Union + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.lazy_imports import LazyImport +from haystack.utils import Secret, deserialize_secrets_inplace +from haystack.utils.hf import HFEmbeddingAPIType, HFModelType, check_valid_model +from haystack.utils.url_validation import is_valid_http_url + +with LazyImport(message="Run 'pip install \"huggingface_hub>=0.23.0\"'") as huggingface_hub_import: + from huggingface_hub import InferenceClient + +logger = logging.getLogger(__name__) + + +@component +class HuggingFaceAPITextEmbedder: + """ + Embeds strings using Hugging Face APIs. 
+ + Use it with the following Hugging Face APIs: + - [Free Serverless Inference API](https://huggingface.co/inference-api) + - [Paid Inference Endpoints](https://huggingface.co/inference-endpoints) + - [Self-hosted Text Embeddings Inference](https://github.com/huggingface/text-embeddings-inference) + + ### Usage examples + + #### With free serverless inference API + + ```python + from haystack.components.embedders import HuggingFaceAPITextEmbedder + from haystack.utils import Secret + + text_embedder = HuggingFaceAPITextEmbedder(api_type="serverless_inference_api", + api_params={"model": "BAAI/bge-small-en-v1.5"}, + token=Secret.from_token("")) + + print(text_embedder.run("I love pizza!")) + + # {'embedding': [0.017020374536514282, -0.023255806416273117, ...], + ``` + + #### With paid inference endpoints + + ```python + from haystack.components.embedders import HuggingFaceAPITextEmbedder + from haystack.utils import Secret + text_embedder = HuggingFaceAPITextEmbedder(api_type="inference_endpoints", + api_params={"model": "BAAI/bge-small-en-v1.5"}, + token=Secret.from_token("")) + + print(text_embedder.run("I love pizza!")) + + # {'embedding': [0.017020374536514282, -0.023255806416273117, ...], + ``` + + #### With self-hosted text embeddings inference + + ```python + from haystack.components.embedders import HuggingFaceAPITextEmbedder + from haystack.utils import Secret + + text_embedder = HuggingFaceAPITextEmbedder(api_type="text_embeddings_inference", + api_params={"url": "http://localhost:8080"}) + + print(text_embedder.run("I love pizza!")) + + # {'embedding': [0.017020374536514282, -0.023255806416273117, ...], + ``` + """ + + def __init__( + self, + api_type: Union[HFEmbeddingAPIType, str], + api_params: Dict[str, str], + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + prefix: str = "", + suffix: str = "", + truncate: bool = True, + normalize: bool = False, + ): # pylint: disable=too-many-positional-arguments + """ + 
Creates a HuggingFaceAPITextEmbedder component. + + :param api_type: + The type of Hugging Face API to use. + :param api_params: + A dictionary with the following keys: + - `model`: Hugging Face model ID. Required when `api_type` is `SERVERLESS_INFERENCE_API`. + - `url`: URL of the inference endpoint. Required when `api_type` is `INFERENCE_ENDPOINTS` or + `TEXT_EMBEDDINGS_INFERENCE`. + :param token: The Hugging Face token to use as HTTP bearer authorization. + Check your HF token in your [account settings](https://huggingface.co/settings/tokens). + :param prefix: + A string to add at the beginning of each text. + :param suffix: + A string to add at the end of each text. + :param truncate: + Truncates the input text to the maximum length supported by the model. + Applicable when `api_type` is `TEXT_EMBEDDINGS_INFERENCE`, or `INFERENCE_ENDPOINTS` + if the backend uses Text Embeddings Inference. + If `api_type` is `SERVERLESS_INFERENCE_API`, this parameter is ignored. + It is always set to `True` and cannot be changed. + :param normalize: + Normalizes the embeddings to unit length. + Applicable when `api_type` is `TEXT_EMBEDDINGS_INFERENCE`, or `INFERENCE_ENDPOINTS` + if the backend uses Text Embeddings Inference. + If `api_type` is `SERVERLESS_INFERENCE_API`, this parameter is ignored. + It is always set to `False` and cannot be changed. + """ + huggingface_hub_import.check() + + if isinstance(api_type, str): + api_type = HFEmbeddingAPIType.from_str(api_type) + + if api_type == HFEmbeddingAPIType.SERVERLESS_INFERENCE_API: + model = api_params.get("model") + if model is None: + raise ValueError( + "To use the Serverless Inference API, you need to specify the `model` parameter in `api_params`." 
+ ) + check_valid_model(model, HFModelType.EMBEDDING, token) + model_or_url = model + elif api_type in [HFEmbeddingAPIType.INFERENCE_ENDPOINTS, HFEmbeddingAPIType.TEXT_EMBEDDINGS_INFERENCE]: + url = api_params.get("url") + if url is None: + msg = ( + "To use Text Embeddings Inference or Inference Endpoints, you need to specify the `url` " + "parameter in `api_params`." + ) + raise ValueError(msg) + if not is_valid_http_url(url): + raise ValueError(f"Invalid URL: {url}") + model_or_url = url + else: + msg = f"Unknown api_type {api_type}" + raise ValueError(msg) + + self.api_type = api_type + self.api_params = api_params + self.token = token + self.prefix = prefix + self.suffix = suffix + self.truncate = truncate + self.normalize = normalize + self._client = InferenceClient(model_or_url, token=token.resolve_value() if token else None) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, + api_type=str(self.api_type), + api_params=self.api_params, + prefix=self.prefix, + suffix=self.suffix, + token=self.token.to_dict() if self.token else None, + truncate=self.truncate, + normalize=self.normalize, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "HuggingFaceAPITextEmbedder": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["token"]) + return default_from_dict(cls, data) + + @component.output_types(embedding=List[float]) + def run(self, text: str): + """ + Embeds a single string. + + :param text: + Text to embed. + + :returns: + A dictionary with the following keys: + - `embedding`: The embedding of the input text. + """ + if not isinstance(text, str): + raise TypeError( + "HuggingFaceAPITextEmbedder expects a string as an input." 
+ "In case you want to embed a list of Documents, please use the HuggingFaceAPIDocumentEmbedder." + ) + + text_to_embed = self.prefix + text + self.suffix + + response = self._client.post( + json={"inputs": [text_to_embed], "truncate": self.truncate, "normalize": self.normalize}, + task="feature-extraction", + ) + embedding = json.loads(response.decode())[0] + + return {"embedding": embedding} diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/openai_document_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/openai_document_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..61b0cb4df945ae0a8d842eacb4cb72dd351beb66 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/openai_document_embedder.py @@ -0,0 +1,235 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Dict, List, Optional, Tuple + +from openai import OpenAI +from tqdm import tqdm + +from haystack import Document, component, default_from_dict, default_to_dict +from haystack.utils import Secret, deserialize_secrets_inplace + + +@component +class OpenAIDocumentEmbedder: + """ + Computes document embeddings using OpenAI models. + + ### Usage example + + ```python + from haystack import Document + from haystack.components.embedders import OpenAIDocumentEmbedder + + doc = Document(content="I love pizza!") + + document_embedder = OpenAIDocumentEmbedder() + + result = document_embedder.run([doc]) + print(result['documents'][0].embedding) + + # [0.017020374536514282, -0.023255806416273117, ...] 
+ ``` + """ + + def __init__( + self, + api_key: Secret = Secret.from_env_var("OPENAI_API_KEY"), + model: str = "text-embedding-ada-002", + dimensions: Optional[int] = None, + api_base_url: Optional[str] = None, + organization: Optional[str] = None, + prefix: str = "", + suffix: str = "", + batch_size: int = 32, + progress_bar: bool = True, + meta_fields_to_embed: Optional[List[str]] = None, + embedding_separator: str = "\n", + timeout: Optional[float] = None, + max_retries: Optional[int] = None, + ): + """ + Creates an OpenAIDocumentEmbedder component. + + Before initializing the component, you can set the 'OPENAI_TIMEOUT' and 'OPENAI_MAX_RETRIES' + environment variables to override the `timeout` and `max_retries` parameters respectively + in the OpenAI client. + + :param api_key: + The OpenAI API key. + You can set it with an environment variable `OPENAI_API_KEY`, or pass with this parameter + during initialization. + :param model: + The name of the model to use for calculating embeddings. + The default model is `text-embedding-ada-002`. + :param dimensions: + The number of dimensions of the resulting embeddings. Only `text-embedding-3` and + later models support this parameter. + :param api_base_url: + Overrides the default base URL for all HTTP requests. + :param organization: + Your OpenAI organization ID. See OpenAI's + [Setting Up Your Organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization) + for more information. + :param prefix: + A string to add at the beginning of each text. + :param suffix: + A string to add at the end of each text. + :param batch_size: + Number of documents to embed at once. + :param progress_bar: + If `True`, shows a progress bar when running. + :param meta_fields_to_embed: + List of metadata fields to embed along with the document text. + :param embedding_separator: + Separator used to concatenate the metadata fields to the document text. 
+ :param timeout: + Timeout for OpenAI client calls. If not set, it defaults to either the + `OPENAI_TIMEOUT` environment variable, or 30 seconds. + :param max_retries: + Maximum number of retries to contact OpenAI after an internal error. + If not set, it defaults to either the `OPENAI_MAX_RETRIES` environment variable, or 5 retries. + """ + self.api_key = api_key + self.model = model + self.dimensions = dimensions + self.api_base_url = api_base_url + self.organization = organization + self.prefix = prefix + self.suffix = suffix + self.batch_size = batch_size + self.progress_bar = progress_bar + self.meta_fields_to_embed = meta_fields_to_embed or [] + self.embedding_separator = embedding_separator + + if timeout is None: + timeout = float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + if max_retries is None: + max_retries = int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + + self.client = OpenAI( + api_key=api_key.resolve_value(), + organization=organization, + base_url=api_base_url, + timeout=timeout, + max_retries=max_retries, + ) + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, + model=self.model, + dimensions=self.dimensions, + organization=self.organization, + api_base_url=self.api_base_url, + prefix=self.prefix, + suffix=self.suffix, + batch_size=self.batch_size, + progress_bar=self.progress_bar, + meta_fields_to_embed=self.meta_fields_to_embed, + embedding_separator=self.embedding_separator, + api_key=self.api_key.to_dict(), + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OpenAIDocumentEmbedder": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. 
+ """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + return default_from_dict(cls, data) + + def _prepare_texts_to_embed(self, documents: List[Document]) -> List[str]: + """ + Prepare the texts to embed by concatenating the Document text with the metadata fields to embed. + """ + texts_to_embed = [] + for doc in documents: + meta_values_to_embed = [ + str(doc.meta[key]) for key in self.meta_fields_to_embed if key in doc.meta and doc.meta[key] is not None + ] + + text_to_embed = ( + self.prefix + self.embedding_separator.join(meta_values_to_embed + [doc.content or ""]) + self.suffix + ) + + # copied from OpenAI embedding_utils (https://github.com/openai/openai-python/blob/main/openai/embeddings_utils.py) + # replace newlines, which can negatively affect performance. + text_to_embed = text_to_embed.replace("\n", " ") + texts_to_embed.append(text_to_embed) + return texts_to_embed + + def _embed_batch(self, texts_to_embed: List[str], batch_size: int) -> Tuple[List[List[float]], Dict[str, Any]]: + """ + Embed a list of texts in batches. 
+ """ + + all_embeddings = [] + meta: Dict[str, Any] = {} + for i in tqdm( + range(0, len(texts_to_embed), batch_size), disable=not self.progress_bar, desc="Calculating embeddings" + ): + batch = texts_to_embed[i : i + batch_size] + if self.dimensions is not None: + response = self.client.embeddings.create(model=self.model, dimensions=self.dimensions, input=batch) + else: + response = self.client.embeddings.create(model=self.model, input=batch) + embeddings = [el.embedding for el in response.data] + all_embeddings.extend(embeddings) + + if "model" not in meta: + meta["model"] = response.model + if "usage" not in meta: + meta["usage"] = dict(response.usage) + else: + meta["usage"]["prompt_tokens"] += response.usage.prompt_tokens + meta["usage"]["total_tokens"] += response.usage.total_tokens + + return all_embeddings, meta + + @component.output_types(documents=List[Document], meta=Dict[str, Any]) + def run(self, documents: List[Document]): + """ + Embeds a list of documents. + + :param documents: + A list of documents to embed. + + :returns: + A dictionary with the following keys: + - `documents`: A list of documents with embeddings. + - `meta`: Information about the usage of the model. + """ + if not isinstance(documents, list) or documents and not isinstance(documents[0], Document): + raise TypeError( + "OpenAIDocumentEmbedder expects a list of Documents as input." + "In case you want to embed a string, please use the OpenAITextEmbedder." 
+ ) + + texts_to_embed = self._prepare_texts_to_embed(documents=documents) + + embeddings, meta = self._embed_batch(texts_to_embed=texts_to_embed, batch_size=self.batch_size) + + for doc, emb in zip(documents, embeddings): + doc.embedding = emb + + return {"documents": documents, "meta": meta} diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/openai_text_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/openai_text_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..4a2d9d3bee91c9be9dd283cdb5cdb2398fddb2f8 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/openai_text_embedder.py @@ -0,0 +1,177 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Dict, List, Optional + +from openai import OpenAI + +from haystack import component, default_from_dict, default_to_dict +from haystack.utils import Secret, deserialize_secrets_inplace + +OPENAI_TIMEOUT = float(os.environ.get("OPENAI_TIMEOUT", 30)) +OPENAI_MAX_RETRIES = int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + + +@component +class OpenAITextEmbedder: + """ + Embeds strings using OpenAI models. + + You can use it to embed user query and send it to an embedding Retriever. + + ### Usage example + + ```python + from haystack.components.embedders import OpenAITextEmbedder + + text_to_embed = "I love pizza!" 
+ + text_embedder = OpenAITextEmbedder() + + print(text_embedder.run(text_to_embed)) + + # {'embedding': [0.017020374536514282, -0.023255806416273117, ...], + # 'meta': {'model': 'text-embedding-ada-002-v2', + # 'usage': {'prompt_tokens': 4, 'total_tokens': 4}}} + ``` + """ + + def __init__( + self, + api_key: Secret = Secret.from_env_var("OPENAI_API_KEY"), + model: str = "text-embedding-ada-002", + dimensions: Optional[int] = None, + api_base_url: Optional[str] = None, + organization: Optional[str] = None, + prefix: str = "", + suffix: str = "", + timeout: Optional[float] = None, + max_retries: Optional[int] = None, + ): + """ + Creates an OpenAITextEmbedder component. + + Before initializing the component, you can set the 'OPENAI_TIMEOUT' and 'OPENAI_MAX_RETRIES' + environment variables to override the `timeout` and `max_retries` parameters respectively + in the OpenAI client. + + :param api_key: + The OpenAI API key. + You can set it with an environment variable `OPENAI_API_KEY`, or pass with this parameter + during initialization. + :param model: + The name of the model to use for calculating embeddings. + The default model is `text-embedding-ada-002`. + :param dimensions: + The number of dimensions of the resulting embeddings. Only `text-embedding-3` and + later models support this parameter. + :param api_base_url: + Overrides default base URL for all HTTP requests. + :param organization: + Your organization ID. See OpenAI's + [production best practices](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization) + for more information. + :param prefix: + A string to add at the beginning of each text to embed. + :param suffix: + A string to add at the end of each text to embed. + :param timeout: + Timeout for OpenAI client calls. If not set, it defaults to either the + `OPENAI_TIMEOUT` environment variable, or 30 seconds. + :param max_retries: + Maximum number of retries to contact OpenAI after an internal error. 
+ If not set, it defaults to either the `OPENAI_MAX_RETRIES` environment variable, or set to 5. + """ + self.model = model + self.dimensions = dimensions + self.api_base_url = api_base_url + self.organization = organization + self.prefix = prefix + self.suffix = suffix + self.api_key = api_key + + if timeout is None: + timeout = float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + if max_retries is None: + max_retries = int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + + self.client = OpenAI( + api_key=api_key.resolve_value(), + organization=organization, + base_url=api_base_url, + timeout=timeout, + max_retries=max_retries, + ) + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + return default_to_dict( + self, + model=self.model, + api_base_url=self.api_base_url, + organization=self.organization, + prefix=self.prefix, + suffix=self.suffix, + dimensions=self.dimensions, + api_key=self.api_key.to_dict(), + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OpenAITextEmbedder": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + return default_from_dict(cls, data) + + @component.output_types(embedding=List[float], meta=Dict[str, Any]) + def run(self, text: str): + """ + Embeds a single string. + + :param text: + Text to embed. + + :returns: + A dictionary with the following keys: + - `embedding`: The embedding of the input text. + - `meta`: Information about the usage of the model. + """ + if not isinstance(text, str): + raise TypeError( + "OpenAITextEmbedder expects a string as an input." 
+ "In case you want to embed a list of Documents, please use the OpenAIDocumentEmbedder." + ) + + text_to_embed = self.prefix + text + self.suffix + + # copied from OpenAI embedding_utils (https://github.com/openai/openai-python/blob/main/openai/embeddings_utils.py) + # replace newlines, which can negatively affect performance. + text_to_embed = text_to_embed.replace("\n", " ") + + if self.dimensions is not None: + response = self.client.embeddings.create(model=self.model, dimensions=self.dimensions, input=text_to_embed) + else: + response = self.client.embeddings.create(model=self.model, input=text_to_embed) + + meta = {"model": response.model, "usage": dict(response.usage)} + + return {"embedding": response.data[0].embedding, "meta": meta} diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_document_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_document_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..195c5d10c850857cb76a7fd3db8d7544f3b31958 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_document_embedder.py @@ -0,0 +1,240 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Literal, Optional + +from haystack import Document, component, default_from_dict, default_to_dict +from haystack.components.embedders.backends.sentence_transformers_backend import ( + _SentenceTransformersEmbeddingBackendFactory, +) +from haystack.utils import ComponentDevice, Secret, deserialize_secrets_inplace +from haystack.utils.hf import deserialize_hf_model_kwargs, serialize_hf_model_kwargs + + +@component +class SentenceTransformersDocumentEmbedder: + """ + Calculates document embeddings using Sentence Transformers models. + + It stores the embeddings in the `embedding` metadata field of each document. 
+ You can also embed documents' metadata. + Use this component in indexing pipelines to embed input documents + and send them to DocumentWriter to write a into a Document Store. + + ### Usage example: + + ```python + from haystack import Document + from haystack.components.embedders import SentenceTransformersDocumentEmbedder + doc = Document(content="I love pizza!") + doc_embedder = SentenceTransformersDocumentEmbedder() + doc_embedder.warm_up() + + result = doc_embedder.run([doc]) + print(result['documents'][0].embedding) + + # [-0.07804739475250244, 0.1498992145061493, ...] + ``` + """ + + def __init__( # noqa: PLR0913 + self, + model: str = "sentence-transformers/all-mpnet-base-v2", + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + prefix: str = "", + suffix: str = "", + batch_size: int = 32, + progress_bar: bool = True, + normalize_embeddings: bool = False, + meta_fields_to_embed: Optional[List[str]] = None, + embedding_separator: str = "\n", + trust_remote_code: bool = False, + truncate_dim: Optional[int] = None, + model_kwargs: Optional[Dict[str, Any]] = None, + tokenizer_kwargs: Optional[Dict[str, Any]] = None, + config_kwargs: Optional[Dict[str, Any]] = None, + precision: Literal["float32", "int8", "uint8", "binary", "ubinary"] = "float32", + ): + """ + Creates a SentenceTransformersDocumentEmbedder component. + + :param model: + The model to use for calculating embeddings. + Pass a local path or ID of the model on Hugging Face. + :param device: + The device to use for loading the model. + Overrides the default device. + :param token: + The API token to download private models from Hugging Face. + :param prefix: + A string to add at the beginning of each document text. + Can be used to prepend the text with an instruction, as required by some embedding models, + such as E5 and bge. + :param suffix: + A string to add at the end of each document text. 
+ :param batch_size: + Number of documents to embed at once. + :param progress_bar: + If `True`, shows a progress bar when embedding documents. + :param normalize_embeddings: + If `True`, returns vectors with length 1. + :param meta_fields_to_embed: + List of metadata fields to embed along with the document text. + :param embedding_separator: + Separator used to concatenate the metadata fields to the document text. + :param trust_remote_code: + If `False`, allows only Hugging Face verified model architectures. + If `True`, allows custom models and scripts. + :param truncate_dim: + The dimension to truncate sentence embeddings to. `None` does no truncation. + If the model wasn't trained with Matryoshka Representation Learning, + truncating embeddings can significantly affect performance. + :param model_kwargs: + Additional keyword arguments for `AutoModelForSequenceClassification.from_pretrained` + when loading the model. Refer to specific model documentation for available kwargs. + :param tokenizer_kwargs: + Additional keyword arguments for `AutoTokenizer.from_pretrained` when loading the tokenizer. + Refer to specific model documentation for available kwargs. + :param config_kwargs: + Additional keyword arguments for `AutoConfig.from_pretrained` when loading the model configuration. + :param precision: + The precision to use for the embeddings. + All non-float32 precisions are quantized embeddings. + Quantized embeddings are smaller and faster to compute, but may have a lower accuracy. + They are useful for reducing the size of the embeddings of a corpus for semantic search, among other tasks. 
+ """ + + self.model = model + self.device = ComponentDevice.resolve_device(device) + self.token = token + self.prefix = prefix + self.suffix = suffix + self.batch_size = batch_size + self.progress_bar = progress_bar + self.normalize_embeddings = normalize_embeddings + self.meta_fields_to_embed = meta_fields_to_embed or [] + self.embedding_separator = embedding_separator + self.trust_remote_code = trust_remote_code + self.truncate_dim = truncate_dim + self.model_kwargs = model_kwargs + self.tokenizer_kwargs = tokenizer_kwargs + self.config_kwargs = config_kwargs + self.embedding_backend = None + self.precision = precision + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + serialization_dict = default_to_dict( + self, + model=self.model, + device=self.device.to_dict(), + token=self.token.to_dict() if self.token else None, + prefix=self.prefix, + suffix=self.suffix, + batch_size=self.batch_size, + progress_bar=self.progress_bar, + normalize_embeddings=self.normalize_embeddings, + meta_fields_to_embed=self.meta_fields_to_embed, + embedding_separator=self.embedding_separator, + trust_remote_code=self.trust_remote_code, + truncate_dim=self.truncate_dim, + model_kwargs=self.model_kwargs, + tokenizer_kwargs=self.tokenizer_kwargs, + config_kwargs=self.config_kwargs, + precision=self.precision, + ) + if serialization_dict["init_parameters"].get("model_kwargs") is not None: + serialize_hf_model_kwargs(serialization_dict["init_parameters"]["model_kwargs"]) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "SentenceTransformersDocumentEmbedder": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. 
+ """ + init_params = data["init_parameters"] + if init_params.get("device") is not None: + init_params["device"] = ComponentDevice.from_dict(init_params["device"]) + deserialize_secrets_inplace(init_params, keys=["token"]) + if init_params.get("model_kwargs") is not None: + deserialize_hf_model_kwargs(init_params["model_kwargs"]) + return default_from_dict(cls, data) + + def warm_up(self): + """ + Initializes the component. + """ + if self.embedding_backend is None: + self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend( + model=self.model, + device=self.device.to_torch_str(), + auth_token=self.token, + trust_remote_code=self.trust_remote_code, + truncate_dim=self.truncate_dim, + model_kwargs=self.model_kwargs, + tokenizer_kwargs=self.tokenizer_kwargs, + config_kwargs=self.config_kwargs, + ) + if self.tokenizer_kwargs and self.tokenizer_kwargs.get("model_max_length"): + self.embedding_backend.model.max_seq_length = self.tokenizer_kwargs["model_max_length"] + + @component.output_types(documents=List[Document]) + def run(self, documents: List[Document]): + """ + Embed a list of documents. + + :param documents: + Documents to embed. + + :returns: + A dictionary with the following keys: + - `documents`: Documents with embeddings. + """ + if not isinstance(documents, list) or documents and not isinstance(documents[0], Document): + raise TypeError( + "SentenceTransformersDocumentEmbedder expects a list of Documents as input." + "In case you want to embed a list of strings, please use the SentenceTransformersTextEmbedder." + ) + if self.embedding_backend is None: + raise RuntimeError("The embedding model has not been loaded. 
Please call warm_up() before running.") + + texts_to_embed = [] + for doc in documents: + meta_values_to_embed = [ + str(doc.meta[key]) for key in self.meta_fields_to_embed if key in doc.meta and doc.meta[key] + ] + text_to_embed = ( + self.prefix + self.embedding_separator.join(meta_values_to_embed + [doc.content or ""]) + self.suffix + ) + texts_to_embed.append(text_to_embed) + + embeddings = self.embedding_backend.embed( + texts_to_embed, + batch_size=self.batch_size, + show_progress_bar=self.progress_bar, + normalize_embeddings=self.normalize_embeddings, + precision=self.precision, + ) + + for doc, emb in zip(documents, embeddings): + doc.embedding = emb + + return {"documents": documents} diff --git a/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_text_embedder.py b/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_text_embedder.py new file mode 100644 index 0000000000000000000000000000000000000000..f8b959495fa550b8ea65893183452e1b42a1306a --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/embedders/sentence_transformers_text_embedder.py @@ -0,0 +1,213 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Literal, Optional + +from haystack import component, default_from_dict, default_to_dict +from haystack.components.embedders.backends.sentence_transformers_backend import ( + _SentenceTransformersEmbeddingBackendFactory, +) +from haystack.utils import ComponentDevice, Secret, deserialize_secrets_inplace +from haystack.utils.hf import deserialize_hf_model_kwargs, serialize_hf_model_kwargs + + +@component +class SentenceTransformersTextEmbedder: + """ + Embeds strings using Sentence Transformers models. + + You can use it to embed user query and send it to an embedding retriever. 
+ + Usage example: + ```python + from haystack.components.embedders import SentenceTransformersTextEmbedder + + text_to_embed = "I love pizza!" + + text_embedder = SentenceTransformersTextEmbedder() + text_embedder.warm_up() + + print(text_embedder.run(text_to_embed)) + + # {'embedding': [-0.07804739475250244, 0.1498992145061493,, ...]} + ``` + """ + + def __init__( # noqa: PLR0913 + self, + model: str = "sentence-transformers/all-mpnet-base-v2", + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + prefix: str = "", + suffix: str = "", + batch_size: int = 32, + progress_bar: bool = True, + normalize_embeddings: bool = False, + trust_remote_code: bool = False, + truncate_dim: Optional[int] = None, + model_kwargs: Optional[Dict[str, Any]] = None, + tokenizer_kwargs: Optional[Dict[str, Any]] = None, + config_kwargs: Optional[Dict[str, Any]] = None, + precision: Literal["float32", "int8", "uint8", "binary", "ubinary"] = "float32", + ): + """ + Create a SentenceTransformersTextEmbedder component. + + :param model: + The model to use for calculating embeddings. + Specify the path to a local model or the ID of the model on Hugging Face. + :param device: + Overrides the default device used to load the model. + :param token: + An API token to use private models from Hugging Face. + :param prefix: + A string to add at the beginning of each text to be embedded. + You can use it to prepend the text with an instruction, as required by some embedding models, + such as E5 and bge. + :param suffix: + A string to add at the end of each text to embed. + :param batch_size: + Number of texts to embed at once. + :param progress_bar: + If `True`, shows a progress bar for calculating embeddings. + If `False`, disables the progress bar. + :param normalize_embeddings: + If `True`, returned vectors have a length of 1. 
+ :param trust_remote_code: + If `False`, permits only Hugging Face verified model architectures. + If `True`, permits custom models and scripts. + :param truncate_dim: + The dimension to truncate sentence embeddings to. `None` does no truncation. + If the model has not been trained with Matryoshka Representation Learning, + truncation of embeddings can significantly affect performance. + :param model_kwargs: + Additional keyword arguments for `AutoModelForSequenceClassification.from_pretrained` + when loading the model. Refer to specific model documentation for available kwargs. + :param tokenizer_kwargs: + Additional keyword arguments for `AutoTokenizer.from_pretrained` when loading the tokenizer. + Refer to specific model documentation for available kwargs. + :param config_kwargs: + Additional keyword arguments for `AutoConfig.from_pretrained` when loading the model configuration. + :param precision: + The precision to use for the embeddings. + All non-float32 precisions are quantized embeddings. + Quantized embeddings are smaller in size and faster to compute, but may have a lower accuracy. + They are useful for reducing the size of the embeddings of a corpus for semantic search, among other tasks. + """ + + self.model = model + self.device = ComponentDevice.resolve_device(device) + self.token = token + self.prefix = prefix + self.suffix = suffix + self.batch_size = batch_size + self.progress_bar = progress_bar + self.normalize_embeddings = normalize_embeddings + self.trust_remote_code = trust_remote_code + self.truncate_dim = truncate_dim + self.model_kwargs = model_kwargs + self.tokenizer_kwargs = tokenizer_kwargs + self.config_kwargs = config_kwargs + self.embedding_backend = None + self.precision = precision + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. 
+ + :returns: + Dictionary with serialized data. + """ + serialization_dict = default_to_dict( + self, + model=self.model, + device=self.device.to_dict(), + token=self.token.to_dict() if self.token else None, + prefix=self.prefix, + suffix=self.suffix, + batch_size=self.batch_size, + progress_bar=self.progress_bar, + normalize_embeddings=self.normalize_embeddings, + trust_remote_code=self.trust_remote_code, + truncate_dim=self.truncate_dim, + model_kwargs=self.model_kwargs, + tokenizer_kwargs=self.tokenizer_kwargs, + config_kwargs=self.config_kwargs, + precision=self.precision, + ) + if serialization_dict["init_parameters"].get("model_kwargs") is not None: + serialize_hf_model_kwargs(serialization_dict["init_parameters"]["model_kwargs"]) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "SentenceTransformersTextEmbedder": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. + """ + init_params = data["init_parameters"] + if init_params.get("device") is not None: + init_params["device"] = ComponentDevice.from_dict(init_params["device"]) + deserialize_secrets_inplace(init_params, keys=["token"]) + if init_params.get("model_kwargs") is not None: + deserialize_hf_model_kwargs(init_params["model_kwargs"]) + return default_from_dict(cls, data) + + def warm_up(self): + """ + Initializes the component. 
+ """ + if self.embedding_backend is None: + self.embedding_backend = _SentenceTransformersEmbeddingBackendFactory.get_embedding_backend( + model=self.model, + device=self.device.to_torch_str(), + auth_token=self.token, + trust_remote_code=self.trust_remote_code, + truncate_dim=self.truncate_dim, + model_kwargs=self.model_kwargs, + tokenizer_kwargs=self.tokenizer_kwargs, + config_kwargs=self.config_kwargs, + ) + if self.tokenizer_kwargs and self.tokenizer_kwargs.get("model_max_length"): + self.embedding_backend.model.max_seq_length = self.tokenizer_kwargs["model_max_length"] + + @component.output_types(embedding=List[float]) + def run(self, text: str): + """ + Embed a single string. + + :param text: + Text to embed. + + :returns: + A dictionary with the following keys: + - `embedding`: The embedding of the input text. + """ + if not isinstance(text, str): + raise TypeError( + "SentenceTransformersTextEmbedder expects a string as input." + "In case you want to embed a list of Documents, please use the SentenceTransformersDocumentEmbedder." + ) + if self.embedding_backend is None: + raise RuntimeError("The embedding model has not been loaded. 
Please call warm_up() before running.") + + text_to_embed = self.prefix + text + self.suffix + embedding = self.embedding_backend.embed( + [text_to_embed], + batch_size=self.batch_size, + show_progress_bar=self.progress_bar, + normalize_embeddings=self.normalize_embeddings, + precision=self.precision, + )[0] + return {"embedding": embedding} diff --git a/testbed/deepset-ai__haystack/haystack/components/fetchers/__init__.py b/testbed/deepset-ai__haystack/haystack/components/fetchers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..010695cdae66600d4889525d94558a33d43a01a0 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/fetchers/__init__.py @@ -0,0 +1,7 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.fetchers.link_content import LinkContentFetcher + +__all__ = ["LinkContentFetcher"] diff --git a/testbed/deepset-ai__haystack/haystack/components/fetchers/link_content.py b/testbed/deepset-ai__haystack/haystack/components/fetchers/link_content.py new file mode 100644 index 0000000000000000000000000000000000000000..38712d683f7f33bc6549465f6f2f110228de6841 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/fetchers/link_content.py @@ -0,0 +1,260 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from collections import defaultdict +from concurrent.futures import ThreadPoolExecutor +from fnmatch import fnmatch +from typing import Callable, Dict, List, Optional, Tuple + +import requests +from requests import Response +from requests.exceptions import HTTPError +from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt, wait_exponential + +from haystack import component, logging +from haystack.dataclasses import ByteStream +from haystack.version import __version__ + +logger = logging.getLogger(__name__) + + +DEFAULT_USER_AGENT = 
f"haystack/LinkContentFetcher/{__version__}" + +REQUEST_HEADERS = { + "accept": "*/*", + "User-Agent": DEFAULT_USER_AGENT, + "Accept-Language": "en-US,en;q=0.9,it;q=0.8,es;q=0.7", + "referer": "https://www.google.com/", +} + + +def _text_content_handler(response: Response) -> ByteStream: + """ + Handles text content. + + :param response: Response object from the request. + :return: The extracted text. + """ + return ByteStream.from_string(response.text) + + +def _binary_content_handler(response: Response) -> ByteStream: + """ + Handles binary content. + + :param response: Response object from the request. + :return: The extracted binary file-like object. + """ + return ByteStream(data=response.content) + + +@component +class LinkContentFetcher: + """ + Fetches and extracts content from URLs. + + It supports various content types, retries on failures, and automatic user-agent rotation for failed web + requests. Use it as the data-fetching step in your pipelines. + + You may need to convert LinkContentFetcher's output into a list of documents. Use HTMLToDocument + converter to do this. + + ### Usage example + + ```python + from haystack.components.fetchers.link_content import LinkContentFetcher + + fetcher = LinkContentFetcher() + streams = fetcher.run(urls=["https://www.google.com"])["streams"] + + assert len(streams) == 1 + assert streams[0].meta == {'content_type': 'text/html', 'url': 'https://www.google.com'} + assert streams[0].data + ``` + """ + + def __init__( + self, + raise_on_failure: bool = True, + user_agents: Optional[List[str]] = None, + retry_attempts: int = 2, + timeout: int = 3, + ): + """ + Initializes the component. + + :param raise_on_failure: If `True`, raises an exception if it fails to fetch a single URL. + For multiple URLs, it logs errors and returns the content it successfully fetched. + :param user_agents: [User agents](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent) + for fetching content. 
If `None`, a default user agent is used. + :param retry_attempts: The number of times to retry to fetch the URL's content. + :param timeout: Timeout in seconds for the request. + """ + self.raise_on_failure = raise_on_failure + self.user_agents = user_agents or [DEFAULT_USER_AGENT] + self.current_user_agent_idx: int = 0 + self.retry_attempts = retry_attempts + self.timeout = timeout + + # register default content handlers that extract data from the response + self.handlers: Dict[str, Callable[[Response], ByteStream]] = defaultdict(lambda: _text_content_handler) + self.handlers["text/*"] = _text_content_handler + self.handlers["text/html"] = _binary_content_handler + self.handlers["application/json"] = _text_content_handler + self.handlers["application/*"] = _binary_content_handler + self.handlers["image/*"] = _binary_content_handler + self.handlers["audio/*"] = _binary_content_handler + self.handlers["video/*"] = _binary_content_handler + + @retry( + reraise=True, + stop=stop_after_attempt(self.retry_attempts), + wait=wait_exponential(multiplier=1, min=2, max=10), + retry=(retry_if_exception_type((HTTPError, requests.RequestException))), + # This method is invoked only after failed requests (exception raised) + after=self._switch_user_agent, + ) + def get_response(url): + # we need to copy because we modify the headers + headers = REQUEST_HEADERS.copy() + headers["User-Agent"] = self.user_agents[self.current_user_agent_idx] + response = requests.get(url, headers=headers, timeout=timeout or 3) + response.raise_for_status() + return response + + self._get_response: Callable = get_response + + @component.output_types(streams=List[ByteStream]) + def run(self, urls: List[str]): + """ + Fetches content from a list of URLs and returns a list of extracted content streams. + + Each content stream is a `ByteStream` object containing the extracted content as binary data. + Each ByteStream object in the returned list corresponds to the contents of a single URL. 
+ The content type of each stream is stored in the metadata of the ByteStream object under + the key "content_type". The URL of the fetched content is stored under the key "url". + + :param urls: A list of URLs to fetch content from. + :return: `ByteStream` objects representing the extracted content. + + :raises Exception: If the provided list of URLs contains only a single URL, and `raise_on_failure` is set to + `True`, an exception will be raised in case of an error during content retrieval. + In all other scenarios, any retrieval errors are logged, and a list of successfully retrieved `ByteStream` + objects is returned. + """ + streams: List[ByteStream] = [] + if not urls: + return {"streams": streams} + + # don't use multithreading if there's only one URL + if len(urls) == 1: + stream_metadata, stream = self._fetch(urls[0]) + stream.meta.update(stream_metadata) + streams.append(stream) + else: + with ThreadPoolExecutor() as executor: + results = executor.map(self._fetch_with_exception_suppression, urls) + + for stream_metadata, stream in results: # type: ignore + if stream_metadata is not None and stream is not None: + stream.meta.update(stream_metadata) + stream.mime_type = stream.meta.get("content_type", None) + streams.append(stream) + + return {"streams": streams} + + def _fetch(self, url: str) -> Tuple[Dict[str, str], ByteStream]: + """ + Fetches content from a URL and returns it as a ByteStream. + + :param url: The URL to fetch content from. + :return: A tuple containing the ByteStream metadata dict and the corresponding ByteStream. + ByteStream metadata contains the URL and the content type of the fetched content. + The content type is a string indicating the type of content fetched (for example, "text/html", + "application/pdf"). The ByteStream object contains the fetched content as binary data. + + :raises: If an error occurs during content retrieval and `raise_on_failure` is set to True, this method will + raise an exception. 
Otherwise, all fetching errors are logged, and an empty ByteStream is returned. + + """ + content_type: str = "text/html" + stream: ByteStream = ByteStream(data=b"") + try: + response = self._get_response(url) + content_type = self._get_content_type(response) + handler: Callable = self._resolve_handler(content_type) + stream = handler(response) + except Exception as e: + if self.raise_on_failure: + raise e + # less verbose log as this is expected to happen often (requests failing, blocked, etc.) + logger.debug("Couldn't retrieve content from {url} because {error}", url=url, error=str(e)) + + finally: + self.current_user_agent_idx = 0 + + return {"content_type": content_type, "url": url}, stream + + def _fetch_with_exception_suppression(self, url: str) -> Tuple[Optional[Dict[str, str]], Optional[ByteStream]]: + """ + Fetches content from a URL and returns it as a ByteStream. + + If `raise_on_failure` is set to True, this method will wrap the fetch() method and catch any exceptions. + Otherwise, it will simply call the fetch() method. + :param url: The URL to fetch content from. + :return: A tuple containing the ByteStream metadata dict and the corresponding ByteStream. + + """ + if self.raise_on_failure: + try: + return self._fetch(url) + except Exception as e: + logger.warning("Error fetching {url}: {error}", url=url, error=str(e)) + return {"content_type": "Unknown", "url": url}, None + else: + return self._fetch(url) + + def _get_content_type(self, response: Response): + """ + Get the content type of the response. + + :param response: The response object. + :return: The content type of the response. + """ + content_type = response.headers.get("Content-Type", "") + return content_type.split(";")[0] + + def _resolve_handler(self, content_type: str) -> Callable[[Response], ByteStream]: + """ + Resolves the handler for the given content type. + + First, it tries to find a direct match for the content type in the handlers dictionary. 
+ If no direct match is found, it tries to find a pattern match using the fnmatch function. + If no pattern match is found, it returns the default handler for text/plain. + + :param content_type: The content type to resolve the handler for. + :returns: The handler for the given content type, if found. Otherwise, the default handler for text/plain. + """ + # direct match + if content_type in self.handlers: + return self.handlers[content_type] + + # pattern matches + for pattern, handler in self.handlers.items(): + if fnmatch(content_type, pattern): + return handler + + # default handler + return self.handlers["text/plain"] + + def _switch_user_agent(self, retry_state: RetryCallState) -> None: + """ + Switches the User-Agent for this LinkContentRetriever to the next one in the list of user agents. + + Used by tenacity to retry the requests with a different user agent. + + :param retry_state: The retry state (unused, required by tenacity). + """ + self.current_user_agent_idx = (self.current_user_agent_idx + 1) % len(self.user_agents) + logger.debug("Switched user agent to {user_agent}", user_agent=self.user_agents[self.current_user_agent_idx]) diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/__init__.py b/testbed/deepset-ai__haystack/haystack/components/generators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b93270d82f95d6117ad26e667b22a7f8d63edce2 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/__init__.py @@ -0,0 +1,12 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.generators.openai import ( # noqa: I001 (otherwise we end up with partial imports) + OpenAIGenerator, +) +from haystack.components.generators.azure import AzureOpenAIGenerator +from haystack.components.generators.hugging_face_local import HuggingFaceLocalGenerator +from haystack.components.generators.hugging_face_api import 
HuggingFaceAPIGenerator + +__all__ = ["HuggingFaceLocalGenerator", "HuggingFaceAPIGenerator", "OpenAIGenerator", "AzureOpenAIGenerator"] diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/azure.py b/testbed/deepset-ai__haystack/haystack/components/generators/azure.py new file mode 100644 index 0000000000000000000000000000000000000000..20bb2cda8ee637d5dccf03a18f82279bc9ec1428 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/azure.py @@ -0,0 +1,194 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Callable, Dict, Optional + +# pylint: disable=import-error +from openai.lib.azure import AzureOpenAI + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.components.generators import OpenAIGenerator +from haystack.dataclasses import StreamingChunk +from haystack.utils import Secret, deserialize_callable, deserialize_secrets_inplace, serialize_callable + +logger = logging.getLogger(__name__) + + +@component +class AzureOpenAIGenerator(OpenAIGenerator): + """ + Generates text using OpenAI's large language models (LLMs). + + It works with the gpt-4 and gpt-3.5-turbo family of models. + You can customize how the text is generated by passing parameters to the + OpenAI API. Use the `**generation_kwargs` argument when you initialize + the component or when you run it. Any parameter that works with + `openai.ChatCompletion.create` will work here too. + + + For details on OpenAI API parameters, see + [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat). + + + ### Usage example + + ```python + from haystack.components.generators import AzureOpenAIGenerator + from haystack.utils import Secret + client = AzureOpenAIGenerator( + azure_endpoint="", + api_key=Secret.from_token(""), + azure_deployment="") + response = client.run("What's Natural Language Processing? 
Be brief.") + print(response) + ``` + + ``` + >> {'replies': ['Natural Language Processing (NLP) is a branch of artificial intelligence that focuses on + >> the interaction between computers and human language. It involves enabling computers to understand, interpret, + >> and respond to natural human language in a way that is both meaningful and useful.'], 'meta': [{'model': + >> 'gpt-4o-mini', 'index': 0, 'finish_reason': 'stop', 'usage': {'prompt_tokens': 16, + >> 'completion_tokens': 49, 'total_tokens': 65}}]} + ``` + """ + + # pylint: disable=super-init-not-called + def __init__( + self, + azure_endpoint: Optional[str] = None, + api_version: Optional[str] = "2023-05-15", + azure_deployment: Optional[str] = "gpt-4o-mini", + api_key: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_API_KEY", strict=False), + azure_ad_token: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_AD_TOKEN", strict=False), + organization: Optional[str] = None, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + system_prompt: Optional[str] = None, + timeout: Optional[float] = None, + max_retries: Optional[int] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + default_headers: Optional[Dict[str, str]] = None, + ): + """ + Initialize the Azure OpenAI Generator. + + :param azure_endpoint: The endpoint of the deployed model, for example `https://example-resource.azure.openai.com/`. + :param api_version: The version of the API to use. Defaults to 2023-05-15. + :param azure_deployment: The deployment of the model, usually the model name. + :param api_key: The API key to use for authentication. + :param azure_ad_token: [Azure Active Directory token](https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id). + :param organization: Your organization ID, defaults to `None`. For help, see + [Setting up your organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization). 
+ :param streaming_callback: A callback function called when a new token is received from the stream. + It accepts [StreamingChunk](https://docs.haystack.deepset.ai/docs/data-classes#streamingchunk) + as an argument. + :param system_prompt: The system prompt to use for text generation. If not provided, the Generator + omits the system prompt and uses the default system prompt. + :param timeout: Timeout for AzureOpenAI client. If not set, it is inferred from the + `OPENAI_TIMEOUT` environment variable or set to 30. + :param max_retries: Maximum retries to establish contact with AzureOpenAI if it returns an internal error. + If not set, it is inferred from the `OPENAI_MAX_RETRIES` environment variable or set to 5. + :param generation_kwargs: Other parameters to use for the model, sent directly to + the OpenAI endpoint. See [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat) for + more details. + Some of the supported parameters: + - `max_tokens`: The maximum number of tokens the output text can have. + - `temperature`: The sampling temperature to use. Higher values mean the model takes more risks. + Try 0.9 for more creative applications and 0 (argmax sampling) for ones with a well-defined answer. + - `top_p`: An alternative to sampling with temperature, called nucleus sampling, where the model + considers the results of the tokens with top_p probability mass. For example, 0.1 means only the tokens + comprising the top 10% probability mass are considered. + - `n`: The number of completions to generate for each prompt. For example, with 3 prompts and n=2, + the LLM will generate two completions per prompt, resulting in 6 completions total. + - `stop`: One or more sequences after which the LLM should stop generating tokens. + - `presence_penalty`: The penalty applied if a token is already present. + Higher values make the model less likely to repeat the token. + - `frequency_penalty`: Penalty applied if a token has already been generated. 
+ Higher values make the model less likely to repeat the token. + - `logit_bias`: Adds a logit bias to specific tokens. The keys of the dictionary are tokens, and the + values are the bias to add to that token. + :param default_headers: Default headers to use for the AzureOpenAI client. + """ + # We intentionally do not call super().__init__ here because we only need to instantiate the client to interact + # with the API. + + # Why is this here? + # AzureOpenAI init is forcing us to use an init method that takes either base_url or azure_endpoint as not + # None init parameters. This way we accommodate the use case where env var AZURE_OPENAI_ENDPOINT is set instead + # of passing it as a parameter. + azure_endpoint = azure_endpoint or os.environ.get("AZURE_OPENAI_ENDPOINT") + if not azure_endpoint: + raise ValueError("Please provide an Azure endpoint or set the environment variable AZURE_OPENAI_ENDPOINT.") + + if api_key is None and azure_ad_token is None: + raise ValueError("Please provide an API key or an Azure Active Directory token.") + + # The check above makes mypy incorrectly infer that api_key is never None, + # which propagates the incorrect type. 
+ self.api_key = api_key # type: ignore + self.azure_ad_token = azure_ad_token + self.generation_kwargs = generation_kwargs or {} + self.system_prompt = system_prompt + self.streaming_callback = streaming_callback + self.api_version = api_version + self.azure_endpoint = azure_endpoint + self.azure_deployment = azure_deployment + self.organization = organization + self.model: str = azure_deployment or "gpt-4o-mini" + self.timeout = timeout or float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + self.max_retries = max_retries or int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + self.default_headers = default_headers or {} + + self.client = AzureOpenAI( + api_version=api_version, + azure_endpoint=azure_endpoint, + azure_deployment=azure_deployment, + api_key=api_key.resolve_value() if api_key is not None else None, + azure_ad_token=azure_ad_token.resolve_value() if azure_ad_token is not None else None, + organization=organization, + timeout=self.timeout, + max_retries=self.max_retries, + default_headers=self.default_headers, + ) + + def to_dict(self) -> Dict[str, Any]: + """ + Serialize this component to a dictionary. + + :returns: + The serialized component as a dictionary. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + return default_to_dict( + self, + azure_endpoint=self.azure_endpoint, + azure_deployment=self.azure_deployment, + organization=self.organization, + api_version=self.api_version, + streaming_callback=callback_name, + generation_kwargs=self.generation_kwargs, + system_prompt=self.system_prompt, + api_key=self.api_key.to_dict() if self.api_key is not None else None, + azure_ad_token=self.azure_ad_token.to_dict() if self.azure_ad_token is not None else None, + timeout=self.timeout, + max_retries=self.max_retries, + default_headers=self.default_headers, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "AzureOpenAIGenerator": + """ + Deserialize this component from a dictionary. 
+ + :param data: + The dictionary representation of this component. + :returns: + The deserialized component instance. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key", "azure_ad_token"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + return default_from_dict(cls, data) diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/chat/__init__.py b/testbed/deepset-ai__haystack/haystack/components/generators/chat/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1c7ecbad8d6810da0ec6def3dccceba52c410af8 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/chat/__init__.py @@ -0,0 +1,17 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from haystack.components.generators.chat.openai import ( # noqa: I001 (otherwise we end up with partial imports) + OpenAIChatGenerator, +) +from haystack.components.generators.chat.azure import AzureOpenAIChatGenerator +from haystack.components.generators.chat.hugging_face_local import HuggingFaceLocalChatGenerator +from haystack.components.generators.chat.hugging_face_api import HuggingFaceAPIChatGenerator + +__all__ = [ + "HuggingFaceLocalChatGenerator", + "HuggingFaceAPIChatGenerator", + "OpenAIChatGenerator", + "AzureOpenAIChatGenerator", +] diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/chat/azure.py b/testbed/deepset-ai__haystack/haystack/components/generators/chat/azure.py new file mode 100644 index 0000000000000000000000000000000000000000..445e58040211cb40bc402a184101e1865d544de8 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/chat/azure.py @@ -0,0 +1,194 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# 
SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Callable, Dict, Optional + +# pylint: disable=import-error +from openai.lib.azure import AzureOpenAI + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.components.generators.chat import OpenAIChatGenerator +from haystack.dataclasses import StreamingChunk +from haystack.utils import Secret, deserialize_callable, deserialize_secrets_inplace, serialize_callable + +logger = logging.getLogger(__name__) + + +@component +class AzureOpenAIChatGenerator(OpenAIChatGenerator): + """ + Generates text using OpenAI's models on Azure. + + It works with the gpt-4 and gpt-3.5-turbo - type models and supports streaming responses + from OpenAI API. It uses [ChatMessage](https://docs.haystack.deepset.ai/docs/data-classes#chatmessage) + format in input and output. + + You can customize how the text is generated by passing parameters to the + OpenAI API. Use the `**generation_kwargs` argument when you initialize + the component or when you run it. Any parameter that works with + `openai.ChatCompletion.create` will work here too. + + For details on OpenAI API parameters, see + [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat). 
+
+    ### Usage example
+
+    ```python
+    from haystack.components.generators.chat import AzureOpenAIChatGenerator
+    from haystack.dataclasses import ChatMessage
+    from haystack.utils import Secret
+
+    messages = [ChatMessage.from_user("What's Natural Language Processing?")]
+
+    client = AzureOpenAIChatGenerator(
+        azure_endpoint="",
+        api_key=Secret.from_token(""),
+        azure_deployment="")
+    response = client.run(messages)
+    print(response)
+    ```
+
+    ```
+    {'replies':
+        [ChatMessage(content='Natural Language Processing (NLP) is a branch of artificial intelligence that focuses on
+        enabling computers to understand, interpret, and generate human language in a way that is useful.',
+        role=, name=None,
+        meta={'model': 'gpt-4o-mini', 'index': 0, 'finish_reason': 'stop',
+        'usage': {'prompt_tokens': 15, 'completion_tokens': 36, 'total_tokens': 51}})]
+    }
+    ```
+    """
+
+    # pylint: disable=super-init-not-called
+    def __init__(
+        self,
+        azure_endpoint: Optional[str] = None,
+        api_version: Optional[str] = "2023-05-15",
+        azure_deployment: Optional[str] = "gpt-4o-mini",
+        api_key: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_API_KEY", strict=False),
+        azure_ad_token: Optional[Secret] = Secret.from_env_var("AZURE_OPENAI_AD_TOKEN", strict=False),
+        organization: Optional[str] = None,
+        streaming_callback: Optional[Callable[[StreamingChunk], None]] = None,
+        timeout: Optional[float] = None,
+        max_retries: Optional[int] = None,
+        generation_kwargs: Optional[Dict[str, Any]] = None,
+        default_headers: Optional[Dict[str, str]] = None,
+    ):
+        """
+        Initialize the Azure OpenAI Chat Generator component.
+
+        :param azure_endpoint: The endpoint of the deployed model, for example `"https://example-resource.azure.openai.com/"`.
+        :param api_version: The version of the API to use. Defaults to 2023-05-15.
+        :param azure_deployment: The deployment of the model, usually the model name.
+        :param api_key: The API key to use for authentication.
+ :param azure_ad_token: [Azure Active Directory token](https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id). + :param organization: Your organization ID, defaults to `None`. For help, see + [Setting up your organization](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization). + :param streaming_callback: A callback function called when a new token is received from the stream. + It accepts [StreamingChunk](https://docs.haystack.deepset.ai/docs/data-classes#streamingchunk) + as an argument. + :param timeout: Timeout for OpenAI client calls. If not set, it defaults to either the + `OPENAI_TIMEOUT` environment variable, or 30 seconds. + :param max_retries: Maximum number of retries to contact OpenAI after an internal error. + If not set, it defaults to either the `OPENAI_MAX_RETRIES` environment variable, or set to 5. + :param generation_kwargs: Other parameters to use for the model. These parameters are sent directly to + the OpenAI endpoint. For details, see [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat). + Some of the supported parameters: + - `max_tokens`: The maximum number of tokens the output text can have. + - `temperature`: The sampling temperature to use. Higher values mean the model takes more risks. + Try 0.9 for more creative applications and 0 (argmax sampling) for ones with a well-defined answer. + - `top_p`: Nucleus sampling is an alternative to sampling with temperature, where the model considers + tokens with a top_p probability mass. For example, 0.1 means only the tokens comprising + the top 10% probability mass are considered. + - `n`: The number of completions to generate for each prompt. For example, with 3 prompts and n=2, + the LLM will generate two completions per prompt, resulting in 6 completions total. + - `stop`: One or more sequences after which the LLM should stop generating tokens. 
+ - `presence_penalty`: The penalty applied if a token is already present. + Higher values make the model less likely to repeat the token. + - `frequency_penalty`: Penalty applied if a token has already been generated. + Higher values make the model less likely to repeat the token. + - `logit_bias`: Adds a logit bias to specific tokens. The keys of the dictionary are tokens, and the + values are the bias to add to that token. + :param default_headers: Default headers to use for the AzureOpenAI client. + """ + # We intentionally do not call super().__init__ here because we only need to instantiate the client to interact + # with the API. + + # Why is this here? + # AzureOpenAI init is forcing us to use an init method that takes either base_url or azure_endpoint as not + # None init parameters. This way we accommodate the use case where env var AZURE_OPENAI_ENDPOINT is set instead + # of passing it as a parameter. + azure_endpoint = azure_endpoint or os.environ.get("AZURE_OPENAI_ENDPOINT") + if not azure_endpoint: + raise ValueError("Please provide an Azure endpoint or set the environment variable AZURE_OPENAI_ENDPOINT.") + + if api_key is None and azure_ad_token is None: + raise ValueError("Please provide an API key or an Azure Active Directory token.") + + # The check above makes mypy incorrectly infer that api_key is never None, + # which propagates the incorrect type. 
+ self.api_key = api_key # type: ignore + self.azure_ad_token = azure_ad_token + self.generation_kwargs = generation_kwargs or {} + self.streaming_callback = streaming_callback + self.api_version = api_version + self.azure_endpoint = azure_endpoint + self.azure_deployment = azure_deployment + self.organization = organization + self.model = azure_deployment or "gpt-4o-mini" + self.timeout = timeout or float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + self.max_retries = max_retries or int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + self.default_headers = default_headers or {} + + self.client = AzureOpenAI( + api_version=api_version, + azure_endpoint=azure_endpoint, + azure_deployment=azure_deployment, + api_key=api_key.resolve_value() if api_key is not None else None, + azure_ad_token=azure_ad_token.resolve_value() if azure_ad_token is not None else None, + organization=organization, + timeout=self.timeout, + max_retries=self.max_retries, + default_headers=self.default_headers, + ) + + def to_dict(self) -> Dict[str, Any]: + """ + Serialize this component to a dictionary. + + :returns: + The serialized component as a dictionary. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + return default_to_dict( + self, + azure_endpoint=self.azure_endpoint, + azure_deployment=self.azure_deployment, + organization=self.organization, + api_version=self.api_version, + streaming_callback=callback_name, + generation_kwargs=self.generation_kwargs, + timeout=self.timeout, + max_retries=self.max_retries, + api_key=self.api_key.to_dict() if self.api_key is not None else None, + azure_ad_token=self.azure_ad_token.to_dict() if self.azure_ad_token is not None else None, + default_headers=self.default_headers, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "AzureOpenAIChatGenerator": + """ + Deserialize this component from a dictionary. + + :param data: The dictionary representation of this component. 
+ :returns: + The deserialized component instance. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key", "azure_ad_token"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + return default_from_dict(cls, data) diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_api.py b/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_api.py new file mode 100644 index 0000000000000000000000000000000000000000..d4ecd53f10126dd08677e0fa31f20a4a99b3ee32 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_api.py @@ -0,0 +1,279 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Callable, Dict, Iterable, List, Optional, Union + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.dataclasses import ChatMessage, StreamingChunk +from haystack.lazy_imports import LazyImport +from haystack.utils import Secret, deserialize_callable, deserialize_secrets_inplace, serialize_callable +from haystack.utils.hf import HFGenerationAPIType, HFModelType, check_valid_model +from haystack.utils.url_validation import is_valid_http_url + +with LazyImport(message="Run 'pip install \"huggingface_hub[inference]>=0.23.0\"'") as huggingface_hub_import: + from huggingface_hub import ChatCompletionOutput, ChatCompletionStreamOutput, InferenceClient + + +logger = logging.getLogger(__name__) + + +def _convert_message_to_hfapi_format(message: ChatMessage) -> Dict[str, str]: + """ + Convert a message to the format expected by Hugging Face APIs. 
+ + :returns: A dictionary with the following keys: + - `role` + - `content` + - `name` (optional) + """ + formatted_msg = {"role": message.role.value, "content": message.content} + if message.name: + formatted_msg["name"] = message.name + + return formatted_msg + + +@component +class HuggingFaceAPIChatGenerator: + """ + Completes chats using Hugging Face APIs. + + HuggingFaceAPIChatGenerator uses the [ChatMessage](https://docs.haystack.deepset.ai/docs/data-classes#chatmessage) + format for input and output. Use it to generate text with Hugging Face APIs: + - [Free Serverless Inference API](https://huggingface.co/inference-api) + - [Paid Inference Endpoints](https://huggingface.co/inference-endpoints) + - [Self-hosted Text Generation Inference](https://github.com/huggingface/text-generation-inference) + + ### Usage examples + + #### With the free serverless inference API + + ```python + from haystack.components.generators.chat import HuggingFaceAPIChatGenerator + from haystack.dataclasses import ChatMessage + from haystack.utils import Secret + from haystack.utils.hf import HFGenerationAPIType + + messages = [ChatMessage.from_system("\\nYou are a helpful, respectful and honest assistant"), + ChatMessage.from_user("What's Natural Language Processing?")] + + # the api_type can be expressed using the HFGenerationAPIType enum or as a string + api_type = HFGenerationAPIType.SERVERLESS_INFERENCE_API + api_type = "serverless_inference_api" # this is equivalent to the above + + generator = HuggingFaceAPIChatGenerator(api_type=api_type, + api_params={"model": "HuggingFaceH4/zephyr-7b-beta"}, + token=Secret.from_token("")) + + result = generator.run(messages) + print(result) + ``` + + #### With paid inference endpoints + + ```python + from haystack.components.generators.chat import HuggingFaceAPIChatGenerator + from haystack.dataclasses import ChatMessage + from haystack.utils import Secret + + messages = [ChatMessage.from_system("\\nYou are a helpful, respectful and honest 
assistant"), + ChatMessage.from_user("What's Natural Language Processing?")] + + generator = HuggingFaceAPIChatGenerator(api_type="inference_endpoints", + api_params={"url": ""}, + token=Secret.from_token("")) + + result = generator.run(messages) + print(result) + + #### With self-hosted text generation inference + + ```python + from haystack.components.generators.chat import HuggingFaceAPIChatGenerator + from haystack.dataclasses import ChatMessage + + messages = [ChatMessage.from_system("\\nYou are a helpful, respectful and honest assistant"), + ChatMessage.from_user("What's Natural Language Processing?")] + + generator = HuggingFaceAPIChatGenerator(api_type="text_generation_inference", + api_params={"url": "http://localhost:8080"}) + + result = generator.run(messages) + print(result) + ``` + """ + + def __init__( # pylint: disable=too-many-positional-arguments + self, + api_type: Union[HFGenerationAPIType, str], + api_params: Dict[str, str], + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + generation_kwargs: Optional[Dict[str, Any]] = None, + stop_words: Optional[List[str]] = None, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + ): + """ + Initialize the HuggingFaceAPIChatGenerator instance. + + :param api_type: + The type of Hugging Face API to use. Available types: + - `text_generation_inference`: See [TGI](https://github.com/huggingface/text-generation-inference). + - `inference_endpoints`: See [Inference Endpoints](https://huggingface.co/inference-endpoints). + - `serverless_inference_api`: See [Serverless Inference API](https://huggingface.co/inference-api). + :param api_params: + A dictionary with the following keys: + - `model`: Hugging Face model ID. Required when `api_type` is `SERVERLESS_INFERENCE_API`. + - `url`: URL of the inference endpoint. Required when `api_type` is `INFERENCE_ENDPOINTS` or + `TEXT_GENERATION_INFERENCE`. 
+ :param token: The Hugging Face token to use as HTTP bearer authorization. + Check your HF token in your [account settings](https://huggingface.co/settings/tokens). + :param generation_kwargs: + A dictionary with keyword arguments to customize text generation. + Some examples: `max_tokens`, `temperature`, `top_p`. + For details, see [Hugging Face chat_completion documentation](https://huggingface.co/docs/huggingface_hub/package_reference/inference_client#huggingface_hub.InferenceClient.chat_completion). + :param stop_words: An optional list of strings representing the stop words. + :param streaming_callback: An optional callable for handling streaming responses. + """ + + huggingface_hub_import.check() + + if isinstance(api_type, str): + api_type = HFGenerationAPIType.from_str(api_type) + + if api_type == HFGenerationAPIType.SERVERLESS_INFERENCE_API: + model = api_params.get("model") + if model is None: + raise ValueError( + "To use the Serverless Inference API, you need to specify the `model` parameter in `api_params`." + ) + check_valid_model(model, HFModelType.GENERATION, token) + model_or_url = model + elif api_type in [HFGenerationAPIType.INFERENCE_ENDPOINTS, HFGenerationAPIType.TEXT_GENERATION_INFERENCE]: + url = api_params.get("url") + if url is None: + msg = ( + "To use Text Generation Inference or Inference Endpoints, you need to specify the `url` parameter " + "in `api_params`." 
+ ) + raise ValueError(msg) + if not is_valid_http_url(url): + raise ValueError(f"Invalid URL: {url}") + model_or_url = url + else: + msg = f"Unknown api_type {api_type}" + raise ValueError(msg) + + # handle generation kwargs setup + generation_kwargs = generation_kwargs.copy() if generation_kwargs else {} + generation_kwargs["stop"] = generation_kwargs.get("stop", []) + generation_kwargs["stop"].extend(stop_words or []) + generation_kwargs.setdefault("max_tokens", 512) + + self.api_type = api_type + self.api_params = api_params + self.token = token + self.generation_kwargs = generation_kwargs + self.streaming_callback = streaming_callback + self._client = InferenceClient(model_or_url, token=token.resolve_value() if token else None) + + def to_dict(self) -> Dict[str, Any]: + """ + Serialize this component to a dictionary. + + :returns: + A dictionary containing the serialized component. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + return default_to_dict( + self, + api_type=str(self.api_type), + api_params=self.api_params, + token=self.token.to_dict() if self.token else None, + generation_kwargs=self.generation_kwargs, + streaming_callback=callback_name, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "HuggingFaceAPIChatGenerator": + """ + Deserialize this component from a dictionary. 
+ """ + deserialize_secrets_inplace(data["init_parameters"], keys=["token"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + return default_from_dict(cls, data) + + @component.output_types(replies=List[ChatMessage]) + def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str, Any]] = None): + """ + Invoke the text generation inference based on the provided messages and generation parameters. + + :param messages: A list of ChatMessage objects representing the input messages. + :param generation_kwargs: Additional keyword arguments for text generation. + :returns: A dictionary with the following keys: + - `replies`: A list containing the generated responses as ChatMessage objects. + """ + + # update generation kwargs by merging with the default ones + generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})} + + formatted_messages = [_convert_message_to_hfapi_format(message) for message in messages] + + if self.streaming_callback: + return self._run_streaming(formatted_messages, generation_kwargs) + + return self._run_non_streaming(formatted_messages, generation_kwargs) + + def _run_streaming(self, messages: List[Dict[str, str]], generation_kwargs: Dict[str, Any]): + api_output: Iterable[ChatCompletionStreamOutput] = self._client.chat_completion( + messages, stream=True, **generation_kwargs + ) + + generated_text = "" + + for chunk in api_output: # pylint: disable=not-an-iterable + text = chunk.choices[0].delta.content + if text: + generated_text += text + finish_reason = chunk.choices[0].finish_reason + + meta = {} + if finish_reason: + meta["finish_reason"] = finish_reason + + stream_chunk = StreamingChunk(text, meta) + self.streaming_callback(stream_chunk) # type: ignore # streaming_callback is not None (verified in the 
run method) + + message = ChatMessage.from_assistant(generated_text) + message.meta.update( + { + "model": self._client.model, + "finish_reason": finish_reason, + "index": 0, + "usage": {"prompt_tokens": 0, "completion_tokens": 0}, # not available in streaming + } + ) + return {"replies": [message]} + + def _run_non_streaming( + self, messages: List[Dict[str, str]], generation_kwargs: Dict[str, Any] + ) -> Dict[str, List[ChatMessage]]: + chat_messages: List[ChatMessage] = [] + + api_chat_output: ChatCompletionOutput = self._client.chat_completion(messages, **generation_kwargs) + for choice in api_chat_output.choices: + message = ChatMessage.from_assistant(choice.message.content) + message.meta.update( + { + "model": self._client.model, + "finish_reason": choice.finish_reason, + "index": choice.index, + "usage": api_chat_output.usage or {"prompt_tokens": 0, "completion_tokens": 0}, + } + ) + chat_messages.append(message) + + return {"replies": chat_messages} diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_local.py b/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_local.py new file mode 100644 index 0000000000000000000000000000000000000000..419fde20b669317206c52ca81764006c8fe0804d --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/chat/hugging_face_local.py @@ -0,0 +1,356 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import sys +from typing import Any, Callable, Dict, List, Literal, Optional, Union + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.dataclasses import ChatMessage, StreamingChunk +from haystack.lazy_imports import LazyImport +from haystack.utils import ( + ComponentDevice, + Secret, + deserialize_callable, + deserialize_secrets_inplace, + serialize_callable, +) + +logger = logging.getLogger(__name__) + +with LazyImport(message="Run 'pip install 
\"transformers[torch]\"'") as torch_and_transformers_import: + from huggingface_hub import model_info + from transformers import PreTrainedTokenizer, PreTrainedTokenizerFast, StoppingCriteriaList, pipeline + + from haystack.utils.hf import ( # pylint: disable=ungrouped-imports + HFTokenStreamingHandler, + StopWordsCriteria, + deserialize_hf_model_kwargs, + serialize_hf_model_kwargs, + ) + + +PIPELINE_SUPPORTED_TASKS = ["text-generation", "text2text-generation"] + + +@component +class HuggingFaceLocalChatGenerator: + """ + Generates chat responses using models from Hugging Face that run locally. + + Use this component with chat-based models, + such as `HuggingFaceH4/zephyr-7b-beta` or `meta-llama/Llama-2-7b-chat-hf`. + LLMs running locally may need powerful hardware. + + ### Usage example + + ```python + from haystack.components.generators.chat import HuggingFaceLocalChatGenerator + from haystack.dataclasses import ChatMessage + + generator = HuggingFaceLocalChatGenerator(model="HuggingFaceH4/zephyr-7b-beta") + generator.warm_up() + messages = [ChatMessage.from_user("What's Natural Language Processing? Be brief.")] + print(generator.run(messages)) + ``` + + ``` + {'replies': + [ChatMessage(content=' Natural Language Processing (NLP) is a subfield of artificial intelligence that deals + with the interaction between computers and human language. It enables computers to understand, interpret, and + generate human language in a valuable way. NLP involves various techniques such as speech recognition, text + analysis, sentiment analysis, and machine translation. 
The ultimate goal is to make it easier for computers to + process and derive meaning from human language, improving communication between humans and machines.', + role=, + name=None, + meta={'finish_reason': 'stop', 'index': 0, 'model': + 'mistralai/Mistral-7B-Instruct-v0.2', + 'usage': {'completion_tokens': 90, 'prompt_tokens': 19, 'total_tokens': 109}}) + ] + } + ``` + """ + + def __init__( + self, + model: str = "HuggingFaceH4/zephyr-7b-beta", + task: Optional[Literal["text-generation", "text2text-generation"]] = None, + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + chat_template: Optional[str] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + huggingface_pipeline_kwargs: Optional[Dict[str, Any]] = None, + stop_words: Optional[List[str]] = None, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + ): + """ + Initializes the HuggingFaceLocalChatGenerator component. + + :param model: The Hugging Face text generation model name or path, + for example, `mistralai/Mistral-7B-Instruct-v0.2` or `TheBloke/OpenHermes-2.5-Mistral-7B-16k-AWQ`. + The model must be a chat model supporting the ChatML messaging + format. + If the model is specified in `huggingface_pipeline_kwargs`, this parameter is ignored. + :param task: The task for the Hugging Face pipeline. Possible options: + - `text-generation`: Supported by decoder models, like GPT. + - `text2text-generation`: Supported by encoder-decoder models, like T5. + If the task is specified in `huggingface_pipeline_kwargs`, this parameter is ignored. + If not specified, the component calls the Hugging Face API to infer the task from the model name. + :param device: The device for loading the model. If `None`, automatically selects the default device. + If a device or device map is specified in `huggingface_pipeline_kwargs`, it overrides this parameter. 
+ :param token: The token to use as HTTP bearer authorization for remote files. + If the token is specified in `huggingface_pipeline_kwargs`, this parameter is ignored. + :param chat_template: Specifies an optional Jinja template for formatting chat + messages. Most high-quality chat models have their own templates, but for models without this + feature or if you prefer a custom template, use this parameter. + :param generation_kwargs: A dictionary with keyword arguments to customize text generation. + Some examples: `max_length`, `max_new_tokens`, `temperature`, `top_k`, `top_p`. + See Hugging Face's documentation for more information: + - - [customize-text-generation](https://huggingface.co/docs/transformers/main/en/generation_strategies#customize-text-generation) + - - [GenerationConfig](https://huggingface.co/docs/transformers/main/en/main_classes/text_generation#transformers.GenerationConfig) + The only `generation_kwargs` set by default is `max_new_tokens`, which is set to 512 tokens. + :param huggingface_pipeline_kwargs: Dictionary with keyword arguments to initialize the + Hugging Face pipeline for text generation. + These keyword arguments provide fine-grained control over the Hugging Face pipeline. + In case of duplication, these kwargs override `model`, `task`, `device`, and `token` init parameters. + For kwargs, see [Hugging Face documentation](https://huggingface.co/docs/transformers/en/main_classes/pipelines#transformers.pipeline.task). + In this dictionary, you can also include `model_kwargs` to specify the kwargs for [model initialization](https://huggingface.co/docs/transformers/en/main_classes/model#transformers.PreTrainedModel.from_pretrained) + :param stop_words: A list of stop words. If the model generates a stop word, the generation stops. + If you provide this parameter, don't specify the `stopping_criteria` in `generation_kwargs`. + For some chat models, the output includes both the new text and the original prompt. 
+ In these cases, make sure your prompt has no stop words. + :param streaming_callback: An optional callable for handling streaming responses. + """ + torch_and_transformers_import.check() + + huggingface_pipeline_kwargs = huggingface_pipeline_kwargs or {} + generation_kwargs = generation_kwargs or {} + + self.token = token + token = token.resolve_value() if token else None + + # check if the huggingface_pipeline_kwargs contain the essential parameters + # otherwise, populate them with values from other init parameters + huggingface_pipeline_kwargs.setdefault("model", model) + huggingface_pipeline_kwargs.setdefault("token", token) + + device = ComponentDevice.resolve_device(device) + device.update_hf_kwargs(huggingface_pipeline_kwargs, overwrite=False) + + # task identification and validation + if task is None: + if "task" in huggingface_pipeline_kwargs: + task = huggingface_pipeline_kwargs["task"] + elif isinstance(huggingface_pipeline_kwargs["model"], str): + task = model_info( + huggingface_pipeline_kwargs["model"], token=huggingface_pipeline_kwargs["token"] + ).pipeline_tag + + if task not in PIPELINE_SUPPORTED_TASKS: + raise ValueError( + f"Task '{task}' is not supported. " f"The supported tasks are: {', '.join(PIPELINE_SUPPORTED_TASKS)}." + ) + huggingface_pipeline_kwargs["task"] = task + + # if not specified, set return_full_text to False for text-generation + # only generated text is returned (excluding prompt) + if task == "text-generation": + generation_kwargs.setdefault("return_full_text", False) + + if stop_words and "stopping_criteria" in generation_kwargs: + raise ValueError( + "Found both the `stop_words` init parameter and the `stopping_criteria` key in `generation_kwargs`. " + "Please specify only one of them." 
+ ) + generation_kwargs.setdefault("max_new_tokens", 512) + generation_kwargs["stop_sequences"] = generation_kwargs.get("stop_sequences", []) + generation_kwargs["stop_sequences"].extend(stop_words or []) + + self.huggingface_pipeline_kwargs = huggingface_pipeline_kwargs + self.generation_kwargs = generation_kwargs + self.chat_template = chat_template + self.streaming_callback = streaming_callback + self.pipeline = None + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + if isinstance(self.huggingface_pipeline_kwargs["model"], str): + return {"model": self.huggingface_pipeline_kwargs["model"]} + return {"model": f"[object of type {type(self.huggingface_pipeline_kwargs['model'])}]"} + + def warm_up(self): + """ + Initializes the component. + """ + if self.pipeline is None: + self.pipeline = pipeline(**self.huggingface_pipeline_kwargs) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + serialization_dict = default_to_dict( + self, + huggingface_pipeline_kwargs=self.huggingface_pipeline_kwargs, + generation_kwargs=self.generation_kwargs, + streaming_callback=callback_name, + token=self.token.to_dict() if self.token else None, + ) + + huggingface_pipeline_kwargs = serialization_dict["init_parameters"]["huggingface_pipeline_kwargs"] + huggingface_pipeline_kwargs.pop("token", None) + + serialize_hf_model_kwargs(huggingface_pipeline_kwargs) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "HuggingFaceLocalChatGenerator": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. 
+ """ + torch_and_transformers_import.check() # leave this, cls method + deserialize_secrets_inplace(data["init_parameters"], keys=["token"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + + huggingface_pipeline_kwargs = init_params.get("huggingface_pipeline_kwargs", {}) + deserialize_hf_model_kwargs(huggingface_pipeline_kwargs) + return default_from_dict(cls, data) + + @component.output_types(replies=List[ChatMessage]) + def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str, Any]] = None): + """ + Invoke text generation inference based on the provided messages and generation parameters. + + :param messages: A list of ChatMessage objects representing the input messages. + :param generation_kwargs: Additional keyword arguments for text generation. + :returns: + A list containing the generated responses as ChatMessage instances. + """ + if self.pipeline is None: + raise RuntimeError("The generation model has not been loaded. 
Please call warm_up() before running.") + + tokenizer = self.pipeline.tokenizer + + # Check and update generation parameters + generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})} + + stop_words = generation_kwargs.pop("stop_words", []) + generation_kwargs.pop("stop_sequences", []) + # pipeline call doesn't support stop_sequences, so we need to pop it + stop_words = self._validate_stop_words(stop_words) + + # Set up stop words criteria if stop words exist + stop_words_criteria = StopWordsCriteria(tokenizer, stop_words, self.pipeline.device) if stop_words else None + if stop_words_criteria: + generation_kwargs["stopping_criteria"] = StoppingCriteriaList([stop_words_criteria]) + + if self.streaming_callback: + num_responses = generation_kwargs.get("num_return_sequences", 1) + if num_responses > 1: + msg = ( + "Streaming is enabled, but the number of responses is set to {num_responses}. " + "Streaming is only supported for single response generation. " + "Setting the number of responses to 1." 
+ ) + logger.warning(msg, num_responses=num_responses) + generation_kwargs["num_return_sequences"] = 1 + # streamer parameter hooks into HF streaming, HFTokenStreamingHandler is an adapter to our streaming + generation_kwargs["streamer"] = HFTokenStreamingHandler(tokenizer, self.streaming_callback, stop_words) + + # Prepare the prompt for the model + prepared_prompt = tokenizer.apply_chat_template( + messages, tokenize=False, chat_template=self.chat_template, add_generation_prompt=True + ) + + # Avoid some unnecessary warnings in the generation pipeline call + generation_kwargs["pad_token_id"] = ( + generation_kwargs.get("pad_token_id", tokenizer.pad_token_id) or tokenizer.eos_token_id + ) + + # Generate responses + output = self.pipeline(prepared_prompt, **generation_kwargs) + replies = [o.get("generated_text", "") for o in output] + + # Remove stop words from replies if present + for stop_word in stop_words: + replies = [reply.replace(stop_word, "").rstrip() for reply in replies] + + # Create ChatMessage instances for each reply + chat_messages = [ + self.create_message(reply, r_index, tokenizer, prepared_prompt, generation_kwargs) + for r_index, reply in enumerate(replies) + ] + return {"replies": chat_messages} + + def create_message( + self, + text: str, + index: int, + tokenizer: Union["PreTrainedTokenizer", "PreTrainedTokenizerFast"], + prompt: str, + generation_kwargs: Dict[str, Any], + ) -> ChatMessage: + """ + Create a ChatMessage instance from the provided text, populated with metadata. + + :param text: The generated text. + :param index: The index of the generated text. + :param tokenizer: The tokenizer used for generation. + :param prompt: The prompt used for generation. + :param generation_kwargs: The generation parameters. + :returns: A ChatMessage instance. 
+ """ + completion_tokens = len(tokenizer.encode(text, add_special_tokens=False)) + prompt_token_count = len(tokenizer.encode(prompt, add_special_tokens=False)) + total_tokens = prompt_token_count + completion_tokens + + # not the most sophisticated finish_reason detection, improve later to match + # https://platform.openai.com/docs/guides/text-generation/chat-completions-response-format + finish_reason = ( + "length" if completion_tokens >= generation_kwargs.get("max_new_tokens", sys.maxsize) else "stop" + ) + + meta = { + "finish_reason": finish_reason, + "index": index, + "model": self.huggingface_pipeline_kwargs["model"], + "usage": { + "completion_tokens": completion_tokens, + "prompt_tokens": prompt_token_count, + "total_tokens": total_tokens, + }, + } + + return ChatMessage.from_assistant(text, meta=meta) + + def _validate_stop_words(self, stop_words: Optional[List[str]]) -> Optional[List[str]]: + """ + Validates the provided stop words. + + :param stop_words: A list of stop words to validate. + :return: A sanitized list of stop words or None if validation fails. + """ + if stop_words and not all(isinstance(word, str) for word in stop_words): + logger.warning( + "Invalid stop words provided. Stop words must be specified as a list of strings. 
" + "Ignoring stop words: {stop_words}", + stop_words=stop_words, + ) + return None + + # deduplicate stop words + stop_words = list(set(stop_words or [])) + return stop_words diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/chat/openai.py b/testbed/deepset-ai__haystack/haystack/components/generators/chat/openai.py new file mode 100644 index 0000000000000000000000000000000000000000..6013880e74b33ecc9d49a4d70cfd8fdd5dcbe985 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/chat/openai.py @@ -0,0 +1,380 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import copy +import json +import os +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Union + +from openai import OpenAI, Stream +from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage +from openai.types.chat.chat_completion import Choice +from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.components.generators.openai_utils import _convert_message_to_openai_format +from haystack.dataclasses import ChatMessage, StreamingChunk +from haystack.utils import Secret, deserialize_callable, deserialize_secrets_inplace, serialize_callable + +logger = logging.getLogger(__name__) + + +@component +class OpenAIChatGenerator: + """ + Completes chats using OpenAI's large language models (LLMs). + + It works with the gpt-4 and gpt-3.5-turbo models and supports streaming responses + from OpenAI API. It uses [ChatMessage](https://docs.haystack.deepset.ai/docs/data-classes#chatmessage) + format in input and output. + + You can customize how the text is generated by passing parameters to the + OpenAI API. Use the `**generation_kwargs` argument when you initialize + the component or when you run it. 
Any parameter that works with + `openai.ChatCompletion.create` will work here too. + + For details on OpenAI API parameters, see + [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat). + + ### Usage example + + ```python + from haystack.components.generators.chat import OpenAIChatGenerator + from haystack.dataclasses import ChatMessage + + messages = [ChatMessage.from_user("What's Natural Language Processing?")] + + client = OpenAIChatGenerator() + response = client.run(messages) + print(response) + ``` + Output: + ``` + {'replies': + [ChatMessage(content='Natural Language Processing (NLP) is a branch of artificial intelligence + that focuses on enabling computers to understand, interpret, and generate human language in + a way that is meaningful and useful.', + role=, name=None, + meta={'model': 'gpt-4o-mini', 'index': 0, 'finish_reason': 'stop', + 'usage': {'prompt_tokens': 15, 'completion_tokens': 36, 'total_tokens': 51}}) + ] + } + ``` + """ + + def __init__( # pylint: disable=too-many-positional-arguments + self, + api_key: Secret = Secret.from_env_var("OPENAI_API_KEY"), + model: str = "gpt-4o-mini", + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + api_base_url: Optional[str] = None, + organization: Optional[str] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + timeout: Optional[float] = None, + max_retries: Optional[int] = None, + ): + """ + Creates an instance of OpenAIChatGenerator. Unless specified otherwise in `model`, uses OpenAI's gpt-4o-mini + + Before initializing the component, you can set the 'OPENAI_TIMEOUT' and 'OPENAI_MAX_RETRIES' + environment variables to override the `timeout` and `max_retries` parameters respectively + in the OpenAI client. + + :param api_key: The OpenAI API key. + You can set it with an environment variable `OPENAI_API_KEY`, or pass with this parameter + during initialization. + :param model: The name of the model to use. 
+ :param streaming_callback: A callback function that is called when a new token is received from the stream. + The callback function accepts [StreamingChunk](https://docs.haystack.deepset.ai/docs/data-classes#streamingchunk) + as an argument. + :param api_base_url: An optional base URL. + :param organization: Your organization ID, defaults to `None`. See + [production best practices](https://platform.openai.com/docs/guides/production-best-practices/setting-up-your-organization). + :param generation_kwargs: Other parameters to use for the model. These parameters are sent directly to + the OpenAI endpoint. See OpenAI [documentation](https://platform.openai.com/docs/api-reference/chat) for + more details. + Some of the supported parameters: + - `max_tokens`: The maximum number of tokens the output text can have. + - `temperature`: What sampling temperature to use. Higher values mean the model will take more risks. + Try 0.9 for more creative applications and 0 (argmax sampling) for ones with a well-defined answer. + - `top_p`: An alternative to sampling with temperature, called nucleus sampling, where the model + considers the results of the tokens with top_p probability mass. For example, 0.1 means only the tokens + comprising the top 10% probability mass are considered. + - `n`: How many completions to generate for each prompt. For example, if the LLM gets 3 prompts and n is 2, + it will generate two completions for each of the three prompts, ending up with 6 completions in total. + - `stop`: One or more sequences after which the LLM should stop generating tokens. + - `presence_penalty`: What penalty to apply if a token is already present at all. Bigger values mean + the model will be less likely to repeat the same token in the text. + - `frequency_penalty`: What penalty to apply if a token has already been generated in the text. + Bigger values mean the model will be less likely to repeat the same token in the text. 
+ - `logit_bias`: Add a logit bias to specific tokens. The keys of the dictionary are tokens, and the + values are the bias to add to that token. + :param timeout: + Timeout for OpenAI client calls. If not set, it defaults to either the + `OPENAI_TIMEOUT` environment variable, or 30 seconds. + :param max_retries: + Maximum number of retries to contact OpenAI after an internal error. + If not set, it defaults to either the `OPENAI_MAX_RETRIES` environment variable, or set to 5. + """ + self.api_key = api_key + self.model = model + self.generation_kwargs = generation_kwargs or {} + self.streaming_callback = streaming_callback + self.api_base_url = api_base_url + self.organization = organization + + if timeout is None: + timeout = float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + if max_retries is None: + max_retries = int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + + self.client = OpenAI( + api_key=api_key.resolve_value(), + organization=organization, + base_url=api_base_url, + timeout=timeout, + max_retries=max_retries, + ) + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serialize this component to a dictionary. + + :returns: + The serialized component as a dictionary. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + return default_to_dict( + self, + model=self.model, + streaming_callback=callback_name, + api_base_url=self.api_base_url, + organization=self.organization, + generation_kwargs=self.generation_kwargs, + api_key=self.api_key.to_dict(), + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OpenAIChatGenerator": + """ + Deserialize this component from a dictionary. + + :param data: The dictionary representation of this component. + :returns: + The deserialized component instance. 
+ """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + return default_from_dict(cls, data) + + @component.output_types(replies=List[ChatMessage]) + def run( + self, + messages: List[ChatMessage], + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + ): + """ + Invokes chat completion based on the provided messages and generation parameters. + + :param messages: A list of ChatMessage instances representing the input messages. + :param streaming_callback: A callback function that is called when a new token is received from the stream. + :param generation_kwargs: Additional keyword arguments for text generation. These parameters will + override the parameters passed during component initialization. + For details on OpenAI API parameters, see + [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat/create). + + :returns: + A list containing the generated responses as ChatMessage instances. 
+ """ + + # update generation kwargs by merging with the generation kwargs passed to the run method + generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})} + + # check if streaming_callback is passed + streaming_callback = streaming_callback or self.streaming_callback + + # adapt ChatMessage(s) to the format expected by the OpenAI API + openai_formatted_messages = [_convert_message_to_openai_format(message) for message in messages] + + chat_completion: Union[Stream[ChatCompletionChunk], ChatCompletion] = self.client.chat.completions.create( + model=self.model, + messages=openai_formatted_messages, # type: ignore # openai expects list of specific message types + stream=streaming_callback is not None, + **generation_kwargs, + ) + + completions: List[ChatMessage] = [] + # if streaming is enabled, the completion is a Stream of ChatCompletionChunk + if isinstance(chat_completion, Stream): + num_responses = generation_kwargs.pop("n", 1) + if num_responses > 1: + raise ValueError("Cannot stream multiple responses, please set n=1.") + chunks: List[StreamingChunk] = [] + chunk = None + _first_token = True + + # pylint: disable=not-an-iterable + for chunk in chat_completion: + if chunk.choices and streaming_callback: + chunk_delta: StreamingChunk = self._build_chunk(chunk) + if _first_token: + _first_token = False + chunk_delta.meta["completion_start_time"] = datetime.now().isoformat() + chunks.append(chunk_delta) + streaming_callback(chunk_delta) # invoke callback with the chunk_delta + completions = [self._connect_chunks(chunk, chunks)] + # if streaming is disabled, the completion is a ChatCompletion + elif isinstance(chat_completion, ChatCompletion): + completions = [self._build_message(chat_completion, choice) for choice in chat_completion.choices] + + # before returning, do post-processing of the completions + for message in completions: + self._check_finish_reason(message) + + return {"replies": completions} + + def _connect_chunks(self, chunk: 
Any, chunks: List[StreamingChunk]) -> ChatMessage: + """ + Connects the streaming chunks into a single ChatMessage. + + :param chunk: The last chunk returned by the OpenAI API. + :param chunks: The list of all chunks returned by the OpenAI API. + """ + is_tools_call = bool(chunks[0].meta.get("tool_calls")) + is_function_call = bool(chunks[0].meta.get("function_call")) + # if it's a tool call or function call, we need to build the payload dict from all the chunks + if is_tools_call or is_function_call: + tools_len = 1 if is_function_call else len(chunks[0].meta.get("tool_calls", [])) + # don't change this approach of building payload dicts, otherwise mypy will complain + p_def: Dict[str, Any] = { + "index": 0, + "id": "", + "function": {"arguments": "", "name": ""}, + "type": "function", + } + payloads = [copy.deepcopy(p_def) for _ in range(tools_len)] + for chunk_payload in chunks: + if is_tools_call: + deltas = chunk_payload.meta.get("tool_calls") or [] + else: + deltas = [chunk_payload.meta["function_call"]] if chunk_payload.meta.get("function_call") else [] + + # deltas is a list of ChoiceDeltaToolCall or ChoiceDeltaFunctionCall + for i, delta in enumerate(deltas): + payload = payloads[i] + if is_tools_call: + payload["id"] = delta.id or payload["id"] + payload["type"] = delta.type or payload["type"] + if delta.function: + payload["function"]["name"] += delta.function.name or "" + payload["function"]["arguments"] += delta.function.arguments or "" + elif is_function_call: + payload["function"]["name"] += delta.name or "" + payload["function"]["arguments"] += delta.arguments or "" + complete_response = ChatMessage.from_assistant(json.dumps(payloads)) + else: + total_content = "" + total_meta = {} + for streaming_chunk in chunks: + total_content += streaming_chunk.content + total_meta.update(streaming_chunk.meta) + complete_response = ChatMessage.from_assistant(total_content, meta=total_meta) + complete_response.meta.update( + { + "model": chunk.model, + "index": 
0, + "finish_reason": chunk.choices[0].finish_reason, + "usage": {}, # we don't have usage data for streaming responses + } + ) + return complete_response + + def _build_message(self, completion: ChatCompletion, choice: Choice) -> ChatMessage: + """ + Converts the non-streaming response from the OpenAI API to a ChatMessage. + + :param completion: The completion returned by the OpenAI API. + :param choice: The choice returned by the OpenAI API. + :return: The ChatMessage. + """ + message: ChatCompletionMessage = choice.message + content = message.content or "" + if message.function_call: + # here we mimic the tools format response so that if user passes deprecated `functions` parameter + # she'll get the same output as if new `tools` parameter was passed + # use pydantic model dump to serialize the function call + content = json.dumps( + [{"function": message.function_call.model_dump(), "type": "function", "id": completion.id}] + ) + elif message.tool_calls: + # new `tools` parameter was passed, use pydantic model dump to serialize the tool calls + content = json.dumps([tc.model_dump() for tc in message.tool_calls]) + + chat_message = ChatMessage.from_assistant(content) + chat_message.meta.update( + { + "model": completion.model, + "index": choice.index, + "finish_reason": choice.finish_reason, + "usage": dict(completion.usage or {}), + } + ) + return chat_message + + def _build_chunk(self, chunk: ChatCompletionChunk) -> StreamingChunk: + """ + Converts the streaming response chunk from the OpenAI API to a StreamingChunk. + + :param chunk: The chunk returned by the OpenAI API. + :param choice: The choice returned by the OpenAI API. + :return: The StreamingChunk. 
+ """ + # we stream the content of the chunk if it's not a tool or function call + choice: ChunkChoice = chunk.choices[0] + content = choice.delta.content or "" + chunk_message = StreamingChunk(content) + # but save the tool calls and function call in the meta if they are present + # and then connect the chunks in the _connect_chunks method + chunk_message.meta.update( + { + "model": chunk.model, + "index": choice.index, + "tool_calls": choice.delta.tool_calls, + "function_call": choice.delta.function_call, + "finish_reason": choice.finish_reason, + } + ) + return chunk_message + + def _check_finish_reason(self, message: ChatMessage) -> None: + """ + Check the `finish_reason` returned with the OpenAI completions. + + If the `finish_reason` is `length` or `content_filter`, log a warning. + :param message: The message returned by the LLM. + """ + if message.meta["finish_reason"] == "length": + logger.warning( + "The completion for index {index} has been truncated before reaching a natural stopping point. 
" + "Increase the max_tokens parameter to allow for longer completions.", + index=message.meta["index"], + finish_reason=message.meta["finish_reason"], + ) + if message.meta["finish_reason"] == "content_filter": + logger.warning( + "The completion for index {index} has been truncated due to the content filter.", + index=message.meta["index"], + finish_reason=message.meta["finish_reason"], + ) diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/hugging_face_local.py b/testbed/deepset-ai__haystack/haystack/components/generators/hugging_face_local.py new file mode 100644 index 0000000000000000000000000000000000000000..0e2c6ae5f0ec432b874a09a8d5b0f41177f6538b --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/hugging_face_local.py @@ -0,0 +1,265 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Callable, Dict, List, Literal, Optional + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.dataclasses import StreamingChunk +from haystack.lazy_imports import LazyImport +from haystack.utils import ( + ComponentDevice, + Secret, + deserialize_callable, + deserialize_secrets_inplace, + serialize_callable, +) +from haystack.utils.hf import deserialize_hf_model_kwargs, serialize_hf_model_kwargs + +logger = logging.getLogger(__name__) + +SUPPORTED_TASKS = ["text-generation", "text2text-generation"] + +with LazyImport(message="Run 'pip install \"transformers[torch]\"'") as transformers_import: + from transformers import StoppingCriteriaList, pipeline + + from haystack.utils.hf import ( # pylint: disable=ungrouped-imports + HFTokenStreamingHandler, + StopWordsCriteria, + resolve_hf_pipeline_kwargs, + ) + + +@component +class HuggingFaceLocalGenerator: + """ + Generates text using models from Hugging Face that run locally. + + LLMs running locally may need powerful hardware. 
+ + ### Usage example + + ```python + from haystack.components.generators import HuggingFaceLocalGenerator + + generator = HuggingFaceLocalGenerator( + model="google/flan-t5-large", + task="text2text-generation", + generation_kwargs={"max_new_tokens": 100, "temperature": 0.9}) + + generator.warm_up() + + print(generator.run("Who is the best American actor?")) + # {'replies': ['John Cusack']} + ``` + """ + + def __init__( # pylint: disable=too-many-positional-arguments + self, + model: str = "google/flan-t5-base", + task: Optional[Literal["text-generation", "text2text-generation"]] = None, + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + generation_kwargs: Optional[Dict[str, Any]] = None, + huggingface_pipeline_kwargs: Optional[Dict[str, Any]] = None, + stop_words: Optional[List[str]] = None, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + ): + """ + Creates an instance of a HuggingFaceLocalGenerator. + + :param model: The Hugging Face text generation model name or path. + :param task: The task for the Hugging Face pipeline. Possible options: + - `text-generation`: Supported by decoder models, like GPT. + - `text2text-generation`: Supported by encoder-decoder models, like T5. + If the task is specified in `huggingface_pipeline_kwargs`, this parameter is ignored. + If not specified, the component calls the Hugging Face API to infer the task from the model name. + :param device: The device for loading the model. If `None`, automatically selects the default device. + If a device or device map is specified in `huggingface_pipeline_kwargs`, it overrides this parameter. + :param token: The token to use as HTTP bearer authorization for remote files. + If the token is specified in `huggingface_pipeline_kwargs`, this parameter is ignored. + :param generation_kwargs: A dictionary with keyword arguments to customize text generation. 
+ Some examples: `max_length`, `max_new_tokens`, `temperature`, `top_k`, `top_p`. + See Hugging Face's documentation for more information: + - [customize-text-generation](https://huggingface.co/docs/transformers/main/en/generation_strategies#customize-text-generation) + - [transformers.GenerationConfig](https://huggingface.co/docs/transformers/main/en/main_classes/text_generation#transformers.GenerationConfig) + :param huggingface_pipeline_kwargs: Dictionary with keyword arguments to initialize the + Hugging Face pipeline for text generation. + These keyword arguments provide fine-grained control over the Hugging Face pipeline. + In case of duplication, these kwargs override `model`, `task`, `device`, and `token` init parameters. + For available kwargs, see [Hugging Face documentation](https://huggingface.co/docs/transformers/en/main_classes/pipelines#transformers.pipeline.task). + In this dictionary, you can also include `model_kwargs` to specify the kwargs for model initialization: + [transformers.PreTrainedModel.from_pretrained](https://huggingface.co/docs/transformers/en/main_classes/model#transformers.PreTrainedModel.from_pretrained) + :param stop_words: If the model generates a stop word, the generation stops. + If you provide this parameter, don't specify the `stopping_criteria` in `generation_kwargs`. + For some chat models, the output includes both the new text and the original prompt. + In these cases, make sure your prompt has no stop words. + :param streaming_callback: An optional callable for handling streaming responses. 
+ """ + transformers_import.check() + + self.token = token + generation_kwargs = generation_kwargs or {} + + huggingface_pipeline_kwargs = resolve_hf_pipeline_kwargs( + huggingface_pipeline_kwargs=huggingface_pipeline_kwargs or {}, + model=model, + task=task, + supported_tasks=SUPPORTED_TASKS, + device=device, + token=token, + ) + + # if not specified, set return_full_text to False for text-generation + # only generated text is returned (excluding prompt) + task = huggingface_pipeline_kwargs["task"] + if task == "text-generation": + generation_kwargs.setdefault("return_full_text", False) + + if stop_words and "stopping_criteria" in generation_kwargs: + raise ValueError( + "Found both the `stop_words` init parameter and the `stopping_criteria` key in `generation_kwargs`. " + "Please specify only one of them." + ) + generation_kwargs.setdefault("max_new_tokens", 512) + + self.huggingface_pipeline_kwargs = huggingface_pipeline_kwargs + self.generation_kwargs = generation_kwargs + self.stop_words = stop_words + self.pipeline = None + self.stopping_criteria_list = None + self.streaming_callback = streaming_callback + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + if isinstance(self.huggingface_pipeline_kwargs["model"], str): + return {"model": self.huggingface_pipeline_kwargs["model"]} + return {"model": f"[object of type {type(self.huggingface_pipeline_kwargs['model'])}]"} + + @property + def _warmed_up(self) -> bool: + if self.stop_words: + return (self.pipeline is not None) and (self.stopping_criteria_list is not None) + return self.pipeline is not None + + def warm_up(self): + """ + Initializes the component. 
+ """ + if self._warmed_up: + return + + if self.pipeline is None: + self.pipeline = pipeline(**self.huggingface_pipeline_kwargs) + + if self.stop_words: + stop_words_criteria = StopWordsCriteria( + tokenizer=self.pipeline.tokenizer, stop_words=self.stop_words, device=self.pipeline.device + ) + self.stopping_criteria_list = StoppingCriteriaList([stop_words_criteria]) + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + serialization_dict = default_to_dict( + self, + huggingface_pipeline_kwargs=self.huggingface_pipeline_kwargs, + generation_kwargs=self.generation_kwargs, + streaming_callback=callback_name, + stop_words=self.stop_words, + token=self.token.to_dict() if self.token else None, + ) + + huggingface_pipeline_kwargs = serialization_dict["init_parameters"]["huggingface_pipeline_kwargs"] + huggingface_pipeline_kwargs.pop("token", None) + + serialize_hf_model_kwargs(huggingface_pipeline_kwargs) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "HuggingFaceLocalGenerator": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. 
+ """ + deserialize_secrets_inplace(data["init_parameters"], keys=["token"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + + huggingface_pipeline_kwargs = init_params.get("huggingface_pipeline_kwargs", {}) + deserialize_hf_model_kwargs(huggingface_pipeline_kwargs) + return default_from_dict(cls, data) + + @component.output_types(replies=List[str]) + def run( + self, + prompt: str, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + ): + """ + Run the text generation model on the given prompt. + + :param prompt: + A string representing the prompt. + :param streaming_callback: + A callback function that is called when a new token is received from the stream. + :param generation_kwargs: + Additional keyword arguments for text generation. + + :returns: + A dictionary containing the generated replies. + - replies: A list of strings representing the generated replies. + """ + if not self._warmed_up: + raise RuntimeError( + "The component HuggingFaceLocalGenerator was not warmed up. Please call warm_up() before running." + ) + + if not prompt: + return {"replies": []} + + # merge generation kwargs from init method with those from run method + updated_generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})} + + # check if streaming_callback is passed + streaming_callback = streaming_callback or self.streaming_callback + + if streaming_callback: + num_responses = updated_generation_kwargs.get("num_return_sequences", 1) + if num_responses > 1: + msg = ( + "Streaming is enabled, but the number of responses is set to {num_responses}. " + "Streaming is only supported for single response generation. " + "Setting the number of responses to 1." 
+ ) + logger.warning(msg, num_responses=num_responses) + updated_generation_kwargs["num_return_sequences"] = 1 + # streamer parameter hooks into HF streaming, HFTokenStreamingHandler is an adapter to our streaming + updated_generation_kwargs["streamer"] = HFTokenStreamingHandler( + self.pipeline.tokenizer, # type: ignore + streaming_callback, + self.stop_words, # type: ignore + ) + + output = self.pipeline(prompt, stopping_criteria=self.stopping_criteria_list, **updated_generation_kwargs) # type: ignore + replies = [o["generated_text"] for o in output if "generated_text" in o] + + if self.stop_words: + # the output of the pipeline includes the stop word + replies = [reply.replace(stop_word, "").rstrip() for reply in replies for stop_word in self.stop_words] + + return {"replies": replies} diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/openai.py b/testbed/deepset-ai__haystack/haystack/components/generators/openai.py new file mode 100644 index 0000000000000000000000000000000000000000..48180fc3217945a730b0db9f2772edc81b26b78f --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/openai.py @@ -0,0 +1,327 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, Callable, Dict, List, Optional, Union + +from openai import OpenAI, Stream +from openai.types.chat import ChatCompletion, ChatCompletionChunk + +from haystack import component, default_from_dict, default_to_dict, logging +from haystack.components.generators.openai_utils import _convert_message_to_openai_format +from haystack.dataclasses import ChatMessage, StreamingChunk +from haystack.utils import Secret, deserialize_callable, deserialize_secrets_inplace, serialize_callable + +logger = logging.getLogger(__name__) + + +@component +class OpenAIGenerator: + """ + Generates text using OpenAI's large language models (LLMs). 
+ + It works with the gpt-4 and gpt-3.5-turbo models and supports streaming responses + from OpenAI API. It uses strings as input and output. + + You can customize how the text is generated by passing parameters to the + OpenAI API. Use the `**generation_kwargs` argument when you initialize + the component or when you run it. Any parameter that works with + `openai.ChatCompletion.create` will work here too. + + + For details on OpenAI API parameters, see + [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat). + + ### Usage example + + ```python + from haystack.components.generators import OpenAIGenerator + client = OpenAIGenerator() + response = client.run("What's Natural Language Processing? Be brief.") + print(response) + + >> {'replies': ['Natural Language Processing (NLP) is a branch of artificial intelligence that focuses on + >> the interaction between computers and human language. It involves enabling computers to understand, interpret, + >> and respond to natural human language in a way that is both meaningful and useful.'], 'meta': [{'model': + >> 'gpt-4o-mini', 'index': 0, 'finish_reason': 'stop', 'usage': {'prompt_tokens': 16, + >> 'completion_tokens': 49, 'total_tokens': 65}}]} + ``` + """ + + def __init__( + self, + api_key: Secret = Secret.from_env_var("OPENAI_API_KEY"), + model: str = "gpt-4o-mini", + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + api_base_url: Optional[str] = None, + organization: Optional[str] = None, + system_prompt: Optional[str] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + timeout: Optional[float] = None, + max_retries: Optional[int] = None, + ): + """ + Creates an instance of OpenAIGenerator. Unless specified otherwise in `model`, uses OpenAI's gpt-4o-mini + + By setting the 'OPENAI_TIMEOUT' and 'OPENAI_MAX_RETRIES' you can change the timeout and max_retries parameters + in the OpenAI client. + + :param api_key: The OpenAI API key to connect to OpenAI. 
+ :param model: The name of the model to use. + :param streaming_callback: A callback function that is called when a new token is received from the stream. + The callback function accepts StreamingChunk as an argument. + :param api_base_url: An optional base URL. + :param organization: The Organization ID, defaults to `None`. + :param system_prompt: The system prompt to use for text generation. If not provided, the system prompt is + omitted, and the default system prompt of the model is used. + :param generation_kwargs: Other parameters to use for the model. These parameters are all sent directly to + the OpenAI endpoint. See OpenAI [documentation](https://platform.openai.com/docs/api-reference/chat) for + more details. + Some of the supported parameters: + - `max_tokens`: The maximum number of tokens the output text can have. + - `temperature`: What sampling temperature to use. Higher values mean the model will take more risks. + Try 0.9 for more creative applications and 0 (argmax sampling) for ones with a well-defined answer. + - `top_p`: An alternative to sampling with temperature, called nucleus sampling, where the model + considers the results of the tokens with top_p probability mass. So, 0.1 means only the tokens + comprising the top 10% probability mass are considered. + - `n`: How many completions to generate for each prompt. For example, if the LLM gets 3 prompts and n is 2, + it will generate two completions for each of the three prompts, ending up with 6 completions in total. + - `stop`: One or more sequences after which the LLM should stop generating tokens. + - `presence_penalty`: What penalty to apply if a token is already present at all. Bigger values mean + the model will be less likely to repeat the same token in the text. + - `frequency_penalty`: What penalty to apply if a token has already been generated in the text. + Bigger values mean the model will be less likely to repeat the same token in the text. 
+ - `logit_bias`: Add a logit bias to specific tokens. The keys of the dictionary are tokens, and the + values are the bias to add to that token. + :param timeout: + Timeout for OpenAI Client calls, if not set it is inferred from the `OPENAI_TIMEOUT` environment variable + or set to 30. + :param max_retries: + Maximum retries to establish contact with OpenAI if it returns an internal error, if not set it is inferred + from the `OPENAI_MAX_RETRIES` environment variable or set to 5. + + """ + self.api_key = api_key + self.model = model + self.generation_kwargs = generation_kwargs or {} + self.system_prompt = system_prompt + self.streaming_callback = streaming_callback + + self.api_base_url = api_base_url + self.organization = organization + + if timeout is None: + timeout = float(os.environ.get("OPENAI_TIMEOUT", 30.0)) + if max_retries is None: + max_retries = int(os.environ.get("OPENAI_MAX_RETRIES", 5)) + + self.client = OpenAI( + api_key=api_key.resolve_value(), + organization=organization, + base_url=api_base_url, + timeout=timeout, + max_retries=max_retries, + ) + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model} + + def to_dict(self) -> Dict[str, Any]: + """ + Serialize this component to a dictionary. + + :returns: + The serialized component as a dictionary. + """ + callback_name = serialize_callable(self.streaming_callback) if self.streaming_callback else None + return default_to_dict( + self, + model=self.model, + streaming_callback=callback_name, + api_base_url=self.api_base_url, + organization=self.organization, + generation_kwargs=self.generation_kwargs, + system_prompt=self.system_prompt, + api_key=self.api_key.to_dict(), + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OpenAIGenerator": + """ + Deserialize this component from a dictionary. + + :param data: + The dictionary representation of this component. 
+ :returns: + The deserialized component instance. + """ + deserialize_secrets_inplace(data["init_parameters"], keys=["api_key"]) + init_params = data.get("init_parameters", {}) + serialized_callback_handler = init_params.get("streaming_callback") + if serialized_callback_handler: + data["init_parameters"]["streaming_callback"] = deserialize_callable(serialized_callback_handler) + return default_from_dict(cls, data) + + @component.output_types(replies=List[str], meta=List[Dict[str, Any]]) + def run( + self, + prompt: str, + system_prompt: Optional[str] = None, + streaming_callback: Optional[Callable[[StreamingChunk], None]] = None, + generation_kwargs: Optional[Dict[str, Any]] = None, + ): + """ + Invoke the text generation inference based on the provided messages and generation parameters. + + :param prompt: + The string prompt to use for text generation. + :param system_prompt: + The system prompt to use for text generation. If this run time system prompt is omitted, the system + prompt, if defined at initialisation time, is used. + :param streaming_callback: + A callback function that is called when a new token is received from the stream. + :param generation_kwargs: + Additional keyword arguments for text generation. These parameters will potentially override the parameters + passed in the `__init__` method. For more details on the parameters supported by the OpenAI API, refer to + the OpenAI [documentation](https://platform.openai.com/docs/api-reference/chat/create). + :returns: + A list of strings containing the generated responses and a list of dictionaries containing the metadata + for each response. 
+ """ + message = ChatMessage.from_user(prompt) + if system_prompt is not None: + messages = [ChatMessage.from_system(system_prompt), message] + elif self.system_prompt: + messages = [ChatMessage.from_system(self.system_prompt), message] + else: + messages = [message] + + # update generation kwargs by merging with the generation kwargs passed to the run method + generation_kwargs = {**self.generation_kwargs, **(generation_kwargs or {})} + + # check if streaming_callback is passed + streaming_callback = streaming_callback or self.streaming_callback + + # adapt ChatMessage(s) to the format expected by the OpenAI API + openai_formatted_messages = [_convert_message_to_openai_format(message) for message in messages] + + completion: Union[Stream[ChatCompletionChunk], ChatCompletion] = self.client.chat.completions.create( + model=self.model, + messages=openai_formatted_messages, # type: ignore + stream=streaming_callback is not None, + **generation_kwargs, + ) + + completions: List[ChatMessage] = [] + if isinstance(completion, Stream): + num_responses = generation_kwargs.pop("n", 1) + if num_responses > 1: + raise ValueError("Cannot stream multiple responses, please set n=1.") + chunks: List[StreamingChunk] = [] + chunk = None + + # pylint: disable=not-an-iterable + for chunk in completion: + if chunk.choices and streaming_callback: + chunk_delta: StreamingChunk = self._build_chunk(chunk) + chunks.append(chunk_delta) + streaming_callback(chunk_delta) # invoke callback with the chunk_delta + completions = [self._connect_chunks(chunk, chunks)] + elif isinstance(completion, ChatCompletion): + completions = [self._build_message(completion, choice) for choice in completion.choices] + + # before returning, do post-processing of the completions + for response in completions: + self._check_finish_reason(response) + + return { + "replies": [message.content for message in completions], + "meta": [message.meta for message in completions], + } + + @staticmethod + def 
_connect_chunks(chunk: Any, chunks: List[StreamingChunk]) -> ChatMessage: + """ + Connects the streaming chunks into a single ChatMessage. + """ + complete_response = ChatMessage.from_assistant("".join([chunk.content for chunk in chunks])) + complete_response.meta.update( + { + "model": chunk.model, + "index": 0, + "finish_reason": chunk.choices[0].finish_reason, + "usage": {}, # we don't have usage data for streaming responses + } + ) + return complete_response + + @staticmethod + def _build_message(completion: Any, choice: Any) -> ChatMessage: + """ + Converts the response from the OpenAI API to a ChatMessage. + + :param completion: + The completion returned by the OpenAI API. + :param choice: + The choice returned by the OpenAI API. + :returns: + The ChatMessage. + """ + # function or tools calls are not going to happen in non-chat generation + # as users can not send ChatMessage with function or tools calls + chat_message = ChatMessage.from_assistant(choice.message.content or "") + chat_message.meta.update( + { + "model": completion.model, + "index": choice.index, + "finish_reason": choice.finish_reason, + "usage": dict(completion.usage), + } + ) + return chat_message + + @staticmethod + def _build_chunk(chunk: Any) -> StreamingChunk: + """ + Converts the response from the OpenAI API to a StreamingChunk. + + :param chunk: + The chunk returned by the OpenAI API. + :returns: + The StreamingChunk. + """ + # function or tools calls are not going to happen in non-chat generation + # as users can not send ChatMessage with function or tools calls + choice = chunk.choices[0] + content = choice.delta.content or "" + chunk_message = StreamingChunk(content) + chunk_message.meta.update({"model": chunk.model, "index": choice.index, "finish_reason": choice.finish_reason}) + return chunk_message + + @staticmethod + def _check_finish_reason(message: ChatMessage) -> None: + """ + Check the `finish_reason` returned with the OpenAI completions. 
+ + If the `finish_reason` is `length`, log a warning to the user. + + :param message: + The message returned by the LLM. + """ + if message.meta["finish_reason"] == "length": + logger.warning( + "The completion for index {index} has been truncated before reaching a natural stopping point. " + "Increase the max_tokens parameter to allow for longer completions.", + index=message.meta["index"], + finish_reason=message.meta["finish_reason"], + ) + if message.meta["finish_reason"] == "content_filter": + logger.warning( + "The completion for index {index} has been truncated due to the content filter.", + index=message.meta["index"], + finish_reason=message.meta["finish_reason"], + ) diff --git a/testbed/deepset-ai__haystack/haystack/components/generators/openai_utils.py b/testbed/deepset-ai__haystack/haystack/components/generators/openai_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..555e0ee30b902cf45013a060b881cd8baae7575c --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/generators/openai_utils.py @@ -0,0 +1,25 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Dict + +from haystack.dataclasses import ChatMessage + + +def _convert_message_to_openai_format(message: ChatMessage) -> Dict[str, str]: + """ + Convert a message to the format expected by OpenAI's Chat API. + + See the [API reference](https://platform.openai.com/docs/api-reference/chat/create) for details. 
    :returns: A dictionary with the following keys:
        - `role`
        - `content`
        - `name` (optional)
    """
    openai_msg = {"role": message.role.value, "content": message.content}
    # "name" is optional in the OpenAI chat schema; only include it when set.
    if message.name:
        openai_msg["name"] = message.name

    return openai_msg
diff --git a/testbed/deepset-ai__haystack/haystack/components/preprocessors/__init__.py b/testbed/deepset-ai__haystack/haystack/components/preprocessors/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7e132077a4d8c801acad8aabcd44e70125866d7
--- /dev/null
+++ b/testbed/deepset-ai__haystack/haystack/components/preprocessors/__init__.py
@@ -0,0 +1,10 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH
#
# SPDX-License-Identifier: Apache-2.0

from .document_cleaner import DocumentCleaner
from .document_splitter import DocumentSplitter
from .nltk_document_splitter import NLTKDocumentSplitter
from .text_cleaner import TextCleaner

__all__ = ["DocumentSplitter", "DocumentCleaner", "TextCleaner", "NLTKDocumentSplitter"]
diff --git a/testbed/deepset-ai__haystack/haystack/components/preprocessors/document_cleaner.py b/testbed/deepset-ai__haystack/haystack/components/preprocessors/document_cleaner.py
new file mode 100644
index 0000000000000000000000000000000000000000..d56006130c1ed98ca9087204f8c2175a5288c497
--- /dev/null
+++ b/testbed/deepset-ai__haystack/haystack/components/preprocessors/document_cleaner.py
@@ -0,0 +1,316 @@
# SPDX-FileCopyrightText: 2022-present deepset GmbH
#
# SPDX-License-Identifier: Apache-2.0

import re
from copy import deepcopy
from functools import partial, reduce
from itertools import chain
from typing import Generator, List, Literal, Optional, Set
from unicodedata import normalize

from haystack import Document, component, logging

logger = logging.getLogger(__name__)


@component
class DocumentCleaner:
    """
    Cleans the text in the documents.
+ + It removes extra whitespaces, + empty lines, specified substrings, regexes, + page headers and footers (in this order). + + ### Usage example: + + ```python + from haystack import Document + from haystack.components.preprocessors import DocumentCleaner + + doc = Document(content="This is a document to clean\\n\\n\\nsubstring to remove") + + cleaner = DocumentCleaner(remove_substrings = ["substring to remove"]) + result = cleaner.run(documents=[doc]) + + assert result["documents"][0].content == "This is a document to clean " + ``` + """ + + def __init__( + self, + remove_empty_lines: bool = True, + remove_extra_whitespaces: bool = True, + remove_repeated_substrings: bool = False, + keep_id: bool = False, + remove_substrings: Optional[List[str]] = None, + remove_regex: Optional[str] = None, + unicode_normalization: Optional[Literal["NFC", "NFKC", "NFD", "NFKD"]] = None, + ascii_only: bool = False, + ): + """ + Initialize DocumentCleaner. + + :param remove_empty_lines: If `True`, removes empty lines. + :param remove_extra_whitespaces: If `True`, removes extra whitespaces. + :param remove_repeated_substrings: If `True`, removes repeated substrings (headers and footers) from pages. + Pages must be separated by a form feed character "\\f", + which is supported by `TextFileToDocument` and `AzureOCRDocumentConverter`. + :param remove_substrings: List of substrings to remove from the text. + :param remove_regex: Regex to match and replace substrings by "". + :param keep_id: If `True`, keeps the IDs of the original documents. + :param unicode_normalization: Unicode normalization form to apply to the text. + Note: This will run before any other steps. + :param ascii_only: Whether to convert the text to ASCII only. + Will remove accents from characters and replace them with ASCII characters. + Other non-ASCII characters will be removed. + Note: This will run before any pattern matching or removal. 
+ """ + + self._validate_params(unicode_normalization=unicode_normalization) + + self.remove_empty_lines = remove_empty_lines + self.remove_extra_whitespaces = remove_extra_whitespaces + self.remove_repeated_substrings = remove_repeated_substrings + self.remove_substrings = remove_substrings + self.remove_regex = remove_regex + self.keep_id = keep_id + self.unicode_normalization = unicode_normalization + self.ascii_only = ascii_only + + def _validate_params(self, unicode_normalization: Optional[str]): + """ + Validate the parameters of the DocumentCleaner. + + :param unicode_normalization: Unicode normalization form to apply to the text. + :raises ValueError: if the parameters are not valid. + """ + if unicode_normalization and unicode_normalization not in ["NFC", "NFKC", "NFD", "NFKD"]: + raise ValueError("unicode_normalization must be one of 'NFC', 'NFKC', 'NFD', 'NFKD'.") + + @component.output_types(documents=List[Document]) + def run(self, documents: List[Document]): + """ + Cleans up the documents. + + :param documents: List of Documents to clean. + + :returns: A dictionary with the following key: + - `documents`: List of cleaned Documents. + + :raises TypeError: if documents is not a list of Documents. 
+ """ + if not isinstance(documents, list) or documents and not isinstance(documents[0], Document): + raise TypeError("DocumentCleaner expects a List of Documents as input.") + + cleaned_docs = [] + for doc in documents: + if doc.content is None: + logger.warning( + "DocumentCleaner only cleans text documents but document.content for document ID" + " %{document_id} is None.", + document_id=doc.id, + ) + cleaned_docs.append(doc) + continue + text = doc.content + + if self.unicode_normalization: + text = self._normalize_unicode(text, self.unicode_normalization) + if self.ascii_only: + text = self._ascii_only(text) + if self.remove_extra_whitespaces: + text = self._remove_extra_whitespaces(text) + if self.remove_empty_lines: + text = self._remove_empty_lines(text) + if self.remove_substrings: + text = self._remove_substrings(text, self.remove_substrings) + if self.remove_regex: + text = self._remove_regex(text, self.remove_regex) + if self.remove_repeated_substrings: + text = self._remove_repeated_substrings(text) + + cleaned_docs.append(Document(content=text, meta=deepcopy(doc.meta), id=doc.id if self.keep_id else "")) + + return {"documents": cleaned_docs} + + def _normalize_unicode(self, text: str, form: Literal["NFC", "NFKC", "NFD", "NFKD"]) -> str: + """ + Normalize the unicode of the text. + + :param text: Text to normalize. + :param form: Unicode normalization form to apply to the text. + Options: "NFC", "NFKC", "NFD", "NFKD". + :returns: The normalized text. + """ + return normalize(form, text) + + def _ascii_only(self, text: str) -> str: + """ + Convert the text to ASCII only. + + Will remove accents from characters and replace them with ASCII characters. + Other non-ASCII characters will be removed. + + :param text: Text to convert to ASCII only. + :returns: The text in ASCII only. 
+ """ + + # First normalize the text to NFKD to separate the characters and their diacritics + # Then encode it to ASCII and ignore any characters that can't be encoded + return self._normalize_unicode(text, "NFKD").encode("ascii", "ignore").decode("utf-8") + + def _remove_empty_lines(self, text: str) -> str: + """ + Remove empty lines and lines that contain nothing but whitespaces from text. + + :param text: Text to clean. + :returns: The text without empty lines. + """ + pages = text.split("\f") + cleaned_pages = ["\n".join(line for line in page.split("\n") if line.strip()) for page in pages] + return "\f".join(cleaned_pages) + + def _remove_extra_whitespaces(self, text: str) -> str: + """ + Remove extra whitespaces from text. + + :param text: Text to clean. + :returns: The text without extra whitespaces. + """ + texts = text.split("\f") + cleaned_text = [re.sub(r"\s\s+", " ", text).strip() for text in texts] + return "\f".join(cleaned_text) + + def _remove_regex(self, text: str, regex: str) -> str: + """ + Remove substrings that match the specified regex from the text. + + :param text: Text to clean. + :param regex: Regex to match and replace substrings by "". + :returns: The text without the substrings that match the regex. + """ + texts = text.split("\f") + cleaned_text = [re.sub(regex, "", text).strip() for text in texts] + return "\f".join(cleaned_text) + + def _remove_substrings(self, text: str, substrings: List[str]) -> str: + """ + Remove all specified substrings from the text. + + :param text: Text to clean. + :param substrings: Substrings to remove. + :returns: The text without the specified substrings. + """ + for substring in substrings: + text = text.replace(substring, "") + return text + + def _remove_repeated_substrings(self, text: str) -> str: + """ + Remove any substrings from the text that occur repeatedly on every page. For example headers or footers. + + Pages in the text need to be separated by form feed character "\f". 
+ :param text: Text to clean. + :returns: The text without the repeated substrings. + """ + return self._find_and_remove_header_footer( + text, n_chars=300, n_first_pages_to_ignore=1, n_last_pages_to_ignore=1 + ) + + def _find_and_remove_header_footer( + self, text: str, n_chars: int, n_first_pages_to_ignore: int, n_last_pages_to_ignore: int + ) -> str: + """ + Heuristic to find footers and headers across different pages by searching for the longest common string. + + Pages in the text need to be separated by form feed character "\f". + For headers, we only search in the first n_chars characters (for footer: last n_chars). + Note: This heuristic uses exact matches and therefore works well for footers like "Copyright 2019 by XXX", + but won't detect "Page 3 of 4" or similar. + + :param n_chars: The number of first/last characters where the header/footer shall be searched in. + :param n_first_pages_to_ignore: The number of first pages to ignore + (e.g. TOCs often don't contain footer/header). + :param n_last_pages_to_ignore: The number of last pages to ignore. + :returns: The text without the found headers and footers. + """ + + pages = text.split("\f") + + # header + start_of_pages = [p[:n_chars] for p in pages[n_first_pages_to_ignore:-n_last_pages_to_ignore]] + found_header = self._find_longest_common_ngram(start_of_pages) + if found_header: + pages = [page.replace(found_header, "") for page in pages] + + # footer + end_of_pages = [p[-n_chars:] for p in pages[n_first_pages_to_ignore:-n_last_pages_to_ignore]] + found_footer = self._find_longest_common_ngram(end_of_pages) + if found_footer: + pages = [page.replace(found_footer, "") for page in pages] + + logger.debug( + "Removed header '{header}' and footer '{footer}' in document", header=found_header, footer=found_footer + ) + text = "\f".join(pages) + return text + + def _ngram(self, seq: str, n: int) -> Generator[str, None, None]: + """ + Return all ngrams of length n from a text sequence. 
Each ngram consists of n words split by whitespace.

        :param seq: The sequence to generate ngrams from.
        :param n: The length of the ngrams to generate.
        :returns: A Generator generating all ngrams of length n from the given sequence.
        """

        # In order to maintain the original whitespace, but still consider \n and \t for n-gram tokenization,
        # we add a space here and remove it after creation of the ngrams again (see below)
        seq = seq.replace("\n", " \n")
        seq = seq.replace("\t", " \t")

        words = seq.split(" ")
        # Undo the padding inserted above so the yielded ngrams carry the original whitespace.
        ngrams = (
            " ".join(words[i : i + n]).replace(" \n", "\n").replace(" \t", "\t") for i in range(0, len(words) - n + 1)
        )

        return ngrams

    def _allngram(self, seq: str, min_ngram: int, max_ngram: int) -> Set[str]:
        """
        Generates all possible ngrams from a given sequence of text.

        Considering all ngram lengths between the minimum and maximum length.

        :param seq: The sequence to generate ngrams from.
        :param min_ngram: The minimum length of ngram to consider.
        :param max_ngram: The maximum length of ngram to consider.
        :returns: A set of all ngrams from the given sequence.
        """
        # NOTE(review): range(min_ngram, max_ngram) excludes max_ngram itself, and a falsy
        # max_ngram (0/None) falls back to len(seq) — presumably intended; confirm.
        lengths = range(min_ngram, max_ngram) if max_ngram else range(min_ngram, len(seq))
        ngrams = map(partial(self._ngram, seq), lengths)
        res = set(chain.from_iterable(ngrams))
        return res

    def _find_longest_common_ngram(self, sequences: List[str], min_ngram: int = 3, max_ngram: int = 30) -> str:
        """
        Find the longest common ngram across a list of text sequences (e.g. start of pages).

        Considering all ngram lengths between the minimum and maximum length. Helpful for finding footers, headers etc.
        Empty sequences are ignored.

        :param sequences: The list of strings that shall be searched for common n_grams.
        :param max_ngram: The maximum length of ngram to consider.
        :param min_ngram: The minimum length of ngram to consider.
        :returns: The longest ngram that all sequences have in common.
+ """ + sequences = [s for s in sequences if s] # filter empty sequences + if not sequences: + return "" + seqs_ngrams = map(partial(self._allngram, min_ngram=min_ngram, max_ngram=max_ngram), sequences) + intersection = reduce(set.intersection, seqs_ngrams) + + longest = max(intersection, key=len, default="") + return longest if longest.strip() else "" diff --git a/testbed/deepset-ai__haystack/haystack/components/preprocessors/nltk_document_splitter.py b/testbed/deepset-ai__haystack/haystack/components/preprocessors/nltk_document_splitter.py new file mode 100644 index 0000000000000000000000000000000000000000..a521901382607d0265a8d8baa6e6426a75f0240b --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/preprocessors/nltk_document_splitter.py @@ -0,0 +1,467 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import re +from copy import deepcopy +from pathlib import Path +from typing import Any, Dict, List, Literal, Tuple + +from haystack import Document, component, logging +from haystack.components.preprocessors.document_splitter import DocumentSplitter +from haystack.lazy_imports import LazyImport + +with LazyImport("Run 'pip install nltk'") as nltk_imports: + import nltk + +logger = logging.getLogger(__name__) + +Language = Literal[ + "ru", "sl", "es", "sv", "tr", "cs", "da", "nl", "en", "et", "fi", "fr", "de", "el", "it", "no", "pl", "pt", "ml" +] + + +@component +class NLTKDocumentSplitter(DocumentSplitter): + def __init__( + self, + split_by: Literal["word", "sentence", "page", "passage", "function"] = "word", + split_length: int = 200, + split_overlap: int = 0, + split_threshold: int = 0, + respect_sentence_boundary: bool = False, + language: Language = "en", + use_split_rules: bool = True, + extend_abbreviations: bool = True, + ): + """ + Splits your documents using NLTK to respect sentence boundaries. + + Initialize the NLTKDocumentSplitter. 
+ + :param split_by: Select the unit for splitting your documents. Choose from `word` for splitting by spaces (" "), + `sentence` for splitting by NLTK sentence tokenizer, `page` for splitting by the form feed ("\\f") or + `passage` for splitting by double line breaks ("\\n\\n"). + :param split_length: The maximum number of units in each split. + :param split_overlap: The number of overlapping units for each split. + :param split_threshold: The minimum number of units per split. If a split has fewer units + than the threshold, it's attached to the previous split. + :param respect_sentence_boundary: Choose whether to respect sentence boundaries when splitting by "word". + If True, uses NLTK to detect sentence boundaries, ensuring splits occur only between sentences. + :param language: Choose the language for the NLTK tokenizer. The default is English ("en"). + :param use_split_rules: Choose whether to use additional split rules when splitting by `sentence`. + :param extend_abbreviations: Choose whether to extend NLTK's PunktTokenizer abbreviations with a list + of curated abbreviations, if available. + This is currently supported for English ("en") and German ("de"). + """ + + super(NLTKDocumentSplitter, self).__init__( + split_by=split_by, split_length=split_length, split_overlap=split_overlap, split_threshold=split_threshold + ) + nltk_imports.check() + if respect_sentence_boundary and split_by != "word": + logger.warning( + "The 'respect_sentence_boundary' option is only supported for `split_by='word'`. " + "The option `respect_sentence_boundary` will be set to `False`." 
+ ) + respect_sentence_boundary = False + self.respect_sentence_boundary = respect_sentence_boundary + self.sentence_splitter = SentenceSplitter( + language=language, + use_split_rules=use_split_rules, + extend_abbreviations=extend_abbreviations, + keep_white_spaces=True, + ) + self.language = language + + def _split_into_units( + self, text: str, split_by: Literal["word", "sentence", "passage", "page", "function"] + ) -> List[str]: + """ + Splits the text into units based on the specified split_by parameter. + + :param text: The text to split. + :param split_by: The unit to split the text by. Choose from "word", "sentence", "passage", or "page". + :returns: A list of units. + """ + + if split_by == "page": + self.split_at = "\f" + units = text.split(self.split_at) + elif split_by == "passage": + self.split_at = "\n\n" + units = text.split(self.split_at) + elif split_by == "sentence": + # whitespace is preserved while splitting text into sentences when using keep_white_spaces=True + # so split_at is set to an empty string + self.split_at = "" + result = self.sentence_splitter.split_sentences(text) + units = [sentence["sentence"] for sentence in result] + elif split_by == "word": + self.split_at = " " + units = text.split(self.split_at) + else: + raise NotImplementedError( + "DocumentSplitter only supports 'word', 'sentence', 'page' or 'passage' split_by options." + ) + + # Add the delimiter back to all units except the last one + for i in range(len(units) - 1): + units[i] += self.split_at + return units + + @component.output_types(documents=List[Document]) + def run(self, documents: List[Document]) -> Dict[str, List[Document]]: + """ + Split documents into smaller parts. + + Splits documents by the unit expressed in `split_by`, with a length of `split_length` + and an overlap of `split_overlap`. + + :param documents: The documents to split. + + :returns: A dictionary with the following key: + - `documents`: List of documents with the split texts. 
Each document includes: + - A metadata field source_id to track the original document. + - A metadata field page_number to track the original page number. + - All other metadata copied from the original document. + + :raises TypeError: if the input is not a list of Documents. + :raises ValueError: if the content of a document is None. + """ + if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)): + raise TypeError("DocumentSplitter expects a List of Documents as input.") + + split_docs = [] + for doc in documents: + if doc.content is None: + raise ValueError( + f"DocumentSplitter only works with text documents but content for document ID {doc.id} is None." + ) + + if self.respect_sentence_boundary: + units = self._split_into_units(doc.content, "sentence") + text_splits, splits_pages, splits_start_idxs = self._concatenate_sentences_based_on_word_amount( + sentences=units, split_length=self.split_length, split_overlap=self.split_overlap + ) + else: + units = self._split_into_units(doc.content, self.split_by) + text_splits, splits_pages, splits_start_idxs = self._concatenate_units( + elements=units, + split_length=self.split_length, + split_overlap=self.split_overlap, + split_threshold=self.split_threshold, + ) + metadata = deepcopy(doc.meta) + metadata["source_id"] = doc.id + split_docs += self._create_docs_from_splits( + text_splits=text_splits, splits_pages=splits_pages, splits_start_idxs=splits_start_idxs, meta=metadata + ) + return {"documents": split_docs} + + @staticmethod + def _number_of_sentences_to_keep(sentences: List[str], split_length: int, split_overlap: int) -> int: + """ + Returns the number of sentences to keep in the next chunk based on the `split_overlap` and `split_length`. + + :param sentences: The list of sentences to split. + :param split_length: The maximum number of words in each split. + :param split_overlap: The number of overlapping words in each split. 
+ :returns: The number of sentences to keep in the next chunk. + """ + # If the split_overlap is 0, we don't need to keep any sentences + if split_overlap == 0: + return 0 + + num_sentences_to_keep = 0 + num_words = 0 + for sent in reversed(sentences): + num_words += len(sent.split()) + # If the number of words is larger than the split_length then don't add any more sentences + if num_words > split_length: + break + num_sentences_to_keep += 1 + if num_words > split_overlap: + break + return num_sentences_to_keep + + def _concatenate_sentences_based_on_word_amount( + self, sentences: List[str], split_length: int, split_overlap: int + ) -> Tuple[List[str], List[int], List[int]]: + """ + Groups the sentences into chunks of `split_length` words while respecting sentence boundaries. + + :param sentences: The list of sentences to split. + :param split_length: The maximum number of words in each split. + :param split_overlap: The number of overlapping words in each split. + :returns: A tuple containing the concatenated sentences, the start page numbers, and the start indices. 
+ """ + # Chunk information + chunk_word_count = 0 + chunk_starting_page_number = 1 + chunk_start_idx = 0 + current_chunk: List[str] = [] + # Output lists + split_start_page_numbers = [] + list_of_splits: List[List[str]] = [] + split_start_indices = [] + + for sentence_idx, sentence in enumerate(sentences): + current_chunk.append(sentence) + chunk_word_count += len(sentence.split()) + next_sentence_word_count = ( + len(sentences[sentence_idx + 1].split()) if sentence_idx < len(sentences) - 1 else 0 + ) + + # Number of words in the current chunk plus the next sentence is larger than the split_length + # or we reached the last sentence + if (chunk_word_count + next_sentence_word_count) > split_length or sentence_idx == len(sentences) - 1: + # Save current chunk and start a new one + list_of_splits.append(current_chunk) + split_start_page_numbers.append(chunk_starting_page_number) + split_start_indices.append(chunk_start_idx) + + # Get the number of sentences that overlap with the next chunk + num_sentences_to_keep = self._number_of_sentences_to_keep( + sentences=current_chunk, split_length=split_length, split_overlap=split_overlap + ) + # Set up information for the new chunk + if num_sentences_to_keep > 0: + # Processed sentences are the ones that are not overlapping with the next chunk + processed_sentences = current_chunk[:-num_sentences_to_keep] + chunk_starting_page_number += sum(sent.count("\f") for sent in processed_sentences) + chunk_start_idx += len("".join(processed_sentences)) + # Next chunk starts with the sentences that were overlapping with the previous chunk + current_chunk = current_chunk[-num_sentences_to_keep:] + chunk_word_count = sum(len(s.split()) for s in current_chunk) + else: + # Here processed_sentences is the same as current_chunk since there is no overlap + chunk_starting_page_number += sum(sent.count("\f") for sent in current_chunk) + chunk_start_idx += len("".join(current_chunk)) + current_chunk = [] + chunk_word_count = 0 + + # 
Concatenate the sentences together within each split + text_splits = [] + for split in list_of_splits: + text = "".join(split) + if len(text) > 0: + text_splits.append(text) + + return text_splits, split_start_page_numbers, split_start_indices + + +if nltk_imports.is_successful(): + ISO639_TO_NLTK = { + "ru": "russian", + "sl": "slovene", + "es": "spanish", + "sv": "swedish", + "tr": "turkish", + "cs": "czech", + "da": "danish", + "nl": "dutch", + "en": "english", + "et": "estonian", + "fi": "finnish", + "fr": "french", + "de": "german", + "el": "greek", + "it": "italian", + "no": "norwegian", + "pl": "polish", + "pt": "portuguese", + "ml": "malayalam", + } + + QUOTE_SPANS_RE = re.compile(r"\W(\"+|\'+).*?\1") + + class CustomPunktLanguageVars(nltk.tokenize.punkt.PunktLanguageVars): + # The following adjustment of PunktSentenceTokenizer is inspired by: + # https://stackoverflow.com/questions/33139531/preserve-empty-lines-with-nltks-punkt-tokenizer + # It is needed for preserving whitespace while splitting text into sentences. + _period_context_fmt = r""" + %(SentEndChars)s # a potential sentence ending + \s* # match potential whitespace [ \t\n\x0B\f\r] + (?=(?P + %(NonWord)s # either other punctuation + | + (?P\S+) # or some other token - original version: \s+(?P\S+) + ))""" + + def period_context_re(self) -> re.Pattern: + """ + Compiles and returns a regular expression to find contexts including possible sentence boundaries. + + :returns: A compiled regular expression pattern. + """ + try: + return self._re_period_context # type: ignore + except: # noqa: E722 + self._re_period_context = re.compile( + self._period_context_fmt + % { + "NonWord": self._re_non_word_chars, + # SentEndChars might be followed by closing brackets, so we match them here. 
+ "SentEndChars": self._re_sent_end_chars + r"[\)\]}]*", + }, + re.UNICODE | re.VERBOSE, + ) + return self._re_period_context + + def load_sentence_tokenizer( + language: Language, keep_white_spaces: bool = False + ) -> nltk.tokenize.punkt.PunktSentenceTokenizer: + """ + Utility function to load the nltk sentence tokenizer. + + :param language: The language for the tokenizer. + :param keep_white_spaces: If True, the tokenizer will keep white spaces between sentences. + :returns: nltk sentence tokenizer. + """ + try: + nltk.data.find("tokenizers/punkt_tab") + except LookupError: + try: + nltk.download("punkt_tab") + except FileExistsError as error: + logger.debug("NLTK punkt tokenizer seems to be already downloaded. Error message: {error}", error=error) + + language_name = ISO639_TO_NLTK.get(language) + + if language_name is not None: + sentence_tokenizer = nltk.data.load(f"tokenizers/punkt_tab/{language_name}.pickle") + else: + logger.warning( + "PreProcessor couldn't find the default sentence tokenizer model for {language}. " + " Using English instead. You may train your own model and use the 'tokenizer_model_folder' parameter.", + language=language, + ) + sentence_tokenizer = nltk.data.load("tokenizers/punkt_tab/english.pickle") + + if keep_white_spaces: + sentence_tokenizer._lang_vars = CustomPunktLanguageVars() + + return sentence_tokenizer + + class SentenceSplitter: # pylint: disable=too-few-public-methods + """ + SentenceSplitter splits a text into sentences using the nltk sentence tokenizer + """ + + def __init__( + self, + language: Language = "en", + use_split_rules: bool = True, + extend_abbreviations: bool = True, + keep_white_spaces: bool = False, + ) -> None: + """ + Initializes the SentenceSplitter with the specified language, split rules, and abbreviation handling. + + :param language: The language for the tokenizer. Default is "en". + :param use_split_rules: If True, the additional split rules are used. If False, the rules are not used. 
+ :param extend_abbreviations: If True, the abbreviations used by NLTK's PunktTokenizer are extended by a list + of curated abbreviations if available. If False, the default abbreviations are used. + Currently supported languages are: en, de. + :param keep_white_spaces: If True, the tokenizer will keep white spaces between sentences. + """ + self.language = language + self.sentence_tokenizer = load_sentence_tokenizer(language, keep_white_spaces=keep_white_spaces) + self.use_split_rules = use_split_rules + if extend_abbreviations: + abbreviations = SentenceSplitter._read_abbreviations(language) + self.sentence_tokenizer._params.abbrev_types.update(abbreviations) + self.keep_white_spaces = keep_white_spaces + + def split_sentences(self, text: str) -> List[Dict[str, Any]]: + """ + Splits a text into sentences including references to original char positions for each split. + + :param text: The text to split. + :returns: list of sentences with positions. + """ + sentence_spans = list(self.sentence_tokenizer.span_tokenize(text)) + if self.use_split_rules: + sentence_spans = SentenceSplitter._apply_split_rules(text, sentence_spans) + + sentences = [{"sentence": text[start:end], "start": start, "end": end} for start, end in sentence_spans] + return sentences + + @staticmethod + def _apply_split_rules(text: str, sentence_spans: List[Tuple[int, int]]) -> List[Tuple[int, int]]: + """ + Applies additional split rules to the sentence spans. + + :param text: The text to split. + :param sentence_spans: The list of sentence spans to split. + :returns: The list of sentence spans after applying the split rules. 
+ """ + new_sentence_spans = [] + quote_spans = [match.span() for match in QUOTE_SPANS_RE.finditer(text)] + while sentence_spans: + span = sentence_spans.pop(0) + next_span = sentence_spans[0] if len(sentence_spans) > 0 else None + while next_span and SentenceSplitter._needs_join(text, span, next_span, quote_spans): + sentence_spans.pop(0) + span = (span[0], next_span[1]) + next_span = sentence_spans[0] if len(sentence_spans) > 0 else None + start, end = span + new_sentence_spans.append((start, end)) + return new_sentence_spans + + @staticmethod + def _needs_join( + text: str, span: Tuple[int, int], next_span: Tuple[int, int], quote_spans: List[Tuple[int, int]] + ) -> bool: + """ + Checks if the spans need to be joined as parts of one sentence. + + :param text: The text containing the spans. + :param span: The current sentence span within text. + :param next_span: The next sentence span within text. + :param quote_spans: All quoted spans within text. + :returns: True if the spans needs to be joined. + """ + start, end = span + next_start, next_end = next_span + + # sentence. sentence"\nsentence -> no split (end << quote_end) + # sentence.", sentence -> no split (end < quote_end) + # sentence?", sentence -> no split (end < quote_end) + if any(quote_start < end < quote_end for quote_start, quote_end in quote_spans): + # sentence boundary is inside a quote + return True + + # sentence." sentence -> split (end == quote_end) + # sentence?" sentence -> no split (end == quote_end) + if any( + quote_start < end == quote_end and text[quote_end - 2] == "?" 
for quote_start, quote_end in quote_spans + ): + # question is cited + return True + + if re.search(r"(^|\n)\s*\d{1,2}\.$", text[start:end]) is not None: + # sentence ends with a numeration + return True + + # next sentence starts with a bracket or we return False + return re.search(r"^\s*[\(\[]", text[next_start:next_end]) is not None + + @staticmethod + def _read_abbreviations(language: Language) -> List[str]: + """ + Reads the abbreviations for a given language from the abbreviations file. + + :param language: The language to read the abbreviations for. + :returns: List of abbreviations. + """ + abbreviations_file = Path(__file__).parent.parent / f"data/abbreviations/{language}.txt" + if not abbreviations_file.exists(): + logger.warning( + "No abbreviations file found for {language}.Using default abbreviations.", language=language + ) + return [] + + abbreviations = abbreviations_file.read_text().split("\n") + return abbreviations diff --git a/testbed/deepset-ai__haystack/haystack/components/preprocessors/text_cleaner.py b/testbed/deepset-ai__haystack/haystack/components/preprocessors/text_cleaner.py new file mode 100644 index 0000000000000000000000000000000000000000..ea31a59fc7efcba22e3a1cfcf7be10c841eae1b9 --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/preprocessors/text_cleaner.py @@ -0,0 +1,83 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +import re +import string +from typing import Any, Dict, List, Optional + +from haystack import component + + +@component +class TextCleaner: + """ + Cleans text strings. + + It can remove substrings matching a list of regular expressions, convert text to lowercase, + remove punctuation, and remove numbers. + Use it to clean up text data before evaluation. 
@component
class TextCleaner:
    """
    Cleans text strings.

    It can remove substrings matching a list of regular expressions, convert text to lowercase,
    remove punctuation, and remove numbers.
    Use it to clean up text data before evaluation.

    ### Usage example

    ```python
    from haystack.components.preprocessors import TextCleaner

    text_to_clean = "1Moonlight shimmered softly, 300 Wolves howled nearby, Night enveloped everything."

    cleaner = TextCleaner(convert_to_lowercase=True, remove_punctuation=False, remove_numbers=True)
    result = cleaner.run(texts=[text_to_clean])
    ```
    """

    def __init__(
        self,
        remove_regexps: Optional[List[str]] = None,
        convert_to_lowercase: bool = False,
        remove_punctuation: bool = False,
        remove_numbers: bool = False,
    ):
        """
        Initializes the TextCleaner component.

        :param remove_regexps: A list of regex patterns to remove matching substrings from the text.
        :param convert_to_lowercase: If `True`, converts all characters to lowercase.
        :param remove_punctuation: If `True`, removes punctuation from the text.
        :param remove_numbers: If `True`, removes numerical digits from the text.
        """
        self._remove_regexps = remove_regexps
        self._convert_to_lowercase = convert_to_lowercase
        self._remove_punctuation = remove_punctuation
        self._remove_numbers = remove_numbers

        # Combine all patterns into one case-insensitive alternation so a single pass removes them all.
        # NOTE(review): patterns containing numbered groups/backreferences could interfere when
        # joined with "|" — confirm callers only pass self-contained patterns.
        self._regex = re.compile("|".join(remove_regexps), flags=re.IGNORECASE) if remove_regexps else None

        # str.translate drops punctuation/digits in one C-level pass.
        chars_to_drop = ""
        if remove_punctuation:
            chars_to_drop = string.punctuation
        if remove_numbers:
            chars_to_drop += string.digits
        self._translator = str.maketrans("", "", chars_to_drop) if chars_to_drop else None

    @component.output_types(texts=List[str])
    def run(self, texts: List[str]) -> Dict[str, Any]:
        """
        Cleans up the given list of strings.

        :param texts: List of strings to clean.
        :returns: A dictionary with the following key:
            - `texts`: the cleaned list of strings.
        """
        cleaned = texts

        if self._regex:
            cleaned = [self._regex.sub("", item) for item in cleaned]

        if self._convert_to_lowercase:
            cleaned = [item.lower() for item in cleaned]

        if self._translator:
            cleaned = [item.translate(self._translator) for item in cleaned]

        return {"texts": cleaned}
@component
class LostInTheMiddleRanker:
    """
    A LostInTheMiddle Ranker.

    Ranks documents based on the 'lost in the middle' order so that the most relevant documents are either at the
    beginning or end, while the least relevant are in the middle.

    LostInTheMiddleRanker assumes that some prior component in the pipeline has already ranked documents by relevance
    and requires no query as input but only documents. It is typically used as the last component before building a
    prompt for an LLM to prepare the input context for the LLM.

    Lost in the Middle ranking lays out document contents into LLM context so that the most relevant contents are at
    the beginning or end of the input context, while the least relevant is in the middle of the context. See the
    paper ["Lost in the Middle: How Language Models Use Long Contexts"](https://arxiv.org/abs/2307.03172) for more
    details.

    Usage example:
    ```python
    from haystack.components.rankers import LostInTheMiddleRanker
    from haystack import Document

    ranker = LostInTheMiddleRanker()
    docs = [Document(content="Paris"), Document(content="Berlin"), Document(content="Madrid")]
    result = ranker.run(documents=docs)
    for doc in result["documents"]:
        print(doc.content)
    ```
    """

    def __init__(self, word_count_threshold: Optional[int] = None, top_k: Optional[int] = None):
        """
        Initialize the LostInTheMiddleRanker.

        If 'word_count_threshold' is specified, this ranker includes all documents up until the point where adding
        another document would exceed the 'word_count_threshold'. The last document that causes the threshold to
        be breached will be included in the resulting list of documents, but all subsequent documents will be
        discarded.

        :param word_count_threshold: The maximum total number of words across all documents selected by the ranker.
        :param top_k: The maximum number of documents to return.
        """
        if isinstance(word_count_threshold, int) and word_count_threshold <= 0:
            raise ValueError(
                f"Invalid value for word_count_threshold: {word_count_threshold}. word_count_threshold must be > 0."
            )
        if isinstance(top_k, int) and top_k <= 0:
            raise ValueError(f"top_k must be > 0, but got {top_k}")

        self.word_count_threshold = word_count_threshold
        self.top_k = top_k

    @component.output_types(documents=List[Document])
    def run(
        self, documents: List[Document], top_k: Optional[int] = None, word_count_threshold: Optional[int] = None
    ) -> Dict[str, List[Document]]:
        """
        Reranks documents based on the "lost in the middle" order.

        :param documents: List of Documents to reorder.
        :param top_k: The maximum number of documents to return.
        :param word_count_threshold: The maximum total number of words across all documents selected by the ranker.
        :returns:
            A dictionary with the following keys:
            - `documents`: Reranked list of Documents

        :raises ValueError:
            If any of the documents is not textual.
        """
        if isinstance(word_count_threshold, int) and word_count_threshold <= 0:
            raise ValueError(
                f"Invalid value for word_count_threshold: {word_count_threshold}. word_count_threshold must be > 0."
            )
        if isinstance(top_k, int) and top_k <= 0:
            raise ValueError(f"top_k must be > 0, but got {top_k}")

        if not documents:
            return {"documents": []}

        top_k = top_k or self.top_k
        word_count_threshold = word_count_threshold or self.word_count_threshold
        selected = documents[:top_k] if top_k else documents

        # A single document needs no reordering.
        if len(selected) == 1:
            return {"documents": selected}

        # Only textual documents can be reordered.
        # NOTE(review): relies on Document exposing `content_type` — confirm against the Document class in use.
        if any(doc.content_type != "text" for doc in selected):
            raise ValueError("Some provided documents are not textual; LostInTheMiddleRanker can process only text.")

        total_words = 0
        litm_indices = [0]

        # Count the first document's words; if it alone breaches the threshold, return just it.
        if word_count_threshold and selected[0].content:
            total_words = len(selected[0].content.split())
            if total_words >= word_count_threshold:
                return {"documents": [selected[0]]}

        # Insert each following document into the middle of the current layout, alternating sides,
        # so the most relevant documents end up at the edges.
        for doc_idx in range(1, len(selected)):
            middle = (len(litm_indices) + 1) // 2  # ceil of half the current layout length
            litm_indices.insert(middle, doc_idx)

            if word_count_threshold and selected[doc_idx].content:
                total_words += len(selected[doc_idx].content.split())
                # Include the breaching document, then stop.
                if total_words >= word_count_threshold:
                    break

        return {"documents": [selected[idx] for idx in litm_indices]}
Locates and extracts answers to a given query from Documents. + + The ExtractiveReader component performs extractive question answering. + It assigns a score to every possible answer span independently of other answer spans. + This fixes a common issue of other implementations which make comparisons across documents harder by normalizing + each document's answers independently. + + Example usage: + ```python + from haystack import Document + from haystack.components.readers import ExtractiveReader + + docs = [ + Document(content="Python is a popular programming language"), + Document(content="python ist eine beliebte Programmiersprache"), + ] + + reader = ExtractiveReader() + reader.warm_up() + + question = "What is a popular programming language?" + result = reader.run(query=question, documents=docs) + assert "Python" in result["answers"][0].data + ``` + """ + + def __init__( + self, + model: Union[Path, str] = "deepset/roberta-base-squad2-distilled", + device: Optional[ComponentDevice] = None, + token: Optional[Secret] = Secret.from_env_var(["HF_API_TOKEN", "HF_TOKEN"], strict=False), + top_k: int = 20, + score_threshold: Optional[float] = None, + max_seq_length: int = 384, + stride: int = 128, + max_batch_size: Optional[int] = None, + answers_per_seq: Optional[int] = None, + no_answer: bool = True, + calibration_factor: float = 0.1, + overlap_threshold: Optional[float] = 0.01, + model_kwargs: Optional[Dict[str, Any]] = None, + ) -> None: + """ + Creates an instance of ExtractiveReader. + + :param model: + A Hugging Face transformers question answering model. + Can either be a path to a folder containing the model files or an identifier for the Hugging Face hub. + :param device: + The device on which the model is loaded. If `None`, the default device is automatically selected. + :param token: + The API token used to download private models from Hugging Face. + :param top_k: + Number of answers to return per query. It is required even if score_threshold is set. 
+ An additional answer with no text is returned if no_answer is set to True (default). + :param score_threshold: + Returns only answers with the probability score above this threshold. + :param max_seq_length: + Maximum number of tokens. If a sequence exceeds it, the sequence is split. + :param stride: + Number of tokens that overlap when sequence is split because it exceeds max_seq_length. + :param max_batch_size: + Maximum number of samples that are fed through the model at the same time. + :param answers_per_seq: + Number of answer candidates to consider per sequence. + This is relevant when a Document was split into multiple sequences because of max_seq_length. + :param no_answer: + Whether to return an additional `no answer` with an empty text and a score representing the + probability that the other top_k answers are incorrect. + :param calibration_factor: + Factor used for calibrating probabilities. + :param overlap_threshold: + If set this will remove duplicate answers if they have an overlap larger than the + supplied threshold. For example, for the answers "in the river in Maine" and "the river" we would remove + one of these answers since the second answer has a 100% (1.0) overlap with the first answer. + However, for the answers "the river in" and "in Maine" there is only a max overlap percentage of 25% so + both of these answers could be kept if this variable is set to 0.24 or lower. + If None is provided then all answers are kept. + :param model_kwargs: + Additional keyword arguments passed to `AutoModelForQuestionAnswering.from_pretrained` + when loading the model specified in `model`. For details on what kwargs you can pass, + see the model's documentation. 
+ """ + torch_and_transformers_import.check() + self.model_name_or_path = str(model) + self.model = None + self.tokenizer = None + self.device = None + self.token = token + self.max_seq_length = max_seq_length + self.top_k = top_k + self.score_threshold = score_threshold + self.stride = stride + self.max_batch_size = max_batch_size + self.answers_per_seq = answers_per_seq + self.no_answer = no_answer + self.calibration_factor = calibration_factor + self.overlap_threshold = overlap_threshold + + model_kwargs = resolve_hf_device_map(device=device, model_kwargs=model_kwargs) + self.model_kwargs = model_kwargs + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"model": self.model_name_or_path} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + serialization_dict = default_to_dict( + self, + model=self.model_name_or_path, + device=None, + token=self.token.to_dict() if self.token else None, + max_seq_length=self.max_seq_length, + top_k=self.top_k, + score_threshold=self.score_threshold, + stride=self.stride, + max_batch_size=self.max_batch_size, + answers_per_seq=self.answers_per_seq, + no_answer=self.no_answer, + calibration_factor=self.calibration_factor, + model_kwargs=self.model_kwargs, + ) + + serialize_hf_model_kwargs(serialization_dict["init_parameters"]["model_kwargs"]) + return serialization_dict + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "ExtractiveReader": + """ + Deserializes the component from a dictionary. + + :param data: + Dictionary to deserialize from. + :returns: + Deserialized component. 
+ """ + init_params = data["init_parameters"] + deserialize_secrets_inplace(init_params, keys=["token"]) + if init_params.get("device") is not None: + init_params["device"] = ComponentDevice.from_dict(init_params["device"]) + if init_params.get("model_kwargs") is not None: + deserialize_hf_model_kwargs(init_params["model_kwargs"]) + + return default_from_dict(cls, data) + + def warm_up(self): + """ + Initializes the component. + """ + # Take the first device used by `accelerate`. Needed to pass inputs from the tokenizer to the correct device. + if self.model is None: + self.model = AutoModelForQuestionAnswering.from_pretrained( + self.model_name_or_path, token=self.token.resolve_value() if self.token else None, **self.model_kwargs + ) + self.tokenizer = AutoTokenizer.from_pretrained( + self.model_name_or_path, token=self.token.resolve_value() if self.token else None + ) + self.device = ComponentDevice.from_multiple(device_map=DeviceMap.from_hf(self.model.hf_device_map)) + + def _flatten_documents( + self, queries: List[str], documents: List[List[Document]] + ) -> Tuple[List[str], List[Document], List[int]]: + """ + Flattens queries and Documents so all query-document pairs are arranged along one batch axis. + """ + flattened_queries = [query for documents_, query in zip(documents, queries) for _ in documents_] + flattened_documents = [document for documents_ in documents for document in documents_] + query_ids = [i for i, documents_ in enumerate(documents) for _ in documents_] + return flattened_queries, flattened_documents, query_ids + + def _preprocess( + self, queries: List[str], documents: List[Document], max_seq_length: int, query_ids: List[int], stride: int + ) -> Tuple["torch.Tensor", "torch.Tensor", "torch.Tensor", List["Encoding"], List[int], List[int]]: + """ + Splits and tokenizes Documents and preserves structures by returning mappings to query and Document IDs. 
+ """ + texts = [] + document_ids = [] + document_contents = [] + for i, doc in enumerate(documents): + if doc.content is None: + warnings.warn( + f"Document with id {doc.id} was passed to ExtractiveReader. The Document doesn't " + f"contain any text and it will be ignored." + ) + continue + texts.append(doc.content) + document_ids.append(i) + document_contents.append(doc.content) + + encodings_pt = self.tokenizer( # type: ignore + queries, + document_contents, + padding=True, + truncation=True, + max_length=max_seq_length, + return_tensors="pt", + return_overflowing_tokens=True, + stride=stride, + ) + + # To make mypy happy even though self.device is set in warm_up() + assert self.device is not None + assert self.device.first_device is not None + + # Take the first device used by `accelerate`. Needed to pass inputs from the tokenizer to the correct device. + first_device = self.device.first_device.to_torch() + + input_ids = encodings_pt.input_ids.to(first_device) + attention_mask = encodings_pt.attention_mask.to(first_device) + + query_ids = [query_ids[index] for index in encodings_pt.overflow_to_sample_mapping] + document_ids = [document_ids[sample_id] for sample_id in encodings_pt.overflow_to_sample_mapping] + + encodings = encodings_pt.encodings + sequence_ids = torch.tensor( + [[id_ if id_ is not None else -1 for id_ in encoding.sequence_ids] for encoding in encodings] + ).to(first_device) + + return input_ids, attention_mask, sequence_ids, encodings, query_ids, document_ids + + def _postprocess( + self, + start: "torch.Tensor", + end: "torch.Tensor", + sequence_ids: "torch.Tensor", + attention_mask: "torch.Tensor", + answers_per_seq: int, + encodings: List["Encoding"], + ) -> Tuple[List[List[int]], List[List[int]], "torch.Tensor"]: + """ + Turns start and end logits into probabilities for each answer span. + + Unlike most other implementations, it doesn't normalize the scores in each split to make them easier to + compare across different splits. 
    def _postprocess(
        self,
        start: "torch.Tensor",
        end: "torch.Tensor",
        sequence_ids: "torch.Tensor",
        attention_mask: "torch.Tensor",
        answers_per_seq: int,
        encodings: List["Encoding"],
    ) -> Tuple[List[List[int]], List[List[int]], "torch.Tensor"]:
        """
        Turns start and end logits into probabilities for each answer span.

        Unlike most other implementations, it doesn't normalize the scores in each split to make them easier to
        compare across different splits. Returns the top k answer spans.

        Returns (start char offsets per sequence, end char offsets per sequence, candidate probabilities).
        Only candidates with probability > 0 are converted to char offsets, so the two offset lists may be
        shorter than ``answers_per_seq`` per sequence, while the probabilities tensor keeps the full width.
        """
        # NOTE(review): torch.topk below requires answers_per_seq <= seq_length**2 — presumably
        # guaranteed by the caller; confirm for very short sequences.
        mask = sequence_ids == 1  # Only keep tokens from the context (should ignore special tokens)
        mask = torch.logical_and(mask, attention_mask == 1)  # Definitely remove special tokens
        start = torch.where(mask, start, -torch.inf)  # Apply the mask on the start logits
        end = torch.where(mask, end, -torch.inf)  # Apply the mask on the end logits
        # Broadcast start against end so logits[i, s, e] = start[i, s] + end[i, e].
        start = start.unsqueeze(-1)
        end = end.unsqueeze(-2)

        logits = start + end  # shape: (batch_size, seq_length (start), seq_length (end))

        # The mask here onwards is the same for all instances in the batch
        # As such we do away with the batch dimension
        mask = torch.ones(logits.shape[-2:], dtype=torch.bool, device=logits.device)
        mask = torch.triu(mask)  # End shouldn't be before start
        masked_logits = torch.where(mask, logits, -torch.inf)
        probabilities = torch.sigmoid(masked_logits * self.calibration_factor)

        flat_probabilities = probabilities.flatten(-2, -1)  # necessary for topk

        # topk can return invalid candidates as well if answers_per_seq > num_valid_candidates
        # We only keep probability > 0 candidates later on
        candidates = torch.topk(flat_probabilities, answers_per_seq)
        seq_length = logits.shape[-1]
        # The flat index encodes (start, end) as start * seq_length + end.
        start_candidates = candidates.indices // seq_length  # Recover indices from flattening
        end_candidates = candidates.indices % seq_length
        candidates_values = candidates.values.cpu()
        start_candidates = start_candidates.cpu()
        end_candidates = end_candidates.cpu()

        # Convert token indices to character offsets within the original document text.
        start_candidates_tokens_to_chars = []
        end_candidates_tokens_to_chars = []
        for i, (s_candidates, e_candidates, encoding) in enumerate(zip(start_candidates, end_candidates, encodings)):
            # Those with probabilities > 0 are valid
            valid = candidates_values[i] > 0
            s_char_spans = []
            e_char_spans = []
            for start_token, end_token in zip(s_candidates[valid], e_candidates[valid]):
                # token_to_chars returns `None` for special tokens
                # But we shouldn't have special tokens in the answers at this point
                # The whole span is given by the start of the start_token (index 0)
                # and the end of the end token (index 1)
                s_char_spans.append(encoding.token_to_chars(start_token)[0])
                e_char_spans.append(encoding.token_to_chars(end_token)[1])
            start_candidates_tokens_to_chars.append(s_char_spans)
            end_candidates_tokens_to_chars.append(e_char_spans)

        return start_candidates_tokens_to_chars, end_candidates_tokens_to_chars, candidates_values
point + # The whole span is given by the start of the start_token (index 0) + # and the end of the end token (index 1) + s_char_spans.append(encoding.token_to_chars(start_token)[0]) + e_char_spans.append(encoding.token_to_chars(end_token)[1]) + start_candidates_tokens_to_chars.append(s_char_spans) + end_candidates_tokens_to_chars.append(e_char_spans) + + return start_candidates_tokens_to_chars, end_candidates_tokens_to_chars, candidates_values + + def _add_answer_page_number(self, answer: ExtractedAnswer) -> ExtractedAnswer: + if answer.meta is None: + answer.meta = {} + + if answer.document_offset is None: + return answer + + if not answer.document or "page_number" not in answer.document.meta: + return answer + + if not isinstance(answer.document.meta["page_number"], int): + logger.warning( + f"Document's page_number must be int but is {type(answer.document.meta['page_number'])}. " + f"No page number will be added to the answer." + ) + return answer + + # Calculate the answer page number + if answer.document.content: + ans_start = answer.document_offset.start + answer_page_number = answer.document.meta["page_number"] + answer.document.content[:ans_start].count("\f") + answer.meta.update({"answer_page_number": answer_page_number}) + + return answer + + def _nest_answers( + self, + start: List[List[int]], + end: List[List[int]], + probabilities: "torch.Tensor", + flattened_documents: List[Document], + queries: List[str], + answers_per_seq: int, + top_k: Optional[int], + score_threshold: Optional[float], + query_ids: List[int], + document_ids: List[int], + no_answer: bool, + overlap_threshold: Optional[float], + ) -> List[List[ExtractedAnswer]]: + """ + Reconstructs the nested structure that existed before flattening. + + Also computes a no answer score. This score is different from most other implementations because it does not + consider the no answer logit introduced with SQuAD 2. 
    def _nest_answers(
        self,
        start: List[List[int]],
        end: List[List[int]],
        probabilities: "torch.Tensor",
        flattened_documents: List[Document],
        queries: List[str],
        answers_per_seq: int,
        top_k: Optional[int],
        score_threshold: Optional[float],
        query_ids: List[int],
        document_ids: List[int],
        no_answer: bool,
        overlap_threshold: Optional[float],
    ) -> List[List[ExtractedAnswer]]:
        """
        Reconstructs the nested structure that existed before flattening.

        Also computes a no answer score. This score is different from most other implementations because it does not
        consider the no answer logit introduced with SQuAD 2. Instead, it just computes the probability that the
        answer does not exist in the top k or top p.
        """
        # Build one flat ExtractedAnswer per candidate span, in sequence order.
        answers_without_query = []
        for document_id, start_candidates_, end_candidates_, probabilities_ in zip(
            document_ids, start, end, probabilities
        ):
            for start_, end_, probability in zip(start_candidates_, end_candidates_, probabilities_):
                doc = flattened_documents[document_id]
                answers_without_query.append(
                    ExtractedAnswer(
                        query="",  # Can't be None but we'll add it later
                        data=doc.content[start_:end_],  # type: ignore
                        document=doc,
                        score=probability.item(),
                        document_offset=ExtractedAnswer.Span(start_, end_),
                        meta={},
                    )
                )
        i = 0
        nested_answers = []
        # Walk the flat answer list, grouping consecutive answers back under their query.
        # NOTE(review): `query_ids[i // answers_per_seq]` assumes exactly answers_per_seq answers
        # were appended per sequence, but _postprocess filters candidates to probability > 0, which
        # can yield fewer per sequence — confirm alignment before relying on per-query grouping.
        for query_id in range(query_ids[-1] + 1):
            current_answers = []
            while i < len(answers_without_query) and query_ids[i // answers_per_seq] == query_id:
                answer = answers_without_query[i]
                answer.query = queries[query_id]
                current_answers.append(answer)
                i += 1
            # Best answers first, then drop near-duplicates and trim to top_k.
            current_answers = sorted(current_answers, key=lambda ans: ans.score, reverse=True)
            current_answers = self.deduplicate_by_overlap(current_answers, overlap_threshold=overlap_threshold)
            current_answers = current_answers[:top_k]

            # Calculate the answer page number and add it to meta
            current_answers = [self._add_answer_page_number(answer=answer) for answer in current_answers]

            if no_answer:
                # Probability that every returned answer is wrong.
                no_answer_score = math.prod(1 - answer.score for answer in current_answers)
                answer_ = ExtractedAnswer(
                    data=None, query=queries[query_id], meta={}, document=None, score=no_answer_score
                )
                current_answers.append(answer_)
            current_answers = sorted(current_answers, key=lambda ans: ans.score, reverse=True)
            if score_threshold is not None:
                current_answers = [answer for answer in current_answers if answer.score >= score_threshold]
            nested_answers.append(current_answers)

        return nested_answers
Calculates the amount of overlap (in number of characters) between two answer offsets. + + This Stack overflow + [post](https://stackoverflow.com/questions/325933/determine-whether-two-date-ranges-overlap/325964#325964) + explains how to calculate the overlap between two ranges. + """ + # Check for overlap: (StartA <= EndB) and (StartB <= EndA) + if answer1_start <= answer2_end and answer2_start <= answer1_end: + return min( + answer1_end - answer1_start, + answer1_end - answer2_start, + answer2_end - answer1_start, + answer2_end - answer2_start, + ) + return 0 + + def _should_keep( + self, candidate_answer: ExtractedAnswer, current_answers: List[ExtractedAnswer], overlap_threshold: float + ) -> bool: + """ + Determines if the answer should be kept based on how much it overlaps with previous answers. + + NOTE: We might want to avoid throwing away answers that only have a few character (or word) overlap: + - E.g. The answers "the river in" and "in Maine" from the context "I want to go to the river in Maine." + might both want to be kept. + + :param candidate_answer: + Candidate answer that will be checked if it should be kept. + :param current_answers: + Current list of answers that will be kept. + :param overlap_threshold: + If the overlap between two answers is greater than this threshold then return False. 
+ """ + keep = True + + # If the candidate answer doesn't have a document keep it + if not candidate_answer.document: + return keep + + for ans in current_answers: + # If an answer in current_answers doesn't have a document skip the comparison + if not ans.document: + continue + + # If offset is missing then keep both + if ans.document_offset is None: + continue + + # If offset is missing then keep both + if candidate_answer.document_offset is None: + continue + + # If the answers come from different documents then keep both + if candidate_answer.document.id != ans.document.id: + continue + + overlap_len = self._calculate_overlap( + answer1_start=ans.document_offset.start, + answer1_end=ans.document_offset.end, + answer2_start=candidate_answer.document_offset.start, + answer2_end=candidate_answer.document_offset.end, + ) + + # If overlap is 0 then keep + if overlap_len == 0: + continue + + overlap_frac_answer1 = overlap_len / (ans.document_offset.end - ans.document_offset.start) + overlap_frac_answer2 = overlap_len / ( + candidate_answer.document_offset.end - candidate_answer.document_offset.start + ) + + if overlap_frac_answer1 > overlap_threshold or overlap_frac_answer2 > overlap_threshold: + keep = False + break + + return keep + + def deduplicate_by_overlap( + self, answers: List[ExtractedAnswer], overlap_threshold: Optional[float] + ) -> List[ExtractedAnswer]: + """ + De-duplicates overlapping Extractive Answers. + + De-duplicates overlapping Extractive Answers from the same document based on how much the spans of the + answers overlap. + + :param answers: + List of answers to be deduplicated. + :param overlap_threshold: + If set this will remove duplicate answers if they have an overlap larger than the + supplied threshold. For example, for the answers "in the river in Maine" and "the river" we would remove + one of these answers since the second answer has a 100% (1.0) overlap with the first answer. 
+ However, for the answers "the river in" and "in Maine" there is only a max overlap percentage of 25% so + both of these answers could be kept if this variable is set to 0.24 or lower. + If None is provided then all answers are kept. + :returns: + List of deduplicated answers. + """ + if overlap_threshold is None: + return answers + + # Initialize with the first answer and its offsets_in_document + deduplicated_answers = [answers[0]] + + # Loop over remaining answers to check for overlaps + for ans in answers[1:]: + keep = self._should_keep( + candidate_answer=ans, current_answers=deduplicated_answers, overlap_threshold=overlap_threshold + ) + if keep: + deduplicated_answers.append(ans) + + return deduplicated_answers + + @component.output_types(answers=List[ExtractedAnswer]) + def run( + self, + query: str, + documents: List[Document], + top_k: Optional[int] = None, + score_threshold: Optional[float] = None, + max_seq_length: Optional[int] = None, + stride: Optional[int] = None, + max_batch_size: Optional[int] = None, + answers_per_seq: Optional[int] = None, + no_answer: Optional[bool] = None, + overlap_threshold: Optional[float] = None, + ): + """ + Locates and extracts answers from the given Documents using the given query. + + :param query: + Query string. + :param documents: + List of Documents in which you want to search for an answer to the query. + :param top_k: + The maximum number of answers to return. + An additional answer is returned if no_answer is set to True (default). + :param score_threshold: + Returns only answers with the score above this threshold. + :param max_seq_length: + Maximum number of tokens. If a sequence exceeds it, the sequence is split. + :param stride: + Number of tokens that overlap when sequence is split because it exceeds max_seq_length. + :param max_batch_size: + Maximum number of samples that are fed through the model at the same time. + :param answers_per_seq: + Number of answer candidates to consider per sequence. 
+ This is relevant when a Document was split into multiple sequences because of max_seq_length. + :param no_answer: + Whether to return no answer scores. + :param overlap_threshold: + If set this will remove duplicate answers if they have an overlap larger than the + supplied threshold. For example, for the answers "in the river in Maine" and "the river" we would remove + one of these answers since the second answer has a 100% (1.0) overlap with the first answer. + However, for the answers "the river in" and "in Maine" there is only a max overlap percentage of 25% so + both of these answers could be kept if this variable is set to 0.24 or lower. + If None is provided then all answers are kept. + :returns: + List of answers sorted by (desc.) answer score. + + :raises RuntimeError: + If the component was not warmed up by calling 'warm_up()' before. + """ + if self.model is None: + raise RuntimeError( + "The component ExtractiveReader was not warmed up. Run 'warm_up()' before calling 'run()'." 
+ ) + + if not documents: + return {"answers": []} + + queries = [query] # Temporary solution until we have decided what batching should look like in v2 + nested_documents = [documents] + top_k = top_k or self.top_k + score_threshold = score_threshold or self.score_threshold + max_seq_length = max_seq_length or self.max_seq_length + stride = stride or self.stride + max_batch_size = max_batch_size or self.max_batch_size + answers_per_seq = answers_per_seq or self.answers_per_seq or 20 + no_answer = no_answer if no_answer is not None else self.no_answer + overlap_threshold = overlap_threshold or self.overlap_threshold + + flattened_queries, flattened_documents, query_ids = self._flatten_documents(queries, nested_documents) + input_ids, attention_mask, sequence_ids, encodings, query_ids, document_ids = self._preprocess( + flattened_queries, flattened_documents, max_seq_length, query_ids, stride + ) + + num_batches = math.ceil(input_ids.shape[0] / max_batch_size) if max_batch_size else 1 + batch_size = max_batch_size or input_ids.shape[0] + + start_logits_list = [] + end_logits_list = [] + + for i in range(num_batches): + start_index = i * batch_size + end_index = start_index + batch_size + cur_input_ids = input_ids[start_index:end_index] + cur_attention_mask = attention_mask[start_index:end_index] + + with torch.inference_mode(): + output = self.model(input_ids=cur_input_ids, attention_mask=cur_attention_mask) + cur_start_logits = output.start_logits + cur_end_logits = output.end_logits + if num_batches != 1: + cur_start_logits = cur_start_logits.cpu() + cur_end_logits = cur_end_logits.cpu() + start_logits_list.append(cur_start_logits) + end_logits_list.append(cur_end_logits) + + start_logits = torch.cat(start_logits_list) + end_logits = torch.cat(end_logits_list) + + start, end, probabilities = self._postprocess( + start_logits, end_logits, sequence_ids, attention_mask, answers_per_seq, encodings + ) + + answers = self._nest_answers( + start=start, + end=end, + 
probabilities=probabilities, + flattened_documents=flattened_documents, + queries=queries, + answers_per_seq=answers_per_seq, + top_k=top_k, + score_threshold=score_threshold, + query_ids=query_ids, + document_ids=document_ids, + no_answer=no_answer, + overlap_threshold=overlap_threshold, + ) + + return {"answers": answers[0]} # same temporary batching fix as above diff --git a/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/bm25_retriever.py b/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/bm25_retriever.py new file mode 100644 index 0000000000000000000000000000000000000000..ac3f8486f80d2902b416f2708ddb5d1e913f032e --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/bm25_retriever.py @@ -0,0 +1,163 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Optional + +from haystack import DeserializationError, Document, component, default_from_dict, default_to_dict +from haystack.document_stores.in_memory import InMemoryDocumentStore +from haystack.document_stores.types import FilterPolicy + + +@component +class InMemoryBM25Retriever: + """ + Retrieves documents that are most similar to the query using keyword-based algorithm. + + Use this retriever with the InMemoryDocumentStore. 
+ + ### Usage example + + ```python + from haystack import Document + from haystack.components.retrievers.in_memory import InMemoryBM25Retriever + from haystack.document_stores.in_memory import InMemoryDocumentStore + + docs = [ + Document(content="Python is a popular programming language"), + Document(content="python ist eine beliebte Programmiersprache"), + ] + + doc_store = InMemoryDocumentStore() + doc_store.write_documents(docs) + retriever = InMemoryBM25Retriever(doc_store) + + result = retriever.run(query="Programmiersprache") + + print(result["documents"]) + ``` + """ + + def __init__( + self, + document_store: InMemoryDocumentStore, + filters: Optional[Dict[str, Any]] = None, + top_k: int = 10, + scale_score: bool = False, + filter_policy: FilterPolicy = FilterPolicy.REPLACE, + ): + """ + Create the InMemoryBM25Retriever component. + + :param document_store: + An instance of InMemoryDocumentStore where the retriever should search for relevant documents. + :param filters: + A dictionary with filters to narrow down the retriever's search space in the document store. + :param top_k: + The maximum number of documents to retrieve. + :param scale_score: + When `True`, scales the score of retrieved documents to a range of 0 to 1, where 1 means extremely relevant. + When `False`, uses raw similarity scores. + :param filter_policy: The filter policy to apply during retrieval. + Filter policy determines how filters are applied when retrieving documents. You can choose: + - `REPLACE` (default): Overrides the initialization filters with the filters specified at runtime. + Use this policy to dynamically change filtering for specific queries. + - `MERGE`: Combines runtime filters with initialization filters to narrow down the search. + :raises ValueError: + If the specified `top_k` is not > 0. 
+ """ + if not isinstance(document_store, InMemoryDocumentStore): + raise ValueError("document_store must be an instance of InMemoryDocumentStore") + + self.document_store = document_store + + if top_k <= 0: + raise ValueError(f"top_k must be greater than 0. Currently, the top_k is {top_k}") + + self.filters = filters + self.top_k = top_k + self.scale_score = scale_score + self.filter_policy = filter_policy + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"document_store": type(self.document_store).__name__} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + docstore = self.document_store.to_dict() + return default_to_dict( + self, + document_store=docstore, + filters=self.filters, + top_k=self.top_k, + scale_score=self.scale_score, + filter_policy=self.filter_policy.value, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "InMemoryBM25Retriever": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. 
+ """ + init_params = data.get("init_parameters", {}) + if "document_store" not in init_params: + raise DeserializationError("Missing 'document_store' in serialization data") + if "type" not in init_params["document_store"]: + raise DeserializationError("Missing 'type' in document store's serialization data") + if "filter_policy" in init_params: + init_params["filter_policy"] = FilterPolicy.from_str(init_params["filter_policy"]) + data["init_parameters"]["document_store"] = InMemoryDocumentStore.from_dict( + data["init_parameters"]["document_store"] + ) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run( + self, + query: str, + filters: Optional[Dict[str, Any]] = None, + top_k: Optional[int] = None, + scale_score: Optional[bool] = None, + ): + """ + Run the InMemoryBM25Retriever on the given input data. + + :param query: + The query string for the Retriever. + :param filters: + A dictionary with filters to narrow down the search space when retrieving documents. + :param top_k: + The maximum number of documents to return. + :param scale_score: + When `True`, scales the score of retrieved documents to a range of 0 to 1, where 1 means extremely relevant. + When `False`, uses raw similarity scores. + :returns: + The retrieved documents. + + :raises ValueError: + If the specified DocumentStore is not found or is not a InMemoryDocumentStore instance. 
+ """ + if self.filter_policy == FilterPolicy.MERGE and filters: + filters = {**(self.filters or {}), **filters} + else: + filters = filters or self.filters + if top_k is None: + top_k = self.top_k + if scale_score is None: + scale_score = self.scale_score + + docs = self.document_store.bm25_retrieval(query=query, filters=filters, top_k=top_k, scale_score=scale_score) + return {"documents": docs} diff --git a/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/embedding_retriever.py b/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/embedding_retriever.py new file mode 100644 index 0000000000000000000000000000000000000000..cddd2cef4103d8fa5e7022ffb80230034a50f16d --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/retrievers/in_memory/embedding_retriever.py @@ -0,0 +1,194 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Optional + +from haystack import DeserializationError, Document, component, default_from_dict, default_to_dict +from haystack.document_stores.in_memory import InMemoryDocumentStore +from haystack.document_stores.types import FilterPolicy + + +@component +class InMemoryEmbeddingRetriever: + """ + Retrieves documents that are most semantically similar to the query. + + Use this retriever with the InMemoryDocumentStore. + + When using this retriever, make sure it has query and document embeddings available. + In indexing pipelines, use a DocumentEmbedder to embed documents. + In query pipelines, use a TextEmbedder to embed queries and send them to the retriever. 
+ + ### Usage example + ```python + from haystack import Document + from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder + from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever + from haystack.document_stores.in_memory import InMemoryDocumentStore + + docs = [ + Document(content="Python is a popular programming language"), + Document(content="python ist eine beliebte Programmiersprache"), + ] + doc_embedder = SentenceTransformersDocumentEmbedder() + doc_embedder.warm_up() + docs_with_embeddings = doc_embedder.run(docs)["documents"] + + doc_store = InMemoryDocumentStore() + doc_store.write_documents(docs_with_embeddings) + retriever = InMemoryEmbeddingRetriever(doc_store) + + query="Programmiersprache" + text_embedder = SentenceTransformersTextEmbedder() + text_embedder.warm_up() + query_embedding = text_embedder.run(query)["embedding"] + + result = retriever.run(query_embedding=query_embedding) + + print(result["documents"]) + ``` + """ + + def __init__( + self, + document_store: InMemoryDocumentStore, + filters: Optional[Dict[str, Any]] = None, + top_k: int = 10, + scale_score: bool = False, + return_embedding: bool = False, + filter_policy: FilterPolicy = FilterPolicy.REPLACE, + ): + """ + Create the InMemoryEmbeddingRetriever component. + + :param document_store: + An instance of InMemoryDocumentStore where the retriever should search for relevant documents. + :param filters: + A dictionary with filters to narrow down the retriever's search space in the document store. + :param top_k: + The maximum number of documents to retrieve. + :param scale_score: + When `True`, scales the score of retrieved documents to a range of 0 to 1, where 1 means extremely relevant. + When `False`, uses raw similarity scores. + :param return_embedding: + When `True`, returns the embedding of the retrieved documents. + When `False`, returns just the documents, without their embeddings. 
+ :param filter_policy: The filter policy to apply during retrieval. + Filter policy determines how filters are applied when retrieving documents. You can choose: + - `REPLACE` (default): Overrides the initialization filters with the filters specified at runtime. + Use this policy to dynamically change filtering for specific queries. + - `MERGE`: Combines runtime filters with initialization filters to narrow down the search. + :raises ValueError: + If the specified top_k is not > 0. + """ + if not isinstance(document_store, InMemoryDocumentStore): + raise ValueError("document_store must be an instance of InMemoryDocumentStore") + + self.document_store = document_store + + if top_k <= 0: + raise ValueError(f"top_k must be greater than 0. Currently, top_k is {top_k}") + + self.filters = filters + self.top_k = top_k + self.scale_score = scale_score + self.return_embedding = return_embedding + self.filter_policy = filter_policy + + def _get_telemetry_data(self) -> Dict[str, Any]: + """ + Data that is sent to Posthog for usage analytics. + """ + return {"document_store": type(self.document_store).__name__} + + def to_dict(self) -> Dict[str, Any]: + """ + Serializes the component to a dictionary. + + :returns: + Dictionary with serialized data. + """ + docstore = self.document_store.to_dict() + return default_to_dict( + self, + document_store=docstore, + filters=self.filters, + top_k=self.top_k, + scale_score=self.scale_score, + return_embedding=self.return_embedding, + filter_policy=self.filter_policy.value, + ) + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "InMemoryEmbeddingRetriever": + """ + Deserializes the component from a dictionary. + + :param data: + The dictionary to deserialize from. + :returns: + The deserialized component. 
+ """ + init_params = data.get("init_parameters", {}) + if "document_store" not in init_params: + raise DeserializationError("Missing 'document_store' in serialization data") + if "type" not in init_params["document_store"]: + raise DeserializationError("Missing 'type' in document store's serialization data") + if "filter_policy" in init_params: + init_params["filter_policy"] = FilterPolicy.from_str(init_params["filter_policy"]) + data["init_parameters"]["document_store"] = InMemoryDocumentStore.from_dict( + data["init_parameters"]["document_store"] + ) + return default_from_dict(cls, data) + + @component.output_types(documents=List[Document]) + def run( + self, + query_embedding: List[float], + filters: Optional[Dict[str, Any]] = None, + top_k: Optional[int] = None, + scale_score: Optional[bool] = None, + return_embedding: Optional[bool] = None, + ): + """ + Run the InMemoryEmbeddingRetriever on the given input data. + + :param query_embedding: + Embedding of the query. + :param filters: + A dictionary with filters to narrow down the search space when retrieving documents. + :param top_k: + The maximum number of documents to return. + :param scale_score: + When `True`, scales the score of retrieved documents to a range of 0 to 1, where 1 means extremely relevant. + When `False`, uses raw similarity scores. + :param return_embedding: + When `True`, returns the embedding of the retrieved documents. + When `False`, returns just the documents, without their embeddings. + :returns: + The retrieved documents. + + :raises ValueError: + If the specified DocumentStore is not found or is not an InMemoryDocumentStore instance. 
+ """ + if self.filter_policy == FilterPolicy.MERGE and filters: + filters = {**(self.filters or {}), **filters} + else: + filters = filters or self.filters + if top_k is None: + top_k = self.top_k + if scale_score is None: + scale_score = self.scale_score + if return_embedding is None: + return_embedding = self.return_embedding + + docs = self.document_store.embedding_retrieval( + query_embedding=query_embedding, + filters=filters, + top_k=top_k, + scale_score=scale_score, + return_embedding=return_embedding, + ) + + return {"documents": docs} diff --git a/testbed/deepset-ai__haystack/haystack/components/routers/metadata_router.py b/testbed/deepset-ai__haystack/haystack/components/routers/metadata_router.py new file mode 100644 index 0000000000000000000000000000000000000000..7c6c007d16b3c994dad999a3c1494d392b710ffc --- /dev/null +++ b/testbed/deepset-ai__haystack/haystack/components/routers/metadata_router.py @@ -0,0 +1,111 @@ +# SPDX-FileCopyrightText: 2022-present deepset GmbH +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Dict, List + +from haystack import Document, component +from haystack.utils.filters import document_matches_filter + + +@component +class MetadataRouter: + """ + Routes documents to different connections based on their metadata fields. + + Specify the routing rules in the `init` method. + If a document does not match any of the rules, it's routed to a connection named "unmatched". 
+ + ### Usage example + + ```python + from haystack import Document + from haystack.components.routers import MetadataRouter + + docs = [Document(content="Paris is the capital of France.", meta={"language": "en"}), + Document(content="Berlin ist die Haupststadt von Deutschland.", meta={"language": "de"})] + + router = MetadataRouter(rules={"en": {"field": "meta.language", "operator": "==", "value": "en"}}) + + print(router.run(documents=docs)) + + # {'en': [Document(id=..., content: 'Paris is the capital of France.', meta: {'language': 'en'})], + # 'unmatched': [Document(id=..., content: 'Berlin ist die Haupststadt von Deutschland.', meta: {'language': 'de'})]} + ``` + """ + + def __init__(self, rules: Dict[str, Dict]): + """ + Initializes the MetadataRouter component. + + :param rules: A dictionary defining how to route documents to output connections based on their metadata. + Keys are output connection names, and values are dictionaries of + [filtering expressions](https://docs.haystack.deepset.ai/docs/metadata-filtering) in Haystack. 
+ For example: + ```python + { + "edge_1": { + "operator": "AND", + "conditions": [ + {"field": "meta.created_at", "operator": ">=", "value": "2023-01-01"}, + {"field": "meta.created_at", "operator": "<", "value": "2023-04-01"}, + ], + }, + "edge_2": { + "operator": "AND", + "conditions": [ + {"field": "meta.created_at", "operator": ">=", "value": "2023-04-01"}, + {"field": "meta.created_at", "operator": "<", "value": "2023-07-01"}, + ], + }, + "edge_3": { + "operator": "AND", + "conditions": [ + {"field": "meta.created_at", "operator": ">=", "value": "2023-07-01"}, + {"field": "meta.created_at", "operator": "<", "value": "2023-10-01"}, + ], + }, + "edge_4": { + "operator": "AND", + "conditions": [ + {"field": "meta.created_at", "operator": ">=", "value": "2023-10-01"}, + {"field": "meta.created_at", "operator": "<", "value": "2024-01-01"}, + ], + }, + } + ``` + """ + self.rules = rules + component.set_output_types(self, unmatched=List[Document], **{edge: List[Document] for edge in rules}) + + def run(self, documents: List[Document]): + """ + Routes the documents. + + If a document does not match any of the rules, it's routed to a connection named "unmatched". + + :param documents: A list of documents to route. + + :returns: A dictionary where the keys are the names of the output connections (including `"unmatched"`) + and the values are lists of routed documents. + """ + unmatched_documents = [] + output: Dict[str, List[Document]] = {edge: [] for edge in self.rules} + + for document in documents: + cur_document_matched = False + for edge, rule in self.rules.items(): + if "operator" not in rule: + raise ValueError( + "Invalid filter syntax. " + "See https://docs.haystack.deepset.ai/docs/metadata-filtering for details." + ) + if document_matches_filter(rule, document): + output[edge].append(document) + cur_document_matched = True + + if not cur_document_matched: + unmatched_documents.append(document) + + output["unmatched"] = unmatched_documents + return output